code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.unit;
/**
 * A {@code ByteSizeUnit} represents size at a given unit of
 * granularity (bytes, kilobytes, ... petabytes, in powers of 1024) and
 * provides utility methods to convert across units.
 * A {@code ByteSizeUnit} does not maintain size information, but only
 * helps organize and use size representations that may be maintained
 * separately across various contexts.
 *
 * Conversions to a coarser unit truncate (integer division); conversions to a
 * finer unit saturate at {@link Long#MAX_VALUE} / {@link Long#MIN_VALUE} on
 * overflow.
 */
public enum ByteSizeUnit {
    // Ordinal order matters: fromId(id) maps ids 0..5 onto these constants.
    BYTES(1L, "b"),
    KB(1L << 10, "kb"),
    MB(1L << 20, "mb"),
    GB(1L << 30, "gb"),
    TB(1L << 40, "tb"),
    PB(1L << 50, "pb");

    // Byte count of each unit; kept package-visible for backward compatibility.
    static final long C0 = 1L;
    static final long C1 = C0 * 1024L;
    static final long C2 = C1 * 1024L;
    static final long C3 = C2 * 1024L;
    static final long C4 = C3 * 1024L;
    static final long C5 = C4 * 1024L;
    static final long MAX = Long.MAX_VALUE;

    /** Number of bytes in one unit of this granularity. */
    private final long byteFactor;
    /** Human-readable suffix used when formatting/parsing ("b", "kb", ...). */
    private final String suffix;

    ByteSizeUnit(long byteFactor, String suffix) {
        this.byteFactor = byteFactor;
        this.suffix = suffix;
    }

    /**
     * Resolves a unit from its ordinal id (0 = BYTES ... 5 = PB).
     *
     * @param id the ordinal id of the unit
     * @return the matching unit
     * @throws IllegalArgumentException if {@code id} is out of range
     */
    public static ByteSizeUnit fromId(int id) {
        if (id < 0 || id >= values().length) {
            throw new IllegalArgumentException("No byte size unit found for id [" + id + "]");
        }
        return values()[id];
    }

    /**
     * Scale d by m, checking for overflow.
     * This has a short name to make above code more readable.
     */
    static long x(long d, long m, long over) {
        if (d > over) return Long.MAX_VALUE;
        if (d < -over) return Long.MIN_VALUE;
        return d * m;
    }

    /**
     * Converts {@code size} from this unit to the unit containing
     * {@code targetFactor} bytes. Replaces the former 36 per-constant
     * overrides with a single shared implementation: converting downwards
     * (coarser unit) truncates via integer division, converting upwards
     * (finer unit) multiplies with overflow saturation via {@link #x}.
     */
    private long convert(long size, long targetFactor) {
        if (byteFactor == targetFactor) {
            return size;
        }
        if (byteFactor < targetFactor) {
            // To a coarser unit: divide, truncating toward zero.
            return size / (targetFactor / byteFactor);
        }
        // To a finer unit: multiply, saturating on overflow.
        long multiplier = byteFactor / targetFactor;
        return x(size, multiplier, MAX / multiplier);
    }

    /** @return {@code size} in this unit converted to bytes (saturating). */
    public long toBytes(long size) {
        return convert(size, C0);
    }

    /** @return {@code size} in this unit converted to kilobytes. */
    public long toKB(long size) {
        return convert(size, C1);
    }

    /** @return {@code size} in this unit converted to megabytes. */
    public long toMB(long size) {
        return convert(size, C2);
    }

    /** @return {@code size} in this unit converted to gigabytes. */
    public long toGB(long size) {
        return convert(size, C3);
    }

    /** @return {@code size} in this unit converted to terabytes. */
    public long toTB(long size) {
        return convert(size, C4);
    }

    /** @return {@code size} in this unit converted to petabytes. */
    public long toPB(long size) {
        return convert(size, C5);
    }

    /** @return the display/parse suffix for this unit ("b", "kb", ...). */
    public String getSuffix() {
        return suffix;
    }
}
| EvilMcJerkface/crate | server/src/main/java/org/elasticsearch/common/unit/ByteSizeUnit.java | Java | apache-2.0 | 6,846 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
function friendsWrapper() {
/*
* Loads the owner, the viewer, the owner's friends, and the viewer's
* friends and mutual friends between owner and viewer. Response data is put into the variables owner, viewer,
* ownerFriends, and viewerFriends, mutualFriends respectively.
*
*
*/
this.loadFriends = function(){
var req = opensocial.newDataRequest();
req.add(req.newFetchPersonRequest(opensocial.IdSpec.PersonId.VIEWER), 'viewer');
req.add(req.newFetchPersonRequest(opensocial.IdSpec.PersonId.OWNER), 'owner');
var viewerFriends = opensocial.newIdSpec({ "userId" : "VIEWER", "groupId" : "FRIENDS" });
var ownerFriends = opensocial.newIdSpec({ "userId" : "OWNER", "groupId" : "FRIENDS" });
var opt_params = {};
opt_params[opensocial.DataRequest.PeopleRequestFields.MAX] = 100;
req.add(req.newFetchPeopleRequest(viewerFriends, opt_params), 'viewerFriends');
req.add(req.newFetchPeopleRequest(ownerFriends, opt_params), 'ownerFriends');
var params = {};
params[opensocial.DataRequest.PeopleRequestFields.MAX] = 100;
// Usage of isFriendsWith filter to get mutual friends. filterValue should be set to the friend with whom mutual friends is to be found.
params[opensocial.DataRequest.PeopleRequestFields.FILTER] = opensocial.DataRequest.FilterType.IS_FRIENDS_WITH;
params["filterValue"] = opensocial.IdSpec.PersonId.VIEWER;
req.add(req.newFetchPeopleRequest(ownerFriends, params), 'mutualFriends');
var app_params = {};
app_params[opensocial.DataRequest.PeopleRequestFields.MAX] = 100;
// Usage of hasApp filter to get list of friends who use this app.
app_params[opensocial.DataRequest.PeopleRequestFields.FILTER] = opensocial.DataRequest.FilterType.HAS_APP;
req.add(req.newFetchPeopleRequest(ownerFriends, app_params), 'friendsUsingApp');
req.send(displayFriends);
};
function displayFriends(data) {
var viewer = data.get('viewer').getData();
var viewerFriends = data.get('viewerFriends').getData();
var owner = data.get('owner').getData();
var ownerFriends = data.get('ownerFriends').getData();
html = new Array();
html.push(owner.getDisplayName() + '\'s Friends(',ownerFriends.size(),') <br>');
html.push('<ul>');
ownerFriends.each(function(person) {
if (person.getId()) {
html.push('<li>', person.getDisplayName(), '</li>');
}
});
html.push('</ul>');
if(owner.getDisplayName()!=viewer.getDisplayName()) {
var mutualFriends = data.get('mutualFriends').getData();
html.push('Mutual Friends with ',viewer.getDisplayName(),'(',mutualFriends.size(),') <br>');
html.push('<ul>');
mutualFriends.each(function(person) {
if (person.getId()) {
html.push('<li>', person.getDisplayName(), '</li>');
}
});
html.push('</ul>');
}
var friendsUsingApp = data.get('friendsUsingApp').getData();
html.push('Friends using this Widget (',friendsUsingApp.size(),') <br>');
html.push('<ul>');
friendsUsingApp.each(function(person) {
if (person.getId()) {
html.push('<li>', person.getDisplayName(), '</li>');
}
});
html.push('</ul>');
document.getElementById('friends').innerHTML = html.join('');
gadgets.window.adjustHeight();
}
} | kidaa/rave | rave-demo-gadgets/src/main/webapp/friendsWrapper.js | JavaScript | apache-2.0 | 4,255 |
/* Yet Another Forum.NET
* Copyright (C) 2003-2005 Bjørnar Henden
* Copyright (C) 2006-2013 Jaben Cargman
* Copyright (C) 2014-2015 Ingo Herbote
* http://www.yetanotherforum.net/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
namespace YAF.Modules.BBCode
{
using System.Linq;
using System.Web;
using System.Web.UI;
using YAF.Classes;
using YAF.Controls;
using YAF.Core;
using YAF.Core.Model;
using YAF.Types.Extensions;
using YAF.Types.Interfaces;
using YAF.Types.Models;
using YAF.Utils;
/// <summary>
/// The Attachment BB Code Module.
/// </summary>
public class Attach : YafBBCodeControl
{
/// <summary>
/// Render The Album Image as Link with Image
/// </summary>
/// <remarks>
/// Looks up the attachment referenced by the BBCode "inner" parameter and
/// writes either an inline image (optionally wrapped in a resize/preview
/// link), or a plain file link with size/download statistics, depending on
/// file type, size threshold and the current user's download rights.
/// </remarks>
/// <param name="writer">The writer.</param>
protected override void Render(HtmlTextWriter writer)
{
// Resolve the attachment from the BBCode's inner parameter (attachment id).
var attachment =
this.GetRepository<Attachment>()
.ListTyped(attachmentID: this.Parameters["inner"].ToType<int>())
.FirstOrDefault();
if (attachment == null)
{
return;
}
// Localized "size/downloads" format string and the generic file icon.
var stats = this.GetText("ATTACHMENTINFO");
var fileIcon = this.Get<ITheme>().GetItem("ICONS", "ATTACHED_FILE");
var filename = attachment.FileName.ToLower();
var showImage = false;
var session = this.Get<HttpSessionStateBase>();
var settings = this.Get<YafBoardSettings>();
// Seed session values once; presumably consumed by the resource/preview
// handler — TODO confirm against resource.ashx.
if (session["imagePreviewWidth"] == null)
{
session["imagePreviewWidth"] = settings.ImageAttachmentResizeWidth;
}
if (session["imagePreviewHeight"] == null)
{
session["imagePreviewHeight"] = settings.ImageAttachmentResizeHeight;
}
if (session["imagePreviewCropped"] == null)
{
session["imagePreviewCropped"] = settings.ImageAttachmentResizeCropped;
}
if (session["localizationFile"] == null)
{
session["localizationFile"] = this.Get<ILocalization>().LanguageFileName;
}
// verify it's not too large to display
// Ederon : 02/17/2009 - made it board setting
if (attachment.Bytes.ToType<int>() <= this.Get<YafBoardSettings>().PictureAttachmentDisplayTreshold)
{
// is it an image file?
showImage = filename.IsImageName();
}
if (showImage)
{
// Ederon : download rights
if (this.PageContext.ForumDownloadAccess || this.PageContext.ForumModeratorAccess)
{
// user has rights to download, show him image
writer.Write(
!this.Get<YafBoardSettings>().EnableImageAttachmentResize
? @"<img src=""{0}resource.ashx?a={1}&b={3}"" alt=""{2}"" class=""UserPostedImage attachedImage"" />"
: @"<a href=""{0}resource.ashx?i={1}&b={3}"" date-img=""{0}resource.ashx?a={1}&b={3}"" class=""attachedImage""><img src=""{0}resource.ashx?p={1}&b={3}"" alt=""{2}"" title=""{2}"" /></a>",
YafForumInfo.ForumClientFileRoot,
attachment.ID,
this.HtmlEncode(attachment.FileName),
this.PageContext.PageBoardID);
}
else
{
// (1023 + bytes) / 1024 rounds the size up to whole kilobytes.
var kb = (1023 + attachment.Bytes.ToType<int>()) / 1024;
// user doesn't have rights to download, don't show the image
writer.Write(
@"<img border=""0"" alt="""" src=""{0}"" /> {1} <span class=""attachmentinfo"">{2}</span>",
fileIcon,
attachment.FileName,
stats.FormatWith(kb, attachment.Downloads));
}
}
else
{
// regular file attachment
// (1023 + bytes) / 1024 rounds the size up to whole kilobytes.
var kb = (1023 + attachment.Bytes.ToType<int>()) / 1024;
// Ederon : download rights
if (this.PageContext.ForumDownloadAccess || this.PageContext.ForumModeratorAccess)
{
writer.Write(
@"<img border=""0"" alt="""" src=""{0}"" /> <a class=""attachedImageLink {{html:false,image:false,video:false}}"" href=""{1}resource.ashx?a={2}&b={5}"">{3}</a> <span class=""attachmentinfo"">{4}</span>",
fileIcon,
YafForumInfo.ForumClientFileRoot,
attachment.ID,
attachment.FileName,
stats.FormatWith(kb, attachment.Downloads),
this.PageContext.PageBoardID);
}
else
{
// No download rights: name and stats only, no link.
writer.Write(
@"<img border=""0"" alt="""" src=""{0}"" /> {1} <span class=""attachmentinfo"">{2}</span>",
fileIcon,
attachment.FileName,
stats.FormatWith(kb, attachment.Downloads));
}
}
}
}
} | mexxanit/YAFNET | yafsrc/YetAnotherForum.NET/Modules/BBCode/Attach.cs | C# | apache-2.0 | 6,079 |
// Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package local
import (
"github.com/coreos/go-omaha/omaha"
)
// OmahaWrapper decorates an omaha.TrivialServer so that teardown errors are
// logged rather than returned to the caller.
type OmahaWrapper struct {
	*omaha.TrivialServer
}

// Destroy shuts down the wrapped omaha server. Any error from the underlying
// Destroy call is logged and otherwise swallowed.
func (o OmahaWrapper) Destroy() {
	err := o.TrivialServer.Destroy()
	if err == nil {
		return
	}
	plog.Errorf("Error destroying omaha server: %v", err)
}
| dm0-/mantle | platform/local/omaha.go | GO | apache-2.0 | 975 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.newplan.logical.rules;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadPushDown;
import org.apache.pig.LoadPushDown.RequiredField;
import org.apache.pig.LoadPushDown.RequiredFieldList;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.util.Pair;
import org.apache.pig.newplan.Operator;
import org.apache.pig.newplan.OperatorPlan;
import org.apache.pig.newplan.ReverseDependencyOrderWalker;
import org.apache.pig.newplan.logical.Util;
import org.apache.pig.newplan.logical.expression.LogicalExpressionPlan;
import org.apache.pig.newplan.logical.expression.ProjectExpression;
import org.apache.pig.newplan.logical.relational.LOCogroup;
import org.apache.pig.newplan.logical.relational.LOCross;
import org.apache.pig.newplan.logical.relational.LOFilter;
import org.apache.pig.newplan.logical.relational.LOForEach;
import org.apache.pig.newplan.logical.relational.LOGenerate;
import org.apache.pig.newplan.logical.relational.LOInnerLoad;
import org.apache.pig.newplan.logical.relational.LOJoin;
import org.apache.pig.newplan.logical.relational.LOLoad;
import org.apache.pig.newplan.logical.relational.LOSort;
import org.apache.pig.newplan.logical.relational.LOSplit;
import org.apache.pig.newplan.logical.relational.LOSplitOutput;
import org.apache.pig.newplan.logical.relational.LOStore;
import org.apache.pig.newplan.logical.relational.LOUnion;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.newplan.logical.relational.LogicalRelationalNodesVisitor;
import org.apache.pig.newplan.logical.relational.LogicalRelationalOperator;
import org.apache.pig.newplan.logical.relational.LogicalSchema;
/**
 * Applies column-pruning decisions to a logical plan: pushes required-field
 * lists into loaders (inserting a trimming FOREACH when a loader cannot prune
 * itself), and removes unused expression plans / inner loads from FOREACH
 * operators. The plan is walked in reverse dependency order. Pruning info is
 * read from annotations written elsewhere (see ColumnPruneHelper.INPUTUIDS /
 * OUTPUTUIDS).
 */
public class ColumnPruneVisitor extends LogicalRelationalNodesVisitor {
protected static final Log log = LogFactory.getLog(ColumnPruneVisitor.class);
// Per-load pruning info: pair.first maps a column index to the set of map
// keys required from that column; pair.second is the set of column indexes
// required in full.
private Map<LOLoad,Pair<Map<Integer,Set<String>>,Set<Integer>>> requiredItems =
new HashMap<LOLoad,Pair<Map<Integer,Set<String>>,Set<Integer>>>();
// When false, only the loader push-down is attempted; no FOREACH insertion
// or schema trimming is performed (see visit(LOLoad) / visit(LOForEach)).
private boolean columnPrune;
/**
 * @param plan the logical plan to transform
 * @param requiredItems pruning decisions per load (see field doc above)
 * @param columnPrune whether column pruning (vs. push-down only) is enabled
 */
public ColumnPruneVisitor(OperatorPlan plan, Map<LOLoad,Pair<Map<Integer,Set<String>>,Set<Integer>>> requiredItems,
boolean columnPrune) throws FrontendException {
super(plan, new ReverseDependencyOrderWalker(plan));
this.columnPrune = columnPrune;
this.requiredItems = requiredItems;
}
/** Registers pruning info for one load before the visitor runs. */
public void addRequiredItems(LOLoad load, Pair<Map<Integer,Set<String>>,Set<Integer>> requiredItem) {
requiredItems.put(load, requiredItem);
}
/**
 * Builds a RequiredFieldList from the recorded pruning info, logs what is
 * being pruned, and offers the projection to the LoadFunc. If the loader
 * accepts, the load's schema is trimmed to the surviving columns; if not
 * (and columnPrune is on), a FOREACH projecting only the required columns
 * is inserted directly after the load.
 */
@Override
public void visit(LOLoad load) throws FrontendException {
if(! requiredItems.containsKey( load ) ) {
return;
}
Pair<Map<Integer,Set<String>>,Set<Integer>> required =
requiredItems.get(load);
RequiredFieldList requiredFields = new RequiredFieldList();
LogicalSchema s = load.getSchema();
for (int i=0;i<s.size();i++) {
RequiredField requiredField = null;
// As we have done processing ahead, we assume that
// a column is not present in both ColumnPruner and
// MapPruner
if( required.first != null && required.first.containsKey(i) ) {
requiredField = new RequiredField();
requiredField.setIndex(i);
requiredField.setAlias(s.getField(i).alias);
requiredField.setType(s.getField(i).type);
List<RequiredField> subFields = new ArrayList<RequiredField>();
for( String key : required.first.get(i) ) {
RequiredField subField = new RequiredField(key,-1,null,DataType.BYTEARRAY);
subFields.add(subField);
}
requiredField.setSubFields(subFields);
requiredFields.add(requiredField);
}
if( required.second != null && required.second.contains(i) ) {
requiredField = new RequiredField();
requiredField.setIndex(i);
requiredField.setAlias(s.getField(i).alias);
requiredField.setType(s.getField(i).type);
requiredFields.add(requiredField);
}
}
// Compute the complement (columns NOT required) purely for logging.
boolean[] columnRequired = new boolean[s.size()];
for (RequiredField rf : requiredFields.getFields())
columnRequired[rf.getIndex()] = true;
List<Pair<Integer, Integer>> pruneList = new ArrayList<Pair<Integer, Integer>>();
for (int i=0;i<columnRequired.length;i++)
{
if (!columnRequired[i])
pruneList.add(new Pair<Integer, Integer>(0, i));
}
StringBuffer message = new StringBuffer();
if (pruneList.size()!=0)
{
message.append("Columns pruned for " + load.getAlias() + ": ");
for (int i=0;i<pruneList.size();i++)
{
message.append("$"+pruneList.get(i).second);
if (i!=pruneList.size()-1)
message.append(", ");
}
log.info(message);
}
message = new StringBuffer();
for(RequiredField rf: requiredFields.getFields()) {
List<RequiredField> sub = rf.getSubFields();
if (sub != null) {
message.append("Map key required for " + load.getAlias() + ": $" + rf.getIndex() + "->" + sub + "\n");
}
}
if (message.length()!=0)
log.info(message);
// Offer the projection to the loader; a FrontendException here is treated
// as "loader declined" rather than a fatal error.
LoadPushDown.RequiredFieldResponse response = null;
try {
LoadFunc loadFunc = load.getLoadFunc();
if (loadFunc instanceof LoadPushDown) {
response = ((LoadPushDown)loadFunc).pushProjection(requiredFields);
}
} catch (FrontendException e) {
log.warn("pushProjection on "+load+" throw an exception, skip it");
}
// Loader does not support column pruning, insert foreach
if (columnPrune) {
if (response==null || !response.getRequiredFieldResponse()) {
LogicalPlan p = (LogicalPlan)load.getPlan();
Operator next = p.getSuccessors(load).get(0);
// if there is already a LOForEach after load, we don't need to
// add another LOForEach
if (next instanceof LOForEach) {
return;
}
LOForEach foreach = new LOForEach(load.getPlan());
// add foreach to the base plan
p.add(foreach);
p.insertBetween(load, foreach, next);
LogicalPlan innerPlan = new LogicalPlan();
foreach.setInnerPlan(innerPlan);
// build foreach inner plan
List<LogicalExpressionPlan> exps = new ArrayList<LogicalExpressionPlan>();
LOGenerate gen = new LOGenerate(innerPlan, exps, new boolean[requiredFields.getFields().size()]);
innerPlan.add(gen);
for (int i=0; i<requiredFields.getFields().size(); i++) {
LoadPushDown.RequiredField rf = requiredFields.getFields().get(i);
LOInnerLoad innerLoad = new LOInnerLoad(innerPlan, foreach, rf.getIndex());
innerPlan.add(innerLoad);
innerPlan.connect(innerLoad, gen);
LogicalExpressionPlan exp = new LogicalExpressionPlan();
ProjectExpression prj = new ProjectExpression(exp, i, -1, gen);
exp.add(prj);
exps.add(exp);
}
} else {
// columns are pruned, reset schema for LOLoader
List<Integer> requiredIndexes = new ArrayList<Integer>();
List<LoadPushDown.RequiredField> fieldList = requiredFields.getFields();
for (int i=0; i<fieldList.size(); i++) {
requiredIndexes.add(fieldList.get(i).getIndex());
}
load.setRequiredFields(requiredIndexes);
LogicalSchema newSchema = new LogicalSchema();
for (int i=0; i<fieldList.size(); i++) {
newSchema.addField(s.getField(fieldList.get(i).getIndex()));
}
load.setSchema(newSchema);
}
}
}
/** No-op: nothing to adjust at a FILTER. */
@Override
public void visit(LOFilter filter) throws FrontendException {
}
/** No-op: nothing to adjust at a SPLIT output. */
@Override
public void visit(LOSplitOutput splitOutput) throws FrontendException {
}
/**
 * For each branch of a SPLIT whose INPUTUIDS annotation shows it does not
 * need every column of the split's schema, inserts a trimming FOREACH on
 * that branch.
 */
@SuppressWarnings("unchecked")
@Override
public void visit(LOSplit split) throws FrontendException {
List<Operator> branchOutputs = split.getPlan().getSuccessors(split);
for (int i=0;i<branchOutputs.size();i++) {
Operator branchOutput = branchOutputs.get(i);
Set<Long> branchOutputUids = (Set<Long>)branchOutput.getAnnotation(ColumnPruneHelper.INPUTUIDS);
if (branchOutputUids!=null) {
Set<Integer> columnsToDrop = new HashSet<Integer>();
for (int j=0;j<split.getSchema().size();j++) {
if (!branchOutputUids.contains(split.getSchema().getField(j).uid))
columnsToDrop.add(j);
}
if (!columnsToDrop.isEmpty()) {
LOForEach foreach = Util.addForEachAfter((LogicalPlan)split.getPlan(), split, i, columnsToDrop);
// Force schema computation on the newly inserted FOREACH.
foreach.getSchema();
}
}
}
}
/** No-op: nothing to adjust at a SORT. */
@Override
public void visit(LOSort sort) throws FrontendException {
}
/** No-op: nothing to adjust at a STORE. */
@Override
public void visit(LOStore store) throws FrontendException {
}
/** Inserts a trimming FOREACH after a COGROUP when its output has unused columns. */
@Override
public void visit( LOCogroup cg ) throws FrontendException {
addForEachIfNecessary(cg);
}
/** No-op: nothing to adjust at a JOIN. */
@Override
public void visit(LOJoin join) throws FrontendException {
}
/** No-op: nothing to adjust at a CROSS. */
@Override
public void visit(LOCross cross) throws FrontendException {
}
/**
 * Removes from a FOREACH every inner load, inner branch and output
 * expression plan whose result is not in the FOREACH's INPUTUIDS
 * annotation, then renumbers the surviving projections' input indexes.
 */
@Override
@SuppressWarnings("unchecked")
public void visit(LOForEach foreach) throws FrontendException {
if (!columnPrune) {
return;
}
// get column numbers from input uids
Set<Long> inputUids = (Set<Long>)foreach.getAnnotation(ColumnPruneHelper.INPUTUIDS);
// Get all top level projects
LogicalPlan innerPlan = foreach.getInnerPlan();
List<LOInnerLoad> innerLoads= new ArrayList<LOInnerLoad>();
List<Operator> sources = innerPlan.getSources();
for (Operator s : sources) {
if (s instanceof LOInnerLoad)
innerLoads.add((LOInnerLoad)s);
}
// If project of the innerLoad is not in INPUTUIDS, remove this innerLoad
Set<LOInnerLoad> innerLoadsToRemove = new HashSet<LOInnerLoad>();
for (LOInnerLoad innerLoad: innerLoads) {
ProjectExpression project = innerLoad.getProjection();
if (project.isProjectStar()) {
LogicalSchema.LogicalFieldSchema tupleFS = project.getFieldSchema();
// Check the first component of the star projection
long uid = tupleFS.schema.getField(0).uid;
if (!inputUids.contains(uid))
innerLoadsToRemove.add(innerLoad);
}
else {
if (!inputUids.contains(project.getFieldSchema().uid))
innerLoadsToRemove.add(innerLoad);
}
}
// Find the logical operator immediate precede LOGenerate which should be removed (the whole branch)
Set<LogicalRelationalOperator> branchHeadToRemove = new HashSet<LogicalRelationalOperator>();
for (LOInnerLoad innerLoad : innerLoadsToRemove) {
Operator op = innerLoad;
while (!(innerPlan.getSuccessors(op).get(0) instanceof LOGenerate)) {
op = innerPlan.getSuccessors(op).get(0);
}
branchHeadToRemove.add((LogicalRelationalOperator)op);
}
// Find the expression plan to remove
LOGenerate gen = (LOGenerate)innerPlan.getSinks().get(0);
List<LogicalExpressionPlan> genPlansToRemove = new ArrayList<LogicalExpressionPlan>();
List<LogicalExpressionPlan> genPlans = gen.getOutputPlans();
for (int i=0;i<genPlans.size();i++) {
LogicalExpressionPlan expPlan = genPlans.get(i);
List<Operator> expSources = expPlan.getSinks();
for (Operator expSrc : expSources) {
if (expSrc instanceof ProjectExpression) {
LogicalRelationalOperator reference = ((ProjectExpression)expSrc).findReferent();
if (branchHeadToRemove.contains(reference)) {
genPlansToRemove.add(expPlan);
}
}
}
}
// Build the temporary structure based on genPlansToRemove, which include:
// * flattenList
// * outputPlanSchemas
// * uidOnlySchemas
// * inputsRemoved
// We first construct inputsNeeded, and inputsRemoved = (all inputs) - inputsNeeded.
// We cannot figure out inputsRemoved directly since the inputs may be used by other output plan.
// We can only get inputsRemoved after visiting all output plans.
List<Boolean> flattenList = new ArrayList<Boolean>();
Set<Integer> inputsNeeded = new HashSet<Integer>();
Set<Integer> inputsRemoved = new HashSet<Integer>();
List<LogicalSchema> outputPlanSchemas = new ArrayList<LogicalSchema>();
List<LogicalSchema> uidOnlySchemas = new ArrayList<LogicalSchema>();
List<LogicalSchema> userDefinedSchemas = null;
if (gen.getUserDefinedSchema()!=null)
userDefinedSchemas = new ArrayList<LogicalSchema>();
for (int i=0;i<genPlans.size();i++) {
LogicalExpressionPlan genPlan = genPlans.get(i);
if (!genPlansToRemove.contains(genPlan)) {
flattenList.add(gen.getFlattenFlags()[i]);
outputPlanSchemas.add(gen.getOutputPlanSchemas().get(i));
uidOnlySchemas.add(gen.getUidOnlySchemas().get(i));
if (gen.getUserDefinedSchema()!=null) {
userDefinedSchemas.add(gen.getUserDefinedSchema().get(i));
}
List<Operator> sinks = genPlan.getSinks();
for(Operator s: sinks) {
if (s instanceof ProjectExpression) {
inputsNeeded.add(((ProjectExpression)s).getInputNum());
}
}
}
}
List<Operator> preds = innerPlan.getPredecessors(gen);
if (preds!=null) { // otherwise, all gen plan are based on constant, no need to adjust
for (int i=0;i<preds.size();i++) {
if (!inputsNeeded.contains(i))
inputsRemoved.add(i);
}
}
// Change LOGenerate: remove unneeded output expression plan
// change flatten flag, outputPlanSchema, uidOnlySchemas
boolean[] flatten = new boolean[flattenList.size()];
for (int i=0;i<flattenList.size();i++)
flatten[i] = flattenList.get(i);
gen.setFlattenFlags(flatten);
gen.setOutputPlanSchemas(outputPlanSchemas);
gen.setUidOnlySchemas(uidOnlySchemas);
gen.setUserDefinedSchema(userDefinedSchemas);
for (LogicalExpressionPlan genPlanToRemove : genPlansToRemove) {
genPlans.remove(genPlanToRemove);
}
// shift project input
if (!inputsRemoved.isEmpty()) {
for (LogicalExpressionPlan genPlan : genPlans) {
List<Operator> sinks = genPlan.getSinks();
for(Operator s: sinks) {
if (s instanceof ProjectExpression) {
int input = ((ProjectExpression)s).getInputNum();
int numToShift = 0;
for (int i :inputsRemoved) {
if (i<input)
numToShift++;
}
((ProjectExpression)s).setInputNum(input-numToShift);
}
}
}
}
// Prune unneeded LOInnerLoad
List<LogicalRelationalOperator> predToRemove = new ArrayList<LogicalRelationalOperator>();
for (int i : inputsRemoved) {
predToRemove.add((LogicalRelationalOperator)preds.get(i));
}
for (LogicalRelationalOperator pred : predToRemove) {
removeSubTree(pred);
}
}
/** Trims each UNION input with a FOREACH when its output has unused columns. */
@Override
public void visit(LOUnion union) throws FrontendException {
// AddForEach before union if necessary.
List<Operator> preds = new ArrayList<Operator>();
preds.addAll(plan.getPredecessors(union));
for (Operator pred : preds) {
addForEachIfNecessary((LogicalRelationalOperator)pred);
}
}
// remove all the operators starting from an operator
// (recurses into predecessors first, then disconnects and removes op itself)
private void removeSubTree(LogicalRelationalOperator op) throws FrontendException {
LogicalPlan p = (LogicalPlan)op.getPlan();
List<Operator> ll = p.getPredecessors(op);
if (ll != null) {
for(Operator pred: ll) {
removeSubTree((LogicalRelationalOperator)pred);
}
}
if (p.getSuccessors(op) != null) {
Operator[] succs = p.getSuccessors(op).toArray(new Operator[0]);
for(Operator s: succs) {
p.disconnect(op, s);
}
}
p.remove(op);
}
// Add ForEach after op to prune unnecessary columns
// (columns whose uid is absent from op's OUTPUTUIDS annotation are dropped)
@SuppressWarnings("unchecked")
private void addForEachIfNecessary(LogicalRelationalOperator op) throws FrontendException {
Set<Long> outputUids = (Set<Long>)op.getAnnotation(ColumnPruneHelper.OUTPUTUIDS);
if (outputUids!=null) {
LogicalSchema schema = op.getSchema();
Set<Integer> columnsToDrop = new HashSet<Integer>();
for (int i=0;i<schema.size();i++) {
if (!outputUids.contains(schema.getField(i).uid))
columnsToDrop.add(i);
}
if (!columnsToDrop.isEmpty()) {
LOForEach foreach = Util.addForEachAfter((LogicalPlan)op.getPlan(), op, 0, columnsToDrop);
// Force schema computation on the newly inserted FOREACH.
foreach.getSchema();
}
}
}
}
| dmeister/pig-cll-gz | src/org/apache/pig/newplan/logical/rules/ColumnPruneVisitor.java | Java | apache-2.0 | 20,333 |
using System;
using NServiceBus;
/// <summary>
/// NServiceBus message carrying the id of an order to be shipped.
/// </summary>
public class ShipOrder:IMessage
{
/// <summary>Gets or sets the id of the order to ship.</summary>
public Guid OrderId { get; set; }
} | WojcikMike/docs.particular.net | samples/ravendb/simple/Version_6/Server/ShipOrder.cs | C# | apache-2.0 | 110 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.milo.server;
import java.io.IOException;
import org.apache.camel.EndpointInject;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.milo.Ports;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue;
import org.eclipse.milo.opcua.stack.core.types.builtin.Variant;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for milo server component without using an actual connection
*/
public class ServerLocalTest extends CamelTestSupport {
// Endpoint URI of the milo server item under test.
private static final String MILO_ITEM_1 = "milo-server:myitem1";
// Mock endpoint receiving everything routed from the server item.
private static final String MOCK_TEST = "mock:test";
@EndpointInject(MOCK_TEST)
protected MockEndpoint testEndpoint;
/**
 * Binds the milo-server component to a port picked by
 * {@code Ports.pickServerPort()} before each test, so tests do not rely on
 * the component's default port.
 */
@Before
public void pickFreePort() throws IOException {
final MiloServerComponent component = context().getComponent("milo-server", MiloServerComponent.class);
component.setBindPort(Ports.pickServerPort());
}
/** Routes the server item to the mock endpoint. */
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from(MILO_ITEM_1).to(MOCK_TEST);
}
};
}
// Smoke test: an empty body means the Camel context (and the component)
// must simply start and stop without error.
@Test
public void shouldStartComponent() {
}
// The remaining tests each send one representative payload type to the
// item endpoint and pass if no exception is raised.
@Test
public void testAcceptVariantString() {
sendBody(MILO_ITEM_1, new Variant("Foo"));
}
@Test
public void testAcceptVariantDouble() {
sendBody(MILO_ITEM_1, new Variant(0.0));
}
@Test
public void testAcceptString() {
sendBody(MILO_ITEM_1, "Foo");
}
@Test
public void testAcceptDouble() {
sendBody(MILO_ITEM_1, 0.0);
}
@Test
public void testAcceptDataValueString() {
sendBody(MILO_ITEM_1, new DataValue(new Variant("Foo")));
}
@Test
public void testAcceptDataValueDouble() {
sendBody(MILO_ITEM_1, new DataValue(new Variant(0.0)));
}
}
| Fabryprog/camel | components/camel-milo/src/test/java/org/apache/camel/component/milo/server/ServerLocalTest.java | Java | apache-2.0 | 2,912 |
/*
* Copyright 2015-2016 USEF Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package energy.usef.cro.dto;
/**
* A data transfer object for relevant Participant data.
*/
public class ParticipantAction {

    private String method;
    private String domain;

    /**
     * No-argument constructor, required for JSON deserialisation.
     */
    public ParticipantAction() {
    }

    /**
     * Creates a fully populated participant action.
     *
     * @param method the action method
     * @param domain the participant domain the action applies to
     */
    public ParticipantAction(String method, String domain) {
        this.method = method;
        this.domain = domain;
    }

    /** @return the action method */
    public String getMethod() {
        return method;
    }

    /** @param method the action method */
    public void setMethod(String method) {
        this.method = method;
    }

    /** @return the participant domain */
    public String getDomain() {
        return domain;
    }

    /** @param domain the participant domain */
    public void setDomain(String domain) {
        this.domain = domain;
    }
}
| USEF-Foundation/ri.usef.energy | usef-build/usef-workflow/usef-cro/src/main/java/energy/usef/cro/dto/ParticipantAction.java | Java | apache-2.0 | 1,281 |
import { FunctionNode } from './FunctionNode.js';
/**
 * Node wrapping a raw shader expression string; a thin specialization of FunctionNode.
 *
 * @param {string} src - expression source code
 * @param {string} type - result type of the expression
 * @param {Object} [keywords]
 * @param {Object} [extensions]
 * @param {Array} [includes]
 */
function ExpressionNode( src, type, keywords, extensions, includes ) {
	// Note: FunctionNode's parameter order differs from this constructor's
	// ( src, includes, extensions, keywords, type ) — the reordering is intentional.
	FunctionNode.call( this, src, includes, extensions, keywords, type );
}
ExpressionNode.prototype = Object.create( FunctionNode.prototype );
ExpressionNode.prototype.constructor = ExpressionNode;
ExpressionNode.prototype.nodeType = "Expression";
export { ExpressionNode };
| sinorise/sinorise.github.io | three/jsm/nodes/core/ExpressionNode.js | JavaScript | apache-2.0 | 399 |
package examples.stateless;
import java.util.Collection;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import examples.model.Project;
@Stateless
public class ProjectServiceBean implements ProjectService {
    @PersistenceContext(unitName="EmployeeService")
    protected EntityManager em;

    /**
     * Persists and returns a new {@link Project} with the given name.
     *
     * @param name the project name
     * @return the managed, persisted project instance
     */
    public Project createProject(String name) {
        Project proj = new Project();
        proj.setName(name);
        em.persist(proj);
        return proj;
    }

    /**
     * Returns all projects.
     * <p>
     * Uses a {@code TypedQuery} instead of the untyped {@code Query} API,
     * which removes the unchecked cast of the raw result list.
     *
     * @return all persisted projects (possibly empty)
     */
    public Collection<Project> findAllProjects() {
        TypedQuery<Project> query = em.createQuery("SELECT p FROM Project p", Project.class);
        return query.getResultList();
    }
}
| velmuruganvelayutham/jpa | examples/Chapter4/22-manyToManyJoinTable/src/model/examples/stateless/ProjectServiceBean.java | Java | apache-2.0 | 754 |
package org.insightech.er.db.impl.postgres;
import org.insightech.er.db.sqltype.SqlType;
import org.insightech.er.db.sqltype.SqlTypeManagerBase;
public class PostgresSqlTypeManager extends SqlTypeManagerBase {
    /**
     * Always returns {@code 0}: byte-length calculation is not implemented for
     * PostgreSQL types.
     * <p>
     * NOTE(review): presumably callers treat 0 as "no/unknown byte length" —
     * confirm against SqlTypeManagerBase and its other implementations.
     */
    @Override
    public int getByteLength(final SqlType type, final Integer length, final Integer decimal) {
        return 0;
    }
}
| roundrop/ermaster-fast | src/org/insightech/er/db/impl/postgres/PostgresSqlTypeManager.java | Java | apache-2.0 | 363 |
# n peg hanoi tower problem, use bfs instead of dfs, and don't have a full
# analytical solution
import sys
import copy
def solutionWorks(currentSolution, stacksAfterSolution, initialStacks, finalStacks):
    """Apply every move of ``currentSolution`` to ``stacksAfterSolution`` (in
    place) and report whether the resulting configuration equals ``finalStacks``.

    Each move is a pair ``(i, j)``: pop the top disc of stack ``i`` and push it
    onto stack ``j``. ``initialStacks`` is unused; the parameter is kept so the
    signature stays unchanged for existing callers.
    """
    for i, j in currentSolution:
        stacksAfterSolution[j].append(stacksAfterSolution[i].pop())
    # Direct list comparison replaces the original str()-representation
    # comparison; for nested lists of ints the two are equivalent, and the
    # if/else returning literal True/False collapses to a single expression.
    return stacksAfterSolution == finalStacks
def stepLegitimate(stacksAfterSolution, i, j):
    """Return True when moving the top disc from stack ``i`` to stack ``j`` is
    legal: the source stack must be non-empty and the moved disc may not land
    on a smaller one.
    """
    source = stacksAfterSolution[i]
    if not source:
        return False
    destination = stacksAfterSolution[j]
    # An empty destination always accepts. Otherwise the original code only
    # rejected a strictly larger disc (src top > dst top), so "<=" preserves
    # that behaviour exactly (discs are unique, equality never occurs anyway).
    return not destination or source[-1] <= destination[-1]
# DFS cannot work, need to use BFS
def moveDiscs(initialStacks, finalStacks, results):
    """Breadth-first search over move sequences.

    Appends the first (hence shortest) solution found to ``results`` as a list
    of ``[source, destination]`` peg moves. Branches longer than 7 moves are
    abandoned (hard cap below), so ``results`` may stay empty.
    """
    import collections
    # FIFO queue of partial move sequences — FIFO order is what makes this BFS.
    solutions = collections.deque()
    solutions.append([])
    # Number of pegs; index 0 of the stack lists is unused.
    K = len(initialStacks) - 1
    while len(solutions) > 0:
        currentSolution = copy.deepcopy(solutions.popleft())
        # Hard depth cap: do not expand sequences longer than 7 moves.
        if len(currentSolution) > 7:
            continue
        stacksAfterSolution = copy.deepcopy(initialStacks)
        # solutionWorks mutates stacksAfterSolution into the post-move state,
        # which the neighbour generation below relies on.
        if solutionWorks(currentSolution, stacksAfterSolution, initialStacks, finalStacks):
            for x in range(len(currentSolution)):
                results.append(list(currentSolution[x]))
            return
        # add other solutions in queue
        for i in range(1, K + 1):
            for j in range(1, K + 1):
                if j != i and stepLegitimate(stacksAfterSolution, i, j):
                    # Temporarily extend the sequence, snapshot it, then undo.
                    currentSolution.append([i, j])
                    solutions.append(copy.deepcopy(currentSolution))
                    currentSolution.pop()
if __name__ == '__main__':
    # Hard-coded example input; the stdin-reading lines are kept for reference.
    # N, K = [int(x) for x in sys.stdin.readline().split()]
    N, K = 6, 4
    # stacks[p] lists the discs on peg p, bottom first; index 0 is unused.
    initialStacks = [[] for x in range(K + 1)]
    finalStacks = [[] for x in range(K + 1)]
    # initial = [int(x) for x in sys.stdin.readline().split()]
    # final = [int(x) for x in sys.stdin.readline().split()]
    initial = [4, 2, 4, 3, 1, 1]
    final = [1, 1, 1, 1, 1, 1]
    # initial[d]/final[d] give the peg of disc d+1; iterating from the largest
    # disc down leaves each peg list ordered bottom-to-top.
    for i in range(N - 1, -1, -1):
        initialStacks[initial[i]].append(i + 1)
    for i in range(N - 1, -1, -1):
        finalStacks[final[i]].append(i + 1)
    print(initialStacks)
    print(finalStacks)
    results = []
    moveDiscs(initialStacks, finalStacks, results)
    # Print the move count followed by one "src dst" pair per line.
    print(len(results))
    for i in range(len(results)):
        print(results[i][0], results[i][1])
| baiyubin/python_practice | pegs.py | Python | apache-2.0 | 2,445 |
/**
* @file
* Provides JavaScript additions to the managed file field type.
*
* This file provides progress bar support (if available), popup windows for
* file previews, and disabling of other file fields during Ajax uploads (which
* prevents separate file fields from accidentally uploading files).
*/
(function ($) {
"use strict";
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileValidateAutoAttach = {
attach: function (context, settings) {
var $context = $(context);
var validateExtension = Drupal.file.validateExtension;
var selector, elements;
if (settings.file && settings.file.elements) {
elements = settings.file.elements;
for (selector in elements) {
if (elements.hasOwnProperty(selector)) {
$context.find(selector).bind('change', {extensions: elements[selector]}, validateExtension);
}
}
}
},
detach: function (context, settings) {
var $context = $(context);
var validateExtension = Drupal.file.validateExtension;
var selector, elements;
if (settings.file && settings.file.elements) {
elements = settings.file.elements;
for (selector in elements) {
if (elements.hasOwnProperty(selector)) {
$context.find(selector).unbind('change', validateExtension);
}
}
}
}
};
/**
* Attach behaviors to managed file element upload fields.
*/
Drupal.behaviors.fileAutoUpload = {
attach: function (context) {
$(context).find('input[type="file"]').once('auto-file-upload').on('change.autoFileUpload', Drupal.file.triggerUploadButton);
},
detach: function (context, setting, trigger) {
if (trigger === 'unload') {
$(context).find('input[type="file"]').removeOnce('auto-file-upload').off('.autoFileUpload');
}
}
};
/**
* Attach behaviors to the file upload and remove buttons.
*/
Drupal.behaviors.fileButtons = {
attach: function (context) {
var $context = $(context);
$context.find('input.form-submit').bind('mousedown', Drupal.file.disableFields);
$context.find('div.form-managed-file input.form-submit').bind('mousedown', Drupal.file.progressBar);
},
detach: function (context) {
var $context = $(context);
$context.find('input.form-submit').unbind('mousedown', Drupal.file.disableFields);
$context.find('div.form-managed-file input.form-submit').unbind('mousedown', Drupal.file.progressBar);
}
};
/**
* Attach behaviors to links within managed file elements.
*/
Drupal.behaviors.filePreviewLinks = {
attach: function (context) {
$(context).find('div.form-managed-file .file a, .file-widget .file a').bind('click',Drupal.file.openInNewWindow);
},
detach: function (context){
$(context).find('div.form-managed-file .file a, .file-widget .file a').unbind('click', Drupal.file.openInNewWindow);
}
};
/**
* File upload utility functions.
*/
Drupal.file = Drupal.file || {
/**
* Client-side file input validation of file extensions.
*/
validateExtension: function (event) {
event.preventDefault();
// Remove any previous errors.
$('.file-upload-js-error').remove();
// Add client side validation for the input[type=file].
var extensionPattern = event.data.extensions.replace(/,\s*/g, '|');
if (extensionPattern.length > 1 && this.value.length > 0) {
var acceptableMatch = new RegExp('\\.(' + extensionPattern + ')$', 'gi');
if (!acceptableMatch.test(this.value)) {
var error = Drupal.t("The selected file %filename cannot be uploaded. Only files with the following extensions are allowed: %extensions.", {
// According to the specifications of HTML5, a file upload control
// should not reveal the real local path to the file that a user
// has selected. Some web browsers implement this restriction by
// replacing the local path with "C:\fakepath\", which can cause
// confusion by leaving the user thinking perhaps Drupal could not
// find the file because it messed up the file path. To avoid this
// confusion, therefore, we strip out the bogus fakepath string.
'%filename': this.value.replace('C:\\fakepath\\', ''),
'%extensions': extensionPattern.replace(/\|/g, ', ')
});
$(this).closest('div.form-managed-file').prepend('<div class="messages messages--error file-upload-js-error" aria-live="polite">' + error + '</div>');
this.value = '';
}
}
},
/**
* Trigger the upload_button mouse event to auto-upload as a managed file.
*/
triggerUploadButton: function (event){
$(event.target).closest('.form-managed-file').find('.form-submit').trigger('mousedown');
},
/**
* Prevent file uploads when using buttons not intended to upload.
*/
disableFields: function (event){
var clickedButton = this;
// Only disable upload fields for Ajax buttons.
if (!$(clickedButton).hasClass('ajax-processed')) {
return;
}
// Check if we're working with an "Upload" button.
var $enabledFields = [];
if ($(this).closest('div.form-managed-file').length > 0) {
$enabledFields = $(this).closest('div.form-managed-file').find('input.form-file');
}
// Temporarily disable upload fields other than the one we're currently
// working with. Filter out fields that are already disabled so that they
// do not get enabled when we re-enable these fields at the end of behavior
// processing. Re-enable in a setTimeout set to a relatively short amount
// of time (1 second). All the other mousedown handlers (like Drupal's Ajax
// behaviors) are excuted before any timeout functions are called, so we
// don't have to worry about the fields being re-enabled too soon.
// @todo If the previous sentence is true, why not set the timeout to 0?
var $fieldsToTemporarilyDisable = $('div.form-managed-file input.form-file').not($enabledFields).not(':disabled');
$fieldsToTemporarilyDisable.prop('disabled', true);
setTimeout(function (){
$fieldsToTemporarilyDisable.prop('disabled', false);
}, 1000);
},
/**
* Add progress bar support if possible.
*/
progressBar: function (event) {
var clickedButton = this;
var $progressId = $(clickedButton).closest('div.form-managed-file').find('input.file-progress');
if ($progressId.length) {
var originalName = $progressId.attr('name');
// Replace the name with the required identifier.
$progressId.attr('name', originalName.match(/APC_UPLOAD_PROGRESS|UPLOAD_IDENTIFIER/)[0]);
// Restore the original name after the upload begins.
setTimeout(function () {
$progressId.attr('name', originalName);
}, 1000);
}
// Show the progress bar if the upload takes longer than half a second.
setTimeout(function () {
$(clickedButton).closest('div.form-managed-file').find('div.ajax-progress-bar').slideDown();
}, 500);
},
/**
* Open links to files within forms in a new window.
*/
openInNewWindow: function (event) {
event.preventDefault();
$(this).attr('target', '_blank');
window.open(this.href, 'filePreview', 'toolbar=0,scrollbars=1,location=1,statusbar=1,menubar=0,resizable=1,width=500,height=550');
}
};
})(jQuery);
| nickopris/musicapp | www/core/modules/file/file.js | JavaScript | apache-2.0 | 7,305 |
/**
* Bean Validation TCK
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.beanvalidation.tck.tests.constraints.application.method;
import static java.lang.annotation.ElementType.ANNOTATION_TYPE;
import static java.lang.annotation.ElementType.CONSTRUCTOR;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import javax.validation.Constraint;
import javax.validation.Payload;
/**
* @author Gunnar Morling
*/
@Constraint(validatedBy = OnlineCalendarServiceValidator.class)
@Target({ METHOD, CONSTRUCTOR, ANNOTATION_TYPE })
@Retention(RUNTIME)
@Documented
public @interface OnlineCalendarService {
    // The three attributes below are mandatory for every Bean Validation
    // constraint annotation (message, groups, payload).
    String message() default "{validation.onlineCalendarService}";
    Class<?>[] groups() default { };
    Class<? extends Payload>[] payload() default { };
}
| gunnarmorling/beanvalidation-tck | tests/src/main/java/org/hibernate/beanvalidation/tck/tests/constraints/application/method/OnlineCalendarService.java | Java | apache-2.0 | 1,055 |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using QuantConnect.Securities;
using System;
using System.Collections.Generic;
namespace QuantConnect.Brokerages
{
/// <summary>
/// Provides Binance.US specific properties
/// </summary>
    public class BinanceUSBrokerageModel : BinanceBrokerageModel
    {
        /// <summary>
        /// Market name
        /// </summary>
        protected override string MarketName => Market.BinanceUS;
        /// <summary>
        /// Gets a map of the default markets to be used for each security type
        /// </summary>
        public override IReadOnlyDictionary<SecurityType, string> DefaultMarkets { get; } = GetDefaultMarkets(Market.BinanceUS);
        /// <summary>
        /// Initializes a new instance of the <see cref="BinanceUSBrokerageModel"/> class
        /// </summary>
        /// <param name="accountType">The type of account to be modeled, defaults to <see cref="AccountType.Cash"/></param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="accountType"/> is Margin, which Binance.US does not support</exception>
        public BinanceUSBrokerageModel(AccountType accountType = AccountType.Cash) : base(accountType)
        {
            if (accountType == AccountType.Margin)
            {
                throw new ArgumentException("The Binance.US brokerage does not currently support Margin trading.");
            }
        }
        /// <summary>
        /// Binance global leverage rule
        /// </summary>
        /// <param name="security"></param>
        /// <returns>Always 1, since margin trading is unavailable on Binance.US</returns>
        public override decimal GetLeverage(Security security)
        {
            // margin trading is not currently supported by Binance.US
            return 1m;
        }
    }
}
| QuantConnect/Lean | Common/Brokerages/BinanceUSBrokerageModel.cs | C# | apache-2.0 | 2,292 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.karaf.features.internal.model.processing;
import javax.xml.bind.annotation.XmlRegistry;
@XmlRegistry
public class ObjectFactory {
    /**
     * Creates an empty {@link FeaturesProcessing} instance; invoked by JAXB
     * when unmarshalling the features-processing XML model.
     */
    public FeaturesProcessing createFeaturesProcessing() {
        return new FeaturesProcessing();
    }
}
| grgrzybek/karaf | features/core/src/main/java/org/apache/karaf/features/internal/model/processing/ObjectFactory.java | Java | apache-2.0 | 1,069 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.milton.http.fs;
import io.milton.http.LockManager;
import io.milton.http.LockInfo;
import io.milton.http.LockResult;
import io.milton.http.LockTimeout;
import io.milton.http.LockToken;
import io.milton.resource.LockableResource;
import io.milton.http.exceptions.NotAuthorizedException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Keys on getUniqueID of the locked resource.
*
*/
public class SimpleLockManager implements LockManager {

    private static final Logger log = LoggerFactory.getLogger( SimpleLockManager.class );

    /**
     * Current locks, keyed by the unique id of the locked resource.
     */
    final Map<String, CurrentLock> locksByUniqueId;

    /**
     * The same locks, keyed by lock token id, for token based lookups.
     */
    final Map<String, CurrentLock> locksByToken;

    public SimpleLockManager() {
        locksByUniqueId = new HashMap<String, CurrentLock>();
        locksByToken = new HashMap<String, CurrentLock>();
    }

    /**
     * Attempts to lock the given resource under a freshly generated token id.
     *
     * @return a successful result carrying the new token, or a failure when
     * the resource is already locked
     */
    @Override
    public synchronized LockResult lock( LockTimeout timeout, LockInfo lockInfo, LockableResource r ) {
        String token = UUID.randomUUID().toString();
        return lock( timeout, lockInfo, r, token );
    }

    private LockResult lock( LockTimeout timeout, LockInfo lockInfo, LockableResource r, String token ) {
        LockToken currentLock = currentLock( r );
        if( currentLock != null ) {
            return LockResult.failed( LockResult.FailureReason.ALREADY_LOCKED );
        }
        LockToken newToken = new LockToken( token, lockInfo, timeout );
        CurrentLock newLock = new CurrentLock( r.getUniqueId(), newToken, lockInfo.lockedByUser );
        // Register under both keys so the lock is reachable either way.
        locksByUniqueId.put( r.getUniqueId(), newLock );
        locksByToken.put( newToken.tokenId, newLock );
        return LockResult.success( newToken );
    }

    /**
     * Refreshes the lock identified by tokenId, restarting its timeout window.
     * If the token is unknown, a new one-hour exclusive write lock is created
     * reusing the same token id (deliberate fallback, see log warning).
     */
    @Override
    public synchronized LockResult refresh( String tokenId, LockableResource resource ) {
        CurrentLock curLock = locksByToken.get( tokenId );
        if( curLock == null || curLock.token == null ) {
            log.warn("attempt to refresh missing token: " + tokenId + " on resource: " + resource.getName() + " will create a new lock");
            LockTimeout timeout = new LockTimeout(60*60l);
            LockInfo lockInfo = new LockInfo(LockInfo.LockScope.EXCLUSIVE, LockInfo.LockType.WRITE, tokenId, LockInfo.LockDepth.ZERO);
            return lock(timeout, lockInfo, resource, tokenId);
        }
        // Restart the timeout window from now.
        curLock.token.setFrom( new Date() );
        return LockResult.success( curLock.token );
    }

    /**
     * Removes the lock on the resource, provided tokenId matches the current lock.
     *
     * @throws NotAuthorizedException when the resource is locked with a different token
     */
    @Override
    public synchronized void unlock( String tokenId, LockableResource r ) throws NotAuthorizedException {
        LockToken lockToken = currentLock( r );
        if( lockToken == null ) {
            log.debug( "not locked" );
            return;
        }
        if( lockToken.tokenId.equals( tokenId ) ) {
            removeLock( lockToken );
        } else {
            throw new NotAuthorizedException( r );
        }
    }

    /**
     * Returns the live token for the resource, or null; expired tokens are
     * purged as a side effect.
     */
    private LockToken currentLock( LockableResource resource ) {
        CurrentLock curLock = locksByUniqueId.get( resource.getUniqueId() );
        if( curLock == null ) {
            return null;
        }
        LockToken token = curLock.token;
        if( token.isExpired() ) {
            removeLock( token );
            return null;
        } else {
            return token;
        }
    }

    private void removeLock( LockToken token ) {
        log.debug( "removeLock: " + token.tokenId );
        CurrentLock currentLock = locksByToken.get( token.tokenId );
        if( currentLock != null ) {
            // Remove from both indexes to keep them consistent.
            locksByUniqueId.remove( currentLock.id );
            locksByToken.remove( currentLock.token.tokenId );
        } else {
            log.warn( "couldnt find lock: " + token.tokenId );
        }
    }

    /**
     * Builds a detached copy of the current lock token for the resource, or
     * returns null when the resource is not locked.
     * <p>
     * BUGFIX: now synchronized — this method reads the plain HashMaps that
     * every other (synchronized) method mutates, so unsynchronized access
     * was a data race.
     * <p>
     * NOTE(review): unlike {@link #currentLock}, expired tokens are still
     * returned here; that inconsistency is preserved deliberately to avoid a
     * behaviour change — confirm whether expiry should be honoured.
     */
    public synchronized LockToken getCurrentToken( LockableResource r ) {
        CurrentLock lock = locksByUniqueId.get( r.getUniqueId() );
        if( lock == null ) {
            return null;
        }
        LockToken token = new LockToken();
        token.info = new LockInfo( LockInfo.LockScope.EXCLUSIVE, LockInfo.LockType.WRITE, lock.lockedByUser, LockInfo.LockDepth.ZERO );
        token.info.lockedByUser = lock.lockedByUser;
        token.timeout = lock.token.timeout;
        token.tokenId = lock.token.tokenId;
        return token;
    }

    /**
     * Immutable pairing of a resource id, its lock token and the locking user.
     */
    class CurrentLock {

        final String id;
        final LockToken token;
        final String lockedByUser;

        public CurrentLock( String id, LockToken token, String lockedByUser ) {
            this.id = id;
            this.token = token;
            this.lockedByUser = lockedByUser;
        }
    }
}
| skoulouzis/lobcder | milton2/milton-server-ce/src/main/java/io/milton/http/fs/SimpleLockManager.java | Java | apache-2.0 | 5,398 |
/**
 * Minimal event dispatcher: listeners are stored on the target object itself
 * (under __listeners) and fired via generated on<event> handlers.
 */
var Event = {
    /**
     * Registers a listener (optionally with a scope) for an event on obj.
     * Returns the listener's index, usable with removeListenerById.
     */
    addListener: function (obj, event, listener, scope) {
        if (obj) {
            if (typeof obj.__listeners == "undefined") {
                obj.__listeners = new Object();
            }
            if (typeof obj.__listeners[event] == "undefined") {
                obj.__listeners[event] = new Array();
                obj.__listeners[event].__listenerCount = 0;
            }
            if (typeof scope == "undefined")
                obj.__listeners[event].push(listener);
            else
                obj.__listeners[event].push({ "listener": listener, "scope": scope });
            obj.__listeners[event].__listenerCount++;
            // Install/refresh the on<event> dispatcher on the target.
            obj["on" + event] = function () {
                Event.fire(obj, event, arguments);
            };
            return obj.__listeners[event].length - 1;
        }
    },
    /**
     * Unregisters every registration of the given listener/scope pair.
     */
    removeListener: function (obj, event, listener, scope) {
        if (obj && obj.__listeners && obj.__listeners[event]) {
            for (var i = 0; i < obj.__listeners[event].length; i++) {
                if (obj.__listeners[event][i] === listener) {
                    obj.__listeners[event][i] = null;
                    delete obj.__listeners[event][i];
                    obj.__listeners[event].__listenerCount--;
                }
                else {
                    var l = obj.__listeners[event][i];
                    if (l && l.listener === listener && l.scope === scope) {
                        obj.__listeners[event][i] = null;
                        delete obj.__listeners[event][i];
                        obj.__listeners[event].__listenerCount--;
                    }
                }
            }
            Event.defragListeners(obj, event);
        }
    },
    /**
     * Unregisters the listener at the index returned by addListener.
     */
    removeListenerById: function (obj, event, listenerId) {
        if (obj && obj.__listeners && obj.__listeners[event] && obj.__listeners[event][listenerId]) {
            obj.__listeners[event][listenerId] = null;
            delete obj.__listeners[event][listenerId];
            obj.__listeners[event].__listenerCount--;
        }
        Event.defragListeners(obj, event);
    },
    /**
     * Clears all listeners for one event, or for every event when omitted.
     */
    removeAllListeners: function(obj, event) {
        if(obj && obj.__listeners) {
            if(typeof event == "undefined") {
                obj.__listeners = new Object();
            }
            else if(typeof obj.__listeners[event] != "undefined") {
                obj.__listeners[event] = new Array();
                obj.__listeners[event].__listenerCount = 0;
            }
        }
    },
    /**
     * Compacts the listener array after removals. Currently a no-op.
     * BUGFIX: renamed from "defragListener" — removeListener and
     * removeListenerById call Event.defragListeners(...), which previously
     * threw a TypeError because only the singular name was defined.
     */
    defragListeners: function(obj, event) {
        // do nothing right now
    },
    /**
     * Invokes every surviving listener registered for the event, passing args.
     */
    fire: function (obj, event, args) {
        if(!args) args = new Array();
        for (var i = 0; obj && obj.__listeners && obj.__listeners[event] && i < obj.__listeners[event].length; i++) {
            var f = obj.__listeners[event][i];
            if (typeof f == "function") {
                // TODO: should the scope be the obj, the listener, or should it be passed in?
                f.apply(obj, args);
            }
            else if (f && typeof f.listener == "function") {
                f.listener.apply(f.scope, args);
            }
        }
    }
};
| donniet/livegame | src/main/webapp/client/0.2/event.js | JavaScript | apache-2.0 | 3,158 |
package org.nibiru.ui.ios.widget;
import org.nibiru.ui.core.api.Popup;
import org.nibiru.ui.core.api.Viewport;
import org.nibiru.ui.core.impl.BasePopup;
import javax.inject.Inject;
import apple.uikit.UIView;
public class IOSPopup extends BasePopup<Overlay, UIView> implements Popup {
    @Inject
    public IOSPopup(Viewport viewport) {
        // The Overlay control hosts the popup content above the current view.
        super(Overlay.create(), viewport);
    }
    @Override
    public void show() {
        control.show();
        // Trigger a layout pass so freshly shown content gets centered.
        requestLayout();
    }
    @Override
    public void hide() {
        control.hide();
    }
    // Delegates to the overlay; presumably controls dismiss-on-outside-tap —
    // TODO confirm against Overlay's implementation.
    @Override
    public void setAutoHide(boolean autoHide) {
        control.setAutoHide(autoHide);
    }
    @Override
    public void requestLayout() {
        super.requestLayout();
        // Re-center the content after any layout change.
        control.centerContent();
    }
    @Override
    protected void setNativeContent(UIView nativeContent) {
        control.setContent(nativeContent);
        control.centerContent();
    }
    @Override
    protected void setNativeSize(int width, int height) {
        // Content may not be set yet; sizing a null native view would NPE.
        if (getContent() != null) {
            WidgetUtils.setNativeSize((UIView) getContent().asNative(), width, height);
        }
    }
}
| NibiruOS/ui | org.nibiru.ui.ios/src/main/java/org/nibiru/ui/ios/widget/IOSPopup.java | Java | apache-2.0 | 1,155 |
var
utils = require('./utils'),
Signals = require('./Signals');
/**
* Support for the W3C Page Visibility API - http://www.w3.org/TR/page-visibility
*
* {@link module:enyo/pageVisibility.hidden} and {@link module:enyo/pageVisibility.visibilityState}
* contain the same information as `document.hidden` and
* `document.visibilityState` in supported browsers. The `visibilitychange`
* event is channelled through the [Signals]{@link module:enyo/Signals~Signals} mechanism.
*
* Partly based on {@linkplain http://stackoverflow.com/a/1060034}.
*
* Example:
*
* ```javascript
* var
* kind = require('enyo/kind'),
* Signals = require('enyo/Signals');
*
* module.exports = kind({
* name: 'App',
* components: [
* {kind: Signals, onvisibilitychange: 'visibilitychanged'}
* ],
* visibilitychanged: function() {
* if(enyo.hidden){
* // page hidden
* } else {
* // page visible
* }
* }
* });
* ```
*
* @module enyo/pageVisibility
* @private
*/
var
	doc = global.document,
	// Property names; reassigned to vendor-prefixed variants ('mozHidden',
	// 'webkitHidden', ...) by the feature-detection chain further below.
	hidden = 'hidden',
	visibilityState = 'visibilityState',
	hiddenMap = {};
var pageVisibility = module.exports = {
	// set inital values for enyo.hidden and enyo.visibilityState it's probably save to assume
	// that the current document is visible when loading the page
	/**
	 * `true` if the document is hidden; otherwise, `false`.
	 *
	 * @readonly
	 * @type {Boolean}
	 * @default false
	 * @public
	 */
	hidden: typeof doc[hidden] !== 'undefined' ? doc[hidden] : false,
	/**
	 * String indicating the document's visibility state.
	 *
	 * @readonly
	 * @type {String}
	 * @default 'visible'
	 * @public
	 */
	visibilityState: typeof doc[visibilityState] !== 'undefined' ? doc[visibilityState] : 'visible'
};
// map compatibility events to document.hidden state
hiddenMap.blur = hiddenMap.focusout = hiddenMap.pagehide = true;
hiddenMap.focus = hiddenMap.focusin = hiddenMap.pageshow = false;
/**
 * Shared handler for every visibility-related event. Updates the exported
 * `hidden`/`visibilityState` flags and relays the event through Signals.
 */
function onchange (event) {
	// Old IE exposes the event only on the global object.
	event = event || global.event;
	var isCompatEvent = event.type in hiddenMap;
	if (isCompatEvent) {
		// Legacy focus/blur/pageshow-style event: derive state from the map.
		pageVisibility.hidden = hiddenMap[event.type];
		pageVisibility.visibilityState = pageVisibility.hidden ? 'hidden' : 'visible';
	} else {
		// Real Page Visibility API event: read state from the document.
		pageVisibility.hidden = doc[hidden];
		pageVisibility.visibilityState = doc[visibilityState];
	}
	Signals.send('onvisibilitychange', utils.mixin(event, {hidden: pageVisibility.hidden}));
}
// Standards:
// Each branch below both tests for a vendor-prefixed property AND, as a side
// effect of the assignment inside the condition, rebinds the module-level
// `hidden`/`visibilityState` variables that onchange() reads.
if (hidden in doc) {
	doc.addEventListener('visibilitychange', onchange);
} else if ((hidden = 'mozHidden') in doc) {
	doc.addEventListener('mozvisibilitychange', onchange);
	visibilityState = 'mozVisibilityState';
} else if ((hidden = 'webkitHidden') in doc) {
	doc.addEventListener('webkitvisibilitychange', onchange);
	visibilityState = 'webkitVisibilityState';
} else if ((hidden = 'msHidden') in doc) {
	doc.addEventListener('msvisibilitychange', onchange);
	visibilityState = 'msVisibilityState';
} else if ('onfocusin' in doc) { // IE 9 and lower:
	doc.onfocusin = doc.onfocusout = onchange;
} else { // All others:
	// Fallback: approximate visibility from page/focus events via hiddenMap.
	global.onpageshow = global.onpagehide = global.onfocus = global.onblur = onchange;
}
| PKRoma/enyo | src/pageVisibility.js | JavaScript | apache-2.0 | 2,971 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.tools.simple;
import com.intellij.diff.DiffContext;
import com.intellij.diff.actions.BufferedLineIterator;
import com.intellij.diff.actions.NavigationContextChecker;
import com.intellij.diff.comparison.DiffTooBigException;
import com.intellij.diff.fragments.LineFragment;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.tools.util.*;
import com.intellij.diff.tools.util.base.HighlightPolicy;
import com.intellij.diff.tools.util.base.TextDiffViewerUtil;
import com.intellij.diff.tools.util.side.TwosideTextDiffViewer;
import com.intellij.diff.util.*;
import com.intellij.diff.util.DiffUserDataKeysEx.ScrollToPolicy;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.Separator;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.DiffNavigationContext;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Function;
import org.jetbrains.annotations.*;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Iterator;
import java.util.List;
import static com.intellij.diff.util.DiffUtil.getLineCount;
public class SimpleDiffViewer extends TwosideTextDiffViewer {
  public static final Logger LOG = Logger.getInstance(SimpleDiffViewer.class);
  // Collaborators created in the constructor (see below).
  @NotNull private final SyncScrollSupport.SyncScrollable mySyncScrollable;
  @NotNull private final PrevNextDifferenceIterable myPrevNextDifferenceIterable;
  @NotNull private final StatusPanel myStatusPanel;
  // Currently displayed diff changes, and changes invalidated by edits.
  @NotNull private final List<SimpleDiffChange> myDiffChanges = new ArrayList<SimpleDiffChange>();
  @NotNull private final List<SimpleDiffChange> myInvalidDiffChanges = new ArrayList<SimpleDiffChange>();
  @NotNull private final MyFoldingModel myFoldingModel;
  @NotNull private final MyInitialScrollHelper myInitialScrollHelper = new MyInitialScrollHelper();
  @NotNull private final ModifierProvider myModifierProvider;
public SimpleDiffViewer(@NotNull DiffContext context, @NotNull DiffRequest request) {
super(context, (ContentDiffRequest)request);
mySyncScrollable = new MySyncScrollable();
myPrevNextDifferenceIterable = new MyPrevNextDifferenceIterable();
myStatusPanel = new MyStatusPanel();
myFoldingModel = new MyFoldingModel(getEditors(), this);
myModifierProvider = new ModifierProvider();
}
@Override
@CalledInAwt
protected void onInit() {
super.onInit();
myContentPanel.setPainter(new MyDividerPainter());
myModifierProvider.init();
}
@Override
@CalledInAwt
protected void onDispose() {
destroyChangedBlocks();
super.onDispose();
}
@NotNull
@Override
protected List<AnAction> createToolbarActions() {
List<AnAction> group = new ArrayList<AnAction>();
group.add(new MyIgnorePolicySettingAction());
group.add(new MyHighlightPolicySettingAction());
group.add(new MyToggleExpandByDefaultAction());
group.add(new MyToggleAutoScrollAction());
group.add(new MyReadOnlyLockAction());
group.add(myEditorSettingsAction);
return group;
}
@Nullable
@Override
protected List<AnAction> createPopupActions() {
List<AnAction> group = new ArrayList<AnAction>();
group.add(Separator.getInstance());
group.add(new MyIgnorePolicySettingAction().getPopupGroup());
group.add(Separator.getInstance());
group.add(new MyHighlightPolicySettingAction().getPopupGroup());
group.add(Separator.getInstance());
group.add(new MyToggleAutoScrollAction());
group.add(new MyToggleExpandByDefaultAction());
return group;
}
@NotNull
@Override
protected List<AnAction> createEditorPopupActions() {
List<AnAction> group = new ArrayList<AnAction>();
group.add(new ReplaceSelectedChangesAction());
group.add(new AppendSelectedChangesAction());
group.add(new RevertSelectedChangesAction());
group.add(Separator.getInstance());
group.addAll(super.createEditorPopupActions());
return group;
}
@Override
@CalledInAwt
protected void processContextHints() {
super.processContextHints();
myInitialScrollHelper.processContext(myRequest);
}
@Override
@CalledInAwt
protected void updateContextHints() {
super.updateContextHints();
myFoldingModel.updateContext(myRequest, getFoldingModelSettings());
myInitialScrollHelper.updateContext(myRequest);
}
//
// Diff
//
@NotNull
public FoldingModelSupport.Settings getFoldingModelSettings() {
return TextDiffViewerUtil.getFoldingModelSettings(myContext);
}
@Override
protected void onSlowRediff() {
super.onSlowRediff();
myStatusPanel.setBusy(true);
myInitialScrollHelper.onSlowRediff();
}
@Override
@NotNull
protected Runnable performRediff(@NotNull final ProgressIndicator indicator) {
try {
indicator.checkCanceled();
final Document document1 = getContent1().getDocument();
final Document document2 = getContent2().getDocument();
CharSequence[] texts = ApplicationManager.getApplication().runReadAction(new Computable<CharSequence[]>() {
@Override
public CharSequence[] compute() {
return new CharSequence[]{document1.getImmutableCharSequence(), document2.getImmutableCharSequence()};
}
});
List<LineFragment> lineFragments = null;
if (getHighlightPolicy().isShouldCompare()) {
lineFragments = DiffUtil.compare(texts[0], texts[1], getDiffConfig(), indicator);
}
boolean isEqualContents = (lineFragments == null || lineFragments.isEmpty()) &&
StringUtil.equals(document1.getCharsSequence(), document2.getCharsSequence());
return apply(new CompareData(lineFragments, isEqualContents));
}
catch (DiffTooBigException e) {
return applyNotification(DiffNotifications.DIFF_TOO_BIG);
}
catch (ProcessCanceledException e) {
throw e;
}
catch (Throwable e) {
LOG.error(e);
return applyNotification(DiffNotifications.ERROR);
}
}
@NotNull
private Runnable apply(@NotNull final CompareData data) {
return new Runnable() {
@Override
public void run() {
myFoldingModel.updateContext(myRequest, getFoldingModelSettings());
clearDiffPresentation();
if (data.isEqualContent()) myPanel.addNotification(DiffNotifications.EQUAL_CONTENTS);
if (data.getFragments() != null) {
for (LineFragment fragment : data.getFragments()) {
myDiffChanges.add(new SimpleDiffChange(SimpleDiffViewer.this, fragment, getHighlightPolicy().isFineFragments()));
}
}
myFoldingModel.install(data.getFragments(), myRequest, getFoldingModelSettings());
myInitialScrollHelper.onRediff();
myContentPanel.repaintDivider();
myStatusPanel.update();
}
};
}
@NotNull
private Runnable applyNotification(@Nullable final JComponent notification) {
return new Runnable() {
@Override
public void run() {
clearDiffPresentation();
if (notification != null) myPanel.addNotification(notification);
}
};
}
private void clearDiffPresentation() {
myStatusPanel.setBusy(false);
myPanel.resetNotifications();
destroyChangedBlocks();
}
@NotNull
private DiffUtil.DiffConfig getDiffConfig() {
return new DiffUtil.DiffConfig(getTextSettings().getIgnorePolicy(), getHighlightPolicy());
}
@NotNull
private HighlightPolicy getHighlightPolicy() {
return getTextSettings().getHighlightPolicy();
}
//
// Impl
//
private void destroyChangedBlocks() {
for (SimpleDiffChange change : myDiffChanges) {
change.destroyHighlighter();
}
myDiffChanges.clear();
for (SimpleDiffChange change : myInvalidDiffChanges) {
change.destroyHighlighter();
}
myInvalidDiffChanges.clear();
myFoldingModel.destroy();
myContentPanel.repaintDivider();
myStatusPanel.update();
}
@Override
@CalledInAwt
protected void onBeforeDocumentChange(@NotNull DocumentEvent e) {
super.onBeforeDocumentChange(e);
if (myDiffChanges.isEmpty()) return;
Side side = null;
if (e.getDocument() == getEditor(Side.LEFT).getDocument()) side = Side.LEFT;
if (e.getDocument() == getEditor(Side.RIGHT).getDocument()) side = Side.RIGHT;
if (side == null) {
LOG.warn("Unknown document changed");
return;
}
int line1 = e.getDocument().getLineNumber(e.getOffset());
int line2 = e.getDocument().getLineNumber(e.getOffset() + e.getOldLength()) + 1;
int shift = DiffUtil.countLinesShift(e);
List<SimpleDiffChange> invalid = new ArrayList<SimpleDiffChange>();
for (SimpleDiffChange change : myDiffChanges) {
if (change.processChange(line1, line2, shift, side)) {
invalid.add(change);
}
}
if (!invalid.isEmpty()) {
myDiffChanges.removeAll(invalid);
myInvalidDiffChanges.addAll(invalid);
}
}
@Override
protected void onDocumentChange(@NotNull DocumentEvent e) {
super.onDocumentChange(e);
myFoldingModel.onDocumentChanged(e);
}
@CalledInAwt
protected boolean doScrollToChange(@NotNull ScrollToPolicy scrollToPolicy) {
SimpleDiffChange targetChange = scrollToPolicy.select(myDiffChanges);
if (targetChange == null) return false;
doScrollToChange(targetChange, false);
return true;
}
private void doScrollToChange(@NotNull SimpleDiffChange change, final boolean animated) {
final int line1 = change.getStartLine(Side.LEFT);
final int line2 = change.getStartLine(Side.RIGHT);
final int endLine1 = change.getEndLine(Side.LEFT);
final int endLine2 = change.getEndLine(Side.RIGHT);
DiffUtil.moveCaret(getEditor1(), line1);
DiffUtil.moveCaret(getEditor2(), line2);
getSyncScrollSupport().makeVisible(getCurrentSide(), line1, endLine1, line2, endLine2, animated);
}
protected boolean doScrollToContext(@NotNull DiffNavigationContext context) {
ChangedLinesIterator changedLinesIterator = new ChangedLinesIterator(Side.RIGHT);
NavigationContextChecker checker = new NavigationContextChecker(changedLinesIterator, context);
int line = checker.contextMatchCheck();
if (line == -1) {
// this will work for the case, when spaces changes are ignored, and corresponding fragments are not reported as changed
// just try to find target line -> +-
AllLinesIterator allLinesIterator = new AllLinesIterator(Side.RIGHT);
NavigationContextChecker checker2 = new NavigationContextChecker(allLinesIterator, context);
line = checker2.contextMatchCheck();
}
if (line == -1) return false;
scrollToLine(Side.RIGHT, line);
return true;
}
//
// Getters
//
@NotNull
protected List<SimpleDiffChange> getDiffChanges() {
return myDiffChanges;
}
@NotNull
@Override
protected SyncScrollSupport.SyncScrollable getSyncScrollable() {
return mySyncScrollable;
}
@NotNull
@Override
protected JComponent getStatusPanel() {
return myStatusPanel;
}
@NotNull
public ModifierProvider getModifierProvider() {
return myModifierProvider;
}
@NotNull
@Override
public SyncScrollSupport.TwosideSyncScrollSupport getSyncScrollSupport() {
//noinspection ConstantConditions
return super.getSyncScrollSupport();
}
//
// Misc
//
@SuppressWarnings("MethodOverridesStaticMethodOfSuperclass")
public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) {
return TwosideTextDiffViewer.canShowRequest(context, request);
}
@NotNull
@CalledInAwt
private List<SimpleDiffChange> getSelectedChanges(@NotNull Side side) {
final BitSet lines = DiffUtil.getSelectedLines(getEditor(side));
List<SimpleDiffChange> affectedChanges = new ArrayList<SimpleDiffChange>();
for (int i = myDiffChanges.size() - 1; i >= 0; i--) {
SimpleDiffChange change = myDiffChanges.get(i);
int line1 = change.getStartLine(side);
int line2 = change.getEndLine(side);
if (DiffUtil.isSelectedByLine(lines, line1, line2)) {
affectedChanges.add(change);
}
}
return affectedChanges;
}
@Nullable
@CalledInAwt
private SimpleDiffChange getSelectedChange(@NotNull Side side) {
int caretLine = getEditor(side).getCaretModel().getLogicalPosition().line;
for (SimpleDiffChange change : myDiffChanges) {
int line1 = change.getStartLine(side);
int line2 = change.getEndLine(side);
if (DiffUtil.isSelectedByLine(caretLine, line1, line2)) return change;
}
return null;
}
//
// Actions
//
private class MyPrevNextDifferenceIterable extends PrevNextDifferenceIterableBase<SimpleDiffChange> {
@NotNull
@Override
protected List<SimpleDiffChange> getChanges() {
return myDiffChanges;
}
@NotNull
@Override
protected EditorEx getEditor() {
return getCurrentEditor();
}
@Override
protected int getStartLine(@NotNull SimpleDiffChange change) {
return change.getStartLine(getCurrentSide());
}
@Override
protected int getEndLine(@NotNull SimpleDiffChange change) {
return change.getEndLine(getCurrentSide());
}
@Override
protected void scrollToChange(@NotNull SimpleDiffChange change) {
doScrollToChange(change, true);
}
}
private class MyReadOnlyLockAction extends TextDiffViewerUtil.EditorReadOnlyLockAction {
public MyReadOnlyLockAction() {
super(getContext(), getEditableEditors());
}
@Override
protected void doApply(boolean readOnly) {
super.doApply(readOnly);
for (SimpleDiffChange change : myDiffChanges) {
change.updateGutterActions(true);
}
}
}
//
// Modification operations
//
private abstract class ApplySelectedChangesActionBase extends AnAction implements DumbAware {
private final boolean myModifyOpposite;
public ApplySelectedChangesActionBase(@Nullable String text,
@Nullable String description,
@Nullable Icon icon,
boolean modifyOpposite) {
super(text, description, icon);
myModifyOpposite = modifyOpposite;
}
@Override
public void update(@NotNull AnActionEvent e) {
Editor editor = e.getData(CommonDataKeys.EDITOR);
if (editor != getEditor1() && editor != getEditor2()) {
e.getPresentation().setEnabledAndVisible(false);
return;
}
Side side = Side.fromLeft(editor == getEditor(Side.LEFT));
Editor modifiedEditor = getEditor(side.other(myModifyOpposite));
if (!DiffUtil.isEditable(modifiedEditor)) {
e.getPresentation().setEnabledAndVisible(false);
return;
}
e.getPresentation().setIcon(getIcon(side));
e.getPresentation().setVisible(true);
e.getPresentation().setEnabled(isSomeChangeSelected(side));
}
@Override
public void actionPerformed(@NotNull final AnActionEvent e) {
Editor editor = e.getRequiredData(CommonDataKeys.EDITOR);
final Side side = Side.fromLeft(editor == getEditor(Side.LEFT));
final List<SimpleDiffChange> selectedChanges = getSelectedChanges(side);
Editor modifiedEditor = getEditor(side.other(myModifyOpposite));
String title = e.getPresentation().getText() + " selected changes";
DiffUtil.executeWriteCommand(modifiedEditor.getDocument(), e.getProject(), title, new Runnable() {
@Override
public void run() {
apply(side, selectedChanges);
}
});
}
protected boolean isSomeChangeSelected(@NotNull Side side) {
if (myDiffChanges.isEmpty()) return false;
EditorEx editor = getEditor(side);
List<Caret> carets = editor.getCaretModel().getAllCarets();
if (carets.size() != 1) return true;
Caret caret = carets.get(0);
if (caret.hasSelection()) return true;
int line = editor.getDocument().getLineNumber(editor.getExpectedCaretOffset());
for (SimpleDiffChange change : myDiffChanges) {
if (change.isSelectedByLine(line, side)) return true;
}
return false;
}
@NotNull
protected abstract Icon getIcon(@NotNull Side side);
@CalledWithWriteLock
protected abstract void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes);
}
private class ReplaceSelectedChangesAction extends ApplySelectedChangesActionBase {
public ReplaceSelectedChangesAction() {
super("Replace", null, AllIcons.Diff.Arrow, true);
}
@NotNull
@Override
protected Icon getIcon(@NotNull Side side) {
return side.isLeft() ? AllIcons.Diff.ArrowRight : AllIcons.Diff.Arrow;
}
@Override
protected void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes) {
for (SimpleDiffChange change : changes) {
replaceChange(change, side);
}
}
}
private class AppendSelectedChangesAction extends ApplySelectedChangesActionBase {
public AppendSelectedChangesAction() {
super("Insert", null, AllIcons.Diff.ArrowLeftDown, true);
}
@NotNull
@Override
protected Icon getIcon(@NotNull Side side) {
return side.isLeft() ? AllIcons.Diff.ArrowRightDown : AllIcons.Diff.ArrowLeftDown;
}
@Override
protected void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes) {
for (SimpleDiffChange change : changes) {
appendChange(change, side);
}
}
}
private class RevertSelectedChangesAction extends ApplySelectedChangesActionBase {
public RevertSelectedChangesAction() {
super("Revert", null, AllIcons.Diff.Remove, false);
}
@NotNull
@Override
protected Icon getIcon(@NotNull Side side) {
return AllIcons.Diff.Remove;
}
@Override
protected void apply(@NotNull Side side, @NotNull List<SimpleDiffChange> changes) {
for (SimpleDiffChange change : changes) {
replaceChange(change, side.other());
}
}
}
@CalledWithWriteLock
public void replaceChange(@NotNull SimpleDiffChange change, @NotNull final Side sourceSide) {
if (!change.isValid()) return;
Side outputSide = sourceSide.other();
DiffUtil.applyModification(getEditor(outputSide).getDocument(), change.getStartLine(outputSide), change.getEndLine(outputSide),
getEditor(sourceSide).getDocument(), change.getStartLine(sourceSide), change.getEndLine(sourceSide));
change.destroyHighlighter();
myDiffChanges.remove(change);
}
@CalledWithWriteLock
public void appendChange(@NotNull SimpleDiffChange change, @NotNull final Side sourceSide) {
if (!change.isValid()) return;
if (change.getStartLine(sourceSide) == change.getEndLine(sourceSide)) return;
Side outputSide = sourceSide.other();
DiffUtil.applyModification(getEditor(outputSide).getDocument(), change.getEndLine(outputSide), change.getEndLine(outputSide),
getEditor(sourceSide).getDocument(), change.getStartLine(sourceSide), change.getEndLine(sourceSide));
change.destroyHighlighter();
myDiffChanges.remove(change);
}
private class MyHighlightPolicySettingAction extends TextDiffViewerUtil.HighlightPolicySettingAction {
public MyHighlightPolicySettingAction() {
super(getTextSettings());
}
@Override
protected void onSettingsChanged() {
rediff();
}
}
private class MyIgnorePolicySettingAction extends TextDiffViewerUtil.IgnorePolicySettingAction {
public MyIgnorePolicySettingAction() {
super(getTextSettings());
}
@Override
protected void onSettingsChanged() {
rediff();
}
}
private class MyToggleExpandByDefaultAction extends TextDiffViewerUtil.ToggleExpandByDefaultAction {
public MyToggleExpandByDefaultAction() {
super(getTextSettings());
}
@Override
protected void expandAll(boolean expand) {
myFoldingModel.expandAll(expand);
}
}
//
// Scroll from annotate
//
private class AllLinesIterator implements Iterator<Pair<Integer, CharSequence>> {
@NotNull private final Side mySide;
@NotNull private final Document myDocument;
private int myLine = 0;
private AllLinesIterator(@NotNull Side side) {
mySide = side;
myDocument = getEditor(mySide).getDocument();
}
@Override
public boolean hasNext() {
return myLine < getLineCount(myDocument);
}
@Override
public Pair<Integer, CharSequence> next() {
int offset1 = myDocument.getLineStartOffset(myLine);
int offset2 = myDocument.getLineEndOffset(myLine);
CharSequence text = myDocument.getImmutableCharSequence().subSequence(offset1, offset2);
Pair<Integer, CharSequence> pair = new Pair<Integer, CharSequence>(myLine, text);
myLine++;
return pair;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
private class ChangedLinesIterator extends BufferedLineIterator {
@NotNull private final Side mySide;
private int myIndex = 0;
private ChangedLinesIterator(@NotNull Side side) {
mySide = side;
init();
}
@Override
public boolean hasNextBlock() {
return myIndex < myDiffChanges.size();
}
@Override
public void loadNextBlock() {
SimpleDiffChange change = myDiffChanges.get(myIndex);
myIndex++;
int line1 = change.getStartLine(mySide);
int line2 = change.getEndLine(mySide);
Document document = getEditor(mySide).getDocument();
for (int i = line1; i < line2; i++) {
int offset1 = document.getLineStartOffset(i);
int offset2 = document.getLineEndOffset(i);
CharSequence text = document.getImmutableCharSequence().subSequence(offset1, offset2);
addLine(i, text);
}
}
}
//
// Helpers
//
@Nullable
@Override
public Object getData(@NonNls String dataId) {
if (DiffDataKeys.PREV_NEXT_DIFFERENCE_ITERABLE.is(dataId)) {
return myPrevNextDifferenceIterable;
}
else if (DiffDataKeys.CURRENT_CHANGE_RANGE.is(dataId)) {
SimpleDiffChange change = getSelectedChange(getCurrentSide());
if (change != null) {
return new LineRange(change.getStartLine(getCurrentSide()), change.getEndLine(getCurrentSide()));
}
}
return super.getData(dataId);
}
private class MySyncScrollable extends BaseSyncScrollable {
@Override
public boolean isSyncScrollEnabled() {
return getTextSettings().isEnableSyncScroll();
}
public int transfer(@NotNull Side baseSide, int line) {
if (myDiffChanges.isEmpty()) {
return line;
}
return super.transfer(baseSide, line);
}
@Override
protected void processHelper(@NotNull ScrollHelper helper) {
if (!helper.process(0, 0)) return;
for (SimpleDiffChange diffChange : myDiffChanges) {
if (!helper.process(diffChange.getStartLine(Side.LEFT), diffChange.getStartLine(Side.RIGHT))) return;
if (!helper.process(diffChange.getEndLine(Side.LEFT), diffChange.getEndLine(Side.RIGHT))) return;
}
helper.process(getEditor1().getDocument().getLineCount(), getEditor2().getDocument().getLineCount());
}
}
private class MyDividerPainter implements DiffSplitter.Painter, DiffDividerDrawUtil.DividerPaintable {
@Override
public void paint(@NotNull Graphics g, @NotNull JComponent divider) {
Graphics2D gg = DiffDividerDrawUtil.getDividerGraphics(g, divider, getEditor1().getComponent());
gg.setColor(DiffDrawUtil.getDividerColor(getEditor1()));
gg.fill(gg.getClipBounds());
//DividerPolygonUtil.paintSimplePolygons(gg, divider.getWidth(), getEditor1(), getEditor2(), this);
DiffDividerDrawUtil.paintPolygons(gg, divider.getWidth(), getEditor1(), getEditor2(), this);
myFoldingModel.paintOnDivider(gg, divider);
gg.dispose();
}
@Override
public void process(@NotNull Handler handler) {
for (SimpleDiffChange diffChange : myDiffChanges) {
if (!handler.process(diffChange.getStartLine(Side.LEFT), diffChange.getEndLine(Side.LEFT),
diffChange.getStartLine(Side.RIGHT), diffChange.getEndLine(Side.RIGHT),
diffChange.getDiffType().getColor(getEditor1()))) {
return;
}
}
}
}
private class MyStatusPanel extends StatusPanel {
@Override
protected int getChangesCount() {
return myDiffChanges.size() + myInvalidDiffChanges.size();
}
}
private static class CompareData {
@Nullable private final List<LineFragment> myFragments;
private final boolean myEqualContent;
public CompareData(@Nullable List<LineFragment> fragments, boolean equalContent) {
myFragments = fragments;
myEqualContent = equalContent;
}
@Nullable
public List<LineFragment> getFragments() {
return myFragments;
}
public boolean isEqualContent() {
return myEqualContent;
}
}
public class ModifierProvider extends KeyboardModifierListener {
public void init() {
init(myPanel, SimpleDiffViewer.this);
}
@Override
public void onModifiersChanged() {
for (SimpleDiffChange change : myDiffChanges) {
change.updateGutterActions(false);
}
}
}
private static class MyFoldingModel extends FoldingModelSupport {
private final MyPaintable myPaintable = new MyPaintable(0, 1);
public MyFoldingModel(@NotNull List<? extends EditorEx> editors, @NotNull Disposable disposable) {
super(editors.toArray(new EditorEx[2]), disposable);
}
public void install(@Nullable final List<LineFragment> fragments,
@NotNull UserDataHolder context,
@NotNull FoldingModelSupport.Settings settings) {
Iterator<int[]> it = map(fragments, new Function<LineFragment, int[]>() {
@Override
public int[] fun(LineFragment fragment) {
return new int[]{
fragment.getStartLine1(),
fragment.getEndLine1(),
fragment.getStartLine2(),
fragment.getEndLine2()};
}
});
install(it, context, settings);
}
public void paintOnDivider(@NotNull Graphics2D gg, @NotNull Component divider) {
myPaintable.paintOnDivider(gg, divider);
}
}
private class MyInitialScrollHelper extends MyInitialScrollPositionHelper {
@Override
protected boolean doScrollToChange() {
if (myScrollToChange == null) return false;
return SimpleDiffViewer.this.doScrollToChange(myScrollToChange);
}
@Override
protected boolean doScrollToFirstChange() {
return SimpleDiffViewer.this.doScrollToChange(ScrollToPolicy.FIRST_CHANGE);
}
@Override
protected boolean doScrollToContext() {
if (myNavigationContext == null) return false;
return SimpleDiffViewer.this.doScrollToContext(myNavigationContext);
}
}
}
| TangHao1987/intellij-community | platform/diff-impl/src/com/intellij/diff/tools/simple/SimpleDiffViewer.java | Java | apache-2.0 | 28,600 |
#include <iostream>
#include <cstdlib>
#include <fstream>
#include <sstream>
#include <string>
#include <stdexcept>
#include <vector>
#include <list>
#include <algorithm>
#include <math.h>
#include <assert.h>
#include <time.h>
#include <pthread.h>
#include <stdlib.h>
#include "string_utils.h"
#include "seq_contig.h"
#include "params.h"
using namespace std;
// Return the larger of two ints.
int int_max(int i, int j){
  return (i > j) ? i : j;
}
// Return the smaller of two ints.
int int_min(int i, int j){
  return (i < j) ? i : j;
}
// Upper-tail Poisson p-value: P(X >= k) for X ~ Poisson(lambda).
//
// Terms P(X = i) are summed starting at i = k; summation stops once a term
// falls at or below `precision` (that final small term is still included,
// matching the original truncation rule). Requires lambda > 0.
//
// Improvement over the original: the first term is computed once in log-space
// via lgamma (avoiding the O(i) factorial loop per term — the original was
// O(k^2) overall and could overflow intermediate log terms less gracefully),
// and subsequent terms use the recurrence P(X = i+1) = P(X = i) * lambda/(i+1).
double Poisson_p_value(int k, double lambda, double precision ){
  // P(X = k) = exp(-lambda + k*log(lambda) - log(k!)), log(k!) = lgamma(k+1).
  double term = exp(-lambda + ((double)k)*log(lambda) - lgamma((double)k + 1.0));
  double sum = 0;
  double i = (double)k;
  while(true){
    sum += term;
    if(term <= precision) break;
    i += 1.0;
    term *= lambda / i;  // advance to P(X = i+1)
  }
  return sum;
}
// Upper-tail binomial p-value: P(X >= k) for X ~ Binomial(n, p),
// supported only for k == 2 and k == 3 (enforced by the assert).
//
// Fixes two formula bugs in the original:
//  - k == 2: the one-success term used (1-p)^n instead of (1-p)^(n-1),
//    i.e. P(X = 1) was off by a factor of (1-p).
//  - k == 3: the two-success term omitted the 1/2 in C(n,2) = n(n-1)/2,
//    which could even produce negative "probabilities"
//    (e.g. k=3, p=0.5, n=3 gave -0.25).
double small_binomial_p_value(int k, double p, int n){
  assert(k==2 || k==3);
  double q = 1.0 - p;
  double p0 = pow(q, n);                           // P(X = 0)
  double p1 = ((double)n) * p * pow(q, n - 1);     // P(X = 1)
  if(k==2){
    return 1.0 - p0 - p1;
  }
  // k == 3: also subtract P(X = 2) = C(n,2) * p^2 * (1-p)^(n-2).
  double p2 = 0.5 * ((double)n) * ((double)n - 1.0) * p * p * pow(q, n - 2);
  return 1.0 - p0 - p1 - p2;
}
int main(int argc, char* argv[]){
params pars(argc, argv);
pars.require("align_file","align_file",STRING_TYPE);
pars.require("output_path", "output_path", STRING_TYPE);
pars.require("genome_table", "genome_table", STRING_TYPE);
pars.require("QuEST_collapsed_file","QuEST_collapsed_file",STRING_TYPE);
pars.require("report_file","report_file",STRING_TYPE);
pars.optional("new_stack_size","new_stack_size","1",INT_TYPE);
pars.optional("collapse_reads","collapse_reads","true",STRING_TYPE);
pars.optional("collapse_window","collapse_window","100",INT_TYPE);
pars.optional("count_threshold","count_threhsold","3",INT_TYPE);
pars.optional("stack_p_value_threshold","stack_p_value_threshold","0.00001",DOUBLE_TYPE);
pars.optional("percent_positions_hit_threshold","percent_positions_hit_threshold","30",DOUBLE_TYPE);
if(!pars.enforce()){
exit(1);
}
cout<<endl;
pars.list_all_params();
string align_fname = pars.get_string_value("align_file");
string output_path = pars.get_string_value("output_path");
string genome_table_fname = pars.get_string_value("genome_table");
string QuEST_collapsed_fname = pars.get_string_value("QuEST_collapsed_file");
string report_fname = pars.get_string_value("report_file");
string collapse_reads = pars.get_string_value("collapse_reads");
int collapse_window = pars.get_int_value("collapse_window");
int count_threshold = pars.get_int_value("count_threshold");
double stack_p_value_threshold = pars.get_double_value("stack_p_value_threshold");
double percent_positions_hit_threshold = pars.get_double_value("percent_positions_hit_threshold");
double p_v_precision = stack_p_value_threshold/10;
int new_stack_size = pars.get_int_value("new_stack_size");
/*
if(report_fname != "not_applicable"){
remove(report_fname.c_str());
ofstream report_ofstr(report_fname.c_str());
if(!report_ofstr.good()){
cerr<<"Bad file name "<<report_fname<<endl;
exit(1);
}
}
*/
ofstream QuEST_collapsed_ofstr;
if(QuEST_collapsed_fname != "not_applicable"){
QuEST_collapsed_ofstr.open(QuEST_collapsed_fname.c_str());
if(!QuEST_collapsed_ofstr.good()){
cerr<<"Bad file name: "<<QuEST_collapsed_fname<<endl;
exit(1);
}
}
/*
cout<<"align_file: "<<align_fname<<endl;
cout<<"output_path: "<<output_path<<endl;
cout<<"genome_table: "<<genome_table_fname<<endl;
cout<<"collapse_reads: "<<collapse_reads<<endl;
cout<<endl;
*/
ifstream genome_table_str(genome_table_fname.c_str());
if(!genome_table_str.good()){
cerr<<"bad file name: "<<genome_table_fname<<endl;
exit(1);
}
vector<string> contigs;
vector<int> contig_sizes;
char gap_symbol = ' ';
while(genome_table_str.good()){
string dummy_string;
getline(genome_table_str, dummy_string);
if(genome_table_str.good()){
if(dummy_string.length() > 0){
if(dummy_string[0] != '#'){
vector<string> cur_contig_fields = split(dummy_string, gap_symbol);
if(cur_contig_fields.size() >= 2){
string cur_contig_id = cur_contig_fields[0];
int cur_contig_size = atoi(cur_contig_fields[1].c_str());
assert(cur_contig_size >= 0);
contigs.push_back(cur_contig_id);
contig_sizes.push_back(cur_contig_size);
}
}
}
}
}
genome_table_str.close();
vector< vector <int> > pos_hits;
vector< vector <int> > neg_hits;
vector<int> dummy_int_vec;
for(unsigned int i=0; i<contigs.size(); i++){
pos_hits.push_back(dummy_int_vec);
neg_hits.push_back(dummy_int_vec);
}
genome_table_str.close();
int line_counter = 0;
char sep1 = ' ';
if(align_fname != "NA"){
ifstream align_str(align_fname.c_str());
if(!align_str.good()){
cerr<<"Failed to read the file "<<align_fname<<endl;
exit(1);
}
string dummy_string;
while(align_str.good()){
getline(align_str, dummy_string);
if(line_counter % 10000 == 0){
printf ("\rread %.2f M reads ", ((double)line_counter/1000000.0) );
cout.flush();
}
line_counter++;
if(align_str.good()){
if(dummy_string.length() > 0){
if( dummy_string[0] != '#'){
vector<string> cur_hit_entry_fields = split(dummy_string, sep1);
if(cur_hit_entry_fields.size() >= 3){
string cur_hit_contig_name = cur_hit_entry_fields[0];
int cur_hit_5p_coord = atoi(cur_hit_entry_fields[1].c_str());
string cur_hit_orient = cur_hit_entry_fields[2];
if(cur_hit_orient != "+" && cur_hit_orient != "-"){
cout<<"Warning: expected +/- for orientation of read but found "<<cur_hit_orient;
cout<<". Skipping."<<endl;
}
else{
for(unsigned int i=0; i<contigs.size(); i++){
if(cur_hit_contig_name == contigs[i]){
int contig_size = contig_sizes[i];
if(cur_hit_5p_coord < 0 || cur_hit_5p_coord >= contig_size){
cout<<"Warning read coordinate "<<cur_hit_5p_coord<<" is out of boundaries [ 0,";
cout<<contig_size<<" ]. Skipping."<<endl;
}
else{
int start_coord = cur_hit_5p_coord;
if(cur_hit_orient == "+"){
pos_hits[i].push_back(start_coord);
}
else{
neg_hits[i].push_back(start_coord);
}
}
}
}
}
}
}
}
}
}
align_str.close();
}
cout<<endl<<endl;
cout<<"sorting hits"<<endl;
for(unsigned int i=0; i<contigs.size(); i++){
sort(pos_hits[i].begin(), pos_hits[i].end());
sort(neg_hits[i].begin(), neg_hits[i].end());
}
//cout<<"saving the binary align files..."<<endl;
int collapsed_reads = 0;
for(unsigned int i=0; i<contigs.size(); i++){
ofstream cur_out_str;
if(output_path != "not_applicable"){
string cur_output_fname = output_path + "/" + contigs[i] + ".align.bin";
remove(cur_output_fname.c_str());
cur_out_str.open(cur_output_fname.c_str());
if(!cur_out_str.good()){
cerr<<"bad file name: "<<cur_output_fname<<endl;
exit(1);
}
}
string cur_contig = contigs[i];
int cur_entries_pos = (int) pos_hits[i].size();
int cur_entries_neg = (int) neg_hits[i].size();
cout<<endl;
cout<<"-------------------------"<<endl;
cout<<"contig: "<<cur_contig<<endl;
cout<<endl;
cout<<"+ reads: "<<cur_entries_pos<<endl;
cout<<"- reads: "<<cur_entries_neg<<endl;
int cur_contig_name_size = cur_contig.size();
// cout<<endl<<"contig_name_size: "<<contig_name_size<<endl;
if(collapse_reads == "false"){
if(output_path != "not_applicable"){
cur_out_str.write((char*) (&cur_contig_name_size), sizeof(cur_contig_name_size));
cur_out_str.write((char*) &(cur_contig[0]), cur_contig_name_size*sizeof(char));
cur_out_str.write((char*)(&cur_entries_pos), sizeof(cur_entries_pos));
cur_out_str.write((char*)(&cur_entries_neg), sizeof(cur_entries_neg));
cur_out_str.write((char*)(&pos_hits[i][0]), sizeof(int)*cur_entries_pos);
cur_out_str.write((char*)(&neg_hits[i][0]), sizeof(int)*cur_entries_neg);
}
collapsed_reads += cur_entries_pos + cur_entries_neg;
}
else{
vector<short> pos_hit_counts(contig_sizes[i]);
vector<short> neg_hit_counts(contig_sizes[i]);
vector<int> pos_hits_collapsed;
vector<int> neg_hits_collapsed;
int stacks_collapsed = 0;
int reads_in_collapsed_stacks = 0;
//cout<<"Mapping counts"<<endl;
cout<<endl;
for(int j=0; j<contig_sizes[i]; j++){
pos_hit_counts[j] = 0;
neg_hit_counts[j] = 0;
}
for(unsigned int j=0; j<pos_hits[i].size(); j++){
pos_hit_counts[pos_hits[i][j]] ++;
}
for(unsigned int j=0; j<neg_hits[i].size(); j++){
neg_hit_counts[neg_hits[i][j]] ++;
}
for(int j=0; j<(int)pos_hit_counts.size(); j++){
if(pos_hit_counts[j] >= count_threshold){
int reads_in_the_collapse_window =0;
int positions_hit = 0;
for(int k=int_max(0,j-collapse_window/2); k<=int_min(j+collapse_window/2,contig_sizes[i]-1); k++){
if(pos_hit_counts[k] > 0){
reads_in_the_collapse_window += pos_hit_counts[k];
positions_hit++;
}
}
double percent_positions_hit = 100 * ((double)positions_hit)/((double)(collapse_window+1));
int cur_stack = (int) pos_hit_counts[j];
//double p_v_precision = stack_p_value_threshold/10;
double cur_stack_p_value;
if(cur_stack <=3 && cur_stack>=2){
cur_stack_p_value = small_binomial_p_value(cur_stack, 1.0/((double)collapse_window), reads_in_the_collapse_window);
}
else{
cur_stack_p_value =
Poisson_p_value(cur_stack, ((double)reads_in_the_collapse_window) / ((double)collapse_window),p_v_precision);
}
if(cur_stack_p_value <= stack_p_value_threshold && percent_positions_hit <= percent_positions_hit_threshold){
if(QuEST_collapsed_fname != "not_applicable"){
QuEST_collapsed_ofstr<<contigs[i]<<" "<<j<<" +"<<endl;
}
for(int s=0; s<new_stack_size; s++) pos_hits_collapsed.push_back(j);
stacks_collapsed++;
reads_in_collapsed_stacks += cur_stack;
}
else{
for(int k=0; k<cur_stack; k++){
pos_hits_collapsed.push_back(j);
QuEST_collapsed_ofstr<<contigs[i]<<" "<<j<<" +"<<endl;
}
}
}
else{
if(pos_hit_counts[j] > 0){
for(int k=0; k<pos_hit_counts[j]; k++){
pos_hits_collapsed.push_back(j);
if(QuEST_collapsed_fname != "not_applicable"){
QuEST_collapsed_ofstr<<contigs[i]<<" "<<j<<" +"<<endl;
}
}
}
}
}
for(int j=0; j<(int)neg_hit_counts.size(); j++){
if(neg_hit_counts[j] >= count_threshold){
int reads_in_the_collapse_window =0;
int positions_hit = 0;
for(int k=int_max(0,j-collapse_window/2); k<=int_min(j+collapse_window/2,contig_sizes[i]-1); k++){
if(neg_hit_counts[k] > 0){
reads_in_the_collapse_window += neg_hit_counts[k];
positions_hit++;
}
}
double percent_positions_hit = 100*((double)positions_hit)/((double)(collapse_window+1));
int cur_stack = (int) neg_hit_counts[j];
double cur_stack_p_value;
if(cur_stack <=3 && cur_stack>=2){
cur_stack_p_value = small_binomial_p_value(cur_stack, 1.0/((double)collapse_window), reads_in_the_collapse_window);
}
else{
cur_stack_p_value =
Poisson_p_value(cur_stack, ((double)reads_in_the_collapse_window) / ((double)collapse_window),p_v_precision);
}
// Poisson_p_value(cur_stack, ((double)reads_in_the_collapse_window) / ((double)collapse_window), p_v_precision);
if(cur_stack_p_value <= stack_p_value_threshold && percent_positions_hit <= percent_positions_hit_threshold){
if(QuEST_collapsed_fname != "not_applicable"){
QuEST_collapsed_ofstr<<contigs[i]<<" "<<j<<" -"<<endl;
}
for(int s=0; s<new_stack_size; s++) neg_hits_collapsed.push_back(j);
stacks_collapsed++;
reads_in_collapsed_stacks += cur_stack;
}
else{
for(int k=0; k<cur_stack; k++){
neg_hits_collapsed.push_back(j);
if(QuEST_collapsed_fname != "not_applicable"){
QuEST_collapsed_ofstr<<contigs[i]<<" "<<j<<" -"<<endl;
}
}
}
}
else{
if(neg_hit_counts[j] > 0){
for(int k=0; k<neg_hit_counts[j]; k++){
neg_hits_collapsed.push_back(j);
if(QuEST_collapsed_fname != "not_applicable"){
QuEST_collapsed_ofstr<<contigs[i]<<" "<<j<<" -"<<endl;
}
}
}
}
}
//exit(0);
/*
vector<int> pos_hits_collapsed;
vector<int> neg_hits_collapsed;
int last_coord = -1;
for(unsigned int j=0; j<pos_hits[i].size(); j++){
if(j>0){
if(last_coord == pos_hits[i][j]){ //do nothing
}
if(last_coord < pos_hits[i][j]){
pos_hits_collapsed.push_back(last_coord);
}
}
last_coord = pos_hits[i][j];
}
last_coord = -1;
for(unsigned int j=0; j<neg_hits[i].size(); j++){
if(j>0){
if(last_coord == neg_hits[i][j]){ //do nothing
}
if(last_coord < neg_hits[i][j]){
neg_hits_collapsed.push_back(last_coord);
}
}
last_coord = neg_hits[i][j];
}
*/
int cur_entries_pos_collapsed = pos_hits_collapsed.size();
int cur_entries_neg_collapsed = neg_hits_collapsed.size();
collapsed_reads += (cur_entries_pos_collapsed + cur_entries_neg_collapsed);
cout<<"+ reads after collapsing: "<<cur_entries_pos_collapsed<<endl;
cout<<"- reads after collapsing: "<<cur_entries_neg_collapsed<<endl;
cout<<endl;
cout<<"stacks collapsed: "<<stacks_collapsed<<endl;
cout<<"reads in collapsed stacks: "<<reads_in_collapsed_stacks<<endl;
if(output_path != "not_applicable"){
cur_out_str.write((char*) (&cur_contig_name_size), sizeof(cur_contig_name_size));
cur_out_str.write((char*) &(cur_contig[0]), cur_contig_name_size*sizeof(char));
cur_out_str.write((char*)(&cur_entries_pos_collapsed), sizeof(cur_entries_pos_collapsed));
cur_out_str.write((char*)(&cur_entries_neg_collapsed), sizeof(cur_entries_neg_collapsed));
cur_out_str.write((char*)(&pos_hits_collapsed[0]), sizeof(int)*cur_entries_pos_collapsed);
cur_out_str.write((char*)(&neg_hits_collapsed[0]), sizeof(int)*cur_entries_neg_collapsed);
}
}
if(output_path != "not_applicable"){
cur_out_str.close();
}
}
if(report_fname != "not_applicable"){
remove(report_fname.c_str());
ofstream report_ofstr(report_fname.c_str());
if(!report_ofstr.good()){
cerr<<"Bad file name "<<report_fname<<endl;
exit(1);
}
report_ofstr<<collapsed_reads<<endl;
report_ofstr.close();
}
QuEST_collapsed_ofstr.close();
cout<<"done!"<<endl;
cout<<endl;
return 0;
}
| verdurin/biobench2 | QuEST/src/collapse_reads.cpp | C++ | apache-2.0 | 15,129 |
/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package validation
import (
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/util/validation"
)
// ValidateEvent makes sure that the event makes sense.
func ValidateEvent(event *api.Event) validation.ErrorList {
	errs := validation.ErrorList{}

	// Events attached to a Node carry no namespace requirement (see TODO in
	// the original); for every other kind the event must live in the same
	// namespace as the object it describes.
	involved := event.InvolvedObject
	if involved.Kind != "Node" && event.Namespace != involved.Namespace {
		errs = append(errs, validation.NewFieldInvalid("involvedObject.namespace", involved.Namespace, "namespace does not match involvedObject"))
	}

	// The event's own namespace must be a valid DNS-1123 subdomain.
	if !validation.IsDNS1123Subdomain(event.Namespace) {
		errs = append(errs, validation.NewFieldInvalid("namespace", event.Namespace, ""))
	}

	return errs
}
| yangxiangyu/kubernetes | pkg/api/validation/events.go | GO | apache-2.0 | 1,319 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.table.batching;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledExecutorService;
import java.util.function.Supplier;

import org.apache.samza.SamzaException;
import org.apache.samza.context.Context;
import org.apache.samza.storage.kv.Entry;
import org.apache.samza.table.AsyncReadWriteUpdateTable;
import org.apache.samza.table.utils.TableMetricsUtil;
import org.apache.samza.util.HighResolutionClock;
/**
* A wrapper of a {@link AsyncReadWriteUpdateTable} that supports batch operations.
*
* This batching table does not guarantee any ordering of different operation types within the batch.
* For instance, query(Q) and put/delete(u) operations arrives in the following sequences, Q1, U1, Q2, U2,
* it does not mean the remote data store will receive the messages in the same order. Instead,
* the operations will be grouped by type and sent via micro batches. For this sequence, Q1 and Q2 will
* be grouped to micro batch B1; U1 and U2 will be grouped to micro batch B2, the implementation class
* can decide the order of the micro batches.
*
* Synchronized table operations (get/put/update/delete) should be used with caution for the batching feature.
* If the table is used by a single thread, there will be at most one operation in the batch, and the
* batch will be performed when the TTL of the batch window expires. Batching does not make sense in this scenario.
*
* The Batch implementation class can throw {@link BatchingNotSupportedException} if it thinks the operation is
* not batch-able. When receiving this exception, {@link AsyncBatchingTable} will send the operation to the
* {@link AsyncReadWriteUpdateTable}.
*
* @param <K> The type of the key.
* @param <V> The type of the value.
* @param <U> the type of the update applied to this table
*/
public class AsyncBatchingTable<K, V, U> implements AsyncReadWriteUpdateTable<K, V, U> {
  /** Target table that actually serves both batched and non-batched operations. */
  private final AsyncReadWriteUpdateTable<K, V, U> table;
  private final String tableId;
  private final BatchProvider<K, V, U> batchProvider;
  private final ScheduledExecutorService batchTimerExecutorService;
  /** Created lazily in {@link #init(Context)} (or via the @VisibleForTesting hook). */
  private BatchProcessor<K, V, U> batchProcessor;

  /**
   * @param tableId The id of the table.
   * @param table The target table that serves the batch operations.
   * @param batchProvider Batch provider to create a batch instance.
   * @param batchTimerExecutorService Executor service for batch timer.
   */
  public AsyncBatchingTable(String tableId, AsyncReadWriteUpdateTable<K, V, U> table, BatchProvider<K, V, U> batchProvider,
      ScheduledExecutorService batchTimerExecutorService) {
    Preconditions.checkNotNull(tableId);
    Preconditions.checkNotNull(table);
    Preconditions.checkNotNull(batchProvider);
    Preconditions.checkNotNull(batchTimerExecutorService);

    this.tableId = tableId;
    this.table = table;
    this.batchProvider = batchProvider;
    this.batchTimerExecutorService = batchTimerExecutorService;
  }

  /**
   * Attempts the batched form of an operation; if the batch implementation
   * rejects it with {@link BatchingNotSupportedException}, falls back to the
   * wrapped table. Any other synchronous failure is wrapped in a
   * {@link SamzaException}. Factored out because get/put/update/delete all
   * shared this exact control flow.
   *
   * @param batchedOp supplier that submits the operation to the batch processor
   * @param bypassOp supplier that sends the operation directly to the table
   * @param <T> result type of the operation
   * @return future of the operation result
   */
  private <T> CompletableFuture<T> batchOrBypass(Supplier<CompletableFuture<T>> batchedOp,
      Supplier<CompletableFuture<T>> bypassOp) {
    try {
      return batchedOp.get();
    } catch (BatchingNotSupportedException e) {
      return bypassOp.get();
    } catch (Exception e) {
      throw new SamzaException(e);
    }
  }

  @Override
  public CompletableFuture<V> getAsync(K key, Object... args) {
    return batchOrBypass(
        () -> batchProcessor.processQueryOperation(new GetOperation<>(key, args)),
        () -> table.getAsync(key, args));
  }

  @Override
  public CompletableFuture<Map<K, V>> getAllAsync(List<K> keys, Object... args) {
    // Bulk operations are not batched; forward args so table-specific
    // options are not silently dropped (previously they were discarded).
    return table.getAllAsync(keys, args);
  }

  @Override
  public <T> CompletableFuture<T> readAsync(int opId, Object ... args) {
    return table.readAsync(opId, args);
  }

  @Override
  public CompletableFuture<Void> putAsync(K key, V value, Object... args) {
    return batchOrBypass(
        () -> batchProcessor.processPutDeleteOrUpdateOperations(new PutOperation<>(key, value, args)),
        () -> table.putAsync(key, value, args));
  }

  @Override
  public CompletableFuture<Void> putAllAsync(List<Entry<K, V>> entries, Object... args) {
    // Not batched; forward args (previously discarded).
    return table.putAllAsync(entries, args);
  }

  @Override
  public CompletableFuture<Void> updateAsync(K key, U update) {
    return batchOrBypass(
        () -> batchProcessor.processPutDeleteOrUpdateOperations(new UpdateOperation<>(key, update)),
        () -> table.updateAsync(key, update));
  }

  @Override
  public CompletableFuture<Void> updateAllAsync(List<Entry<K, U>> updates) {
    return table.updateAllAsync(updates);
  }

  @Override
  public CompletableFuture<Void> deleteAsync(K key, Object... args) {
    return batchOrBypass(
        () -> batchProcessor.processPutDeleteOrUpdateOperations(new DeleteOperation<>(key, args)),
        () -> table.deleteAsync(key, args));
  }

  @Override
  public CompletableFuture<Void> deleteAllAsync(List<K> keys, Object... args) {
    // Not batched; forward args (previously discarded).
    return table.deleteAllAsync(keys, args);
  }

  @Override
  public void init(Context context) {
    table.init(context);
    final TableMetricsUtil metricsUtil = new TableMetricsUtil(context, this, tableId);
    createBatchProcessor(TableMetricsUtil.mayCreateHighResolutionClock(context.getJobContext().getConfig()),
        new BatchMetrics(metricsUtil));
  }

  @Override
  public <T> CompletableFuture<T> writeAsync(int opId, Object ... args) {
    return table.writeAsync(opId, args);
  }

  @Override
  public void flush() {
    table.flush();
  }

  @Override
  public void close() {
    // Stop the batch timer before closing the underlying table so no new
    // batches are flushed into a closed table.
    batchProcessor.stop();
    table.close();
  }

  @VisibleForTesting
  void createBatchProcessor(HighResolutionClock clock, BatchMetrics batchMetrics) {
    batchProcessor = new BatchProcessor<>(batchMetrics, new TableBatchHandler<>(table),
        batchProvider, clock, batchTimerExecutorService);
  }

  @VisibleForTesting
  BatchProcessor<K, V, U> getBatchProcessor() {
    return batchProcessor;
  }
}
| apache/samza | samza-core/src/main/java/org/apache/samza/table/batching/AsyncBatchingTable.java | Java | apache-2.0 | 6,955 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.common.storage_policies """
import contextlib
import six
import logging
import unittest
import os
import mock
from functools import partial
from six.moves.configparser import ConfigParser
from tempfile import NamedTemporaryFile
from test.unit import patch_policies, FakeRing, temptree, DEFAULT_TEST_EC_TYPE
import swift.common.storage_policy
from swift.common.storage_policy import (
StoragePolicyCollection, POLICIES, PolicyError, parse_storage_policies,
reload_storage_policies, get_policy_string, split_policy_string,
BaseStoragePolicy, StoragePolicy, ECStoragePolicy, REPL_POLICY, EC_POLICY,
VALID_EC_TYPES, DEFAULT_EC_OBJECT_SEGMENT_SIZE, BindPortsCache)
from swift.common.ring import RingData
from swift.common.exceptions import RingLoadError
from pyeclib.ec_iface import ECDriver
class CapturingHandler(logging.Handler):
    """A logging handler that simply buffers every emitted record in memory.

    Tests read the captured records back through the ``_records`` list
    (see ``capture_logging``).
    """

    def __init__(self):
        logging.Handler.__init__(self)
        self._records = []

    def emit(self, record):
        # Remember the raw LogRecord; no formatting is performed.
        self._records.append(record)
@contextlib.contextmanager
def capture_logging(log_name):
    """Attach a CapturingHandler to the named logger for the duration of a
    ``with`` block, yielding the list that accumulates the captured records.

    The handler is always detached on exit, even if the block raises.
    """
    handler = CapturingHandler()
    log = logging.getLogger(log_name)
    log.addHandler(handler)
    try:
        yield handler._records
    finally:
        log.removeHandler(handler)
@BaseStoragePolicy.register('fake')
class FakeStoragePolicy(BaseStoragePolicy):
    """
    Test StoragePolicy class - the only user at the moment is
    test_validate_policies_type_invalid()

    Registered under the policy type 'fake' so it can be instantiated
    directly (BaseStoragePolicy itself is abstract).
    """
    def __init__(self, idx, name='', is_default=False, is_deprecated=False,
                 object_ring=None):
        # Pure pass-through to the base class; no extra behavior is added.
        super(FakeStoragePolicy, self).__init__(
            idx, name, is_default, is_deprecated, object_ring)
class TestStoragePolicies(unittest.TestCase):
def _conf(self, conf_str):
conf_str = "\n".join(line.strip() for line in conf_str.split("\n"))
if six.PY2:
conf = ConfigParser()
else:
conf = ConfigParser(strict=False)
conf.readfp(six.StringIO(conf_str))
return conf
def assertRaisesWithMessage(self, exc_class, message, f, *args, **kwargs):
try:
f(*args, **kwargs)
except exc_class as err:
err_msg = str(err)
self.assertTrue(message in err_msg, 'Error message %r did not '
'have expected substring %r' % (err_msg, message))
else:
self.fail('%r did not raise %s' % (message, exc_class.__name__))
    def test_policy_baseclass_instantiate(self):
        # BaseStoragePolicy is abstract: direct instantiation must fail with
        # a TypeError whose message points the caller at a concrete subclass.
        self.assertRaisesWithMessage(TypeError,
                                     "Can't instantiate BaseStoragePolicy",
                                     BaseStoragePolicy, 1, 'one')
    @patch_policies([
        StoragePolicy(0, 'zero', is_default=True),
        StoragePolicy(1, 'one'),
        StoragePolicy(2, 'two'),
        StoragePolicy(3, 'three', is_deprecated=True),
        ECStoragePolicy(10, 'ten', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=10, ec_nparity=4),
    ])
    def test_swift_info(self):
        # get_policy_info() feeds /info; deprecated policies must be hidden
        # and only the default policy carries the 'default' flag.
        # the deprecated 'three' should not exist in expect
        expect = [{'aliases': 'zero', 'default': True, 'name': 'zero', },
                  {'aliases': 'two', 'name': 'two'},
                  {'aliases': 'one', 'name': 'one'},
                  {'aliases': 'ten', 'name': 'ten'}]
        swift_info = POLICIES.get_policy_info()
        # Sort both sides by name so the comparison is order-independent.
        self.assertEqual(sorted(expect, key=lambda k: k['name']),
                         sorted(swift_info, key=lambda k: k['name']))
    @patch_policies
    def test_get_policy_string(self):
        # Falsy indexes (0, None, '') leave the base name untouched; a real
        # non-zero index is appended as a '-<index>' suffix.
        self.assertEqual(get_policy_string('something', 0), 'something')
        self.assertEqual(get_policy_string('something', None), 'something')
        self.assertEqual(get_policy_string('something', ''), 'something')
        self.assertEqual(get_policy_string('something', 1),
                         'something' + '-1')
        # An index with no matching policy is rejected.
        self.assertRaises(PolicyError, get_policy_string, 'something', 99)
@patch_policies
def test_split_policy_string(self):
expectations = {
'something': ('something', POLICIES[0]),
'something-1': ('something', POLICIES[1]),
'tmp': ('tmp', POLICIES[0]),
'objects': ('objects', POLICIES[0]),
'tmp-1': ('tmp', POLICIES[1]),
'objects-1': ('objects', POLICIES[1]),
'objects-': PolicyError,
'objects-0': PolicyError,
'objects--1': ('objects-', POLICIES[1]),
'objects-+1': PolicyError,
'objects--': PolicyError,
'objects-foo': PolicyError,
'objects--bar': PolicyError,
'objects-+bar': PolicyError,
# questionable, demonstrated as inverse of get_policy_string
'objects+0': ('objects+0', POLICIES[0]),
'': ('', POLICIES[0]),
'0': ('0', POLICIES[0]),
'-1': ('', POLICIES[1]),
}
for policy_string, expected in expectations.items():
if expected == PolicyError:
try:
invalid = split_policy_string(policy_string)
except PolicyError:
continue # good
else:
self.fail('The string %r returned %r '
'instead of raising a PolicyError' %
(policy_string, invalid))
self.assertEqual(expected, split_policy_string(policy_string))
# should be inverse of get_policy_string
self.assertEqual(policy_string, get_policy_string(*expected))
    def test_defaults(self):
        # Sanity-check the module-level POLICIES collection: it is non-empty,
        # its default policy is flagged as such, and index-0 lookups round-trip
        # through get_by_index/get_by_name.
        self.assertGreater(len(POLICIES), 0)

        # test class functions
        default_policy = POLICIES.default
        self.assertTrue(default_policy.is_default)
        zero_policy = POLICIES.get_by_index(0)
        self.assertTrue(zero_policy.idx == 0)
        zero_policy_by_name = POLICIES.get_by_name(zero_policy.name)
        self.assertTrue(zero_policy_by_name.idx == 0)
def test_storage_policy_repr(self):
test_policies = [StoragePolicy(0, 'aay', True),
StoragePolicy(1, 'bee', False),
StoragePolicy(2, 'cee', False),
ECStoragePolicy(10, 'ten',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=10, ec_nparity=3),
ECStoragePolicy(11, 'eleven',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=10, ec_nparity=3,
ec_duplication_factor=2)]
policies = StoragePolicyCollection(test_policies)
for policy in policies:
policy_repr = repr(policy)
self.assertTrue(policy.__class__.__name__ in policy_repr)
self.assertTrue('is_default=%s' % policy.is_default in policy_repr)
self.assertTrue('is_deprecated=%s' % policy.is_deprecated in
policy_repr)
self.assertTrue(policy.name in policy_repr)
if policy.policy_type == EC_POLICY:
self.assertTrue('ec_type=%s' % policy.ec_type in policy_repr)
self.assertTrue('ec_ndata=%s' % policy.ec_ndata in policy_repr)
self.assertTrue('ec_nparity=%s' %
policy.ec_nparity in policy_repr)
self.assertTrue('ec_segment_size=%s' %
policy.ec_segment_size in policy_repr)
if policy.ec_duplication_factor > 1:
self.assertTrue('ec_duplication_factor=%s' %
policy.ec_duplication_factor in
policy_repr)
collection_repr = repr(policies)
collection_repr_lines = collection_repr.splitlines()
self.assertTrue(
policies.__class__.__name__ in collection_repr_lines[0])
self.assertEqual(len(policies), len(collection_repr_lines[1:-1]))
for policy, line in zip(policies, collection_repr_lines[1:-1]):
self.assertTrue(repr(policy) in line)
with patch_policies(policies):
self.assertEqual(repr(POLICIES), collection_repr)
def test_validate_policies_defaults(self):
# 0 explicit default
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False),
StoragePolicy(2, 'two', False)]
policies = StoragePolicyCollection(test_policies)
self.assertEqual(policies.default, test_policies[0])
self.assertEqual(policies.default.name, 'zero')
# non-zero explicit default
test_policies = [StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', False),
StoragePolicy(2, 'two', True)]
policies = StoragePolicyCollection(test_policies)
self.assertEqual(policies.default, test_policies[2])
self.assertEqual(policies.default.name, 'two')
# multiple defaults
test_policies = [StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', True),
StoragePolicy(2, 'two', True)]
self.assertRaisesWithMessage(
PolicyError, 'Duplicate default', StoragePolicyCollection,
test_policies)
# nothing specified
test_policies = []
policies = StoragePolicyCollection(test_policies)
self.assertEqual(policies.default, policies[0])
self.assertEqual(policies.default.name, 'Policy-0')
# no default specified with only policy index 0
test_policies = [StoragePolicy(0, 'zero')]
policies = StoragePolicyCollection(test_policies)
self.assertEqual(policies.default, policies[0])
# no default specified with multiple policies
test_policies = [StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', False),
StoragePolicy(2, 'two', False)]
self.assertRaisesWithMessage(
PolicyError, 'Unable to find default policy',
StoragePolicyCollection, test_policies)
def test_deprecate_policies(self):
# deprecation specified
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False),
StoragePolicy(2, 'two', False, is_deprecated=True)]
policies = StoragePolicyCollection(test_policies)
self.assertEqual(policies.default, test_policies[0])
self.assertEqual(policies.default.name, 'zero')
self.assertEqual(len(policies), 3)
# multiple policies requires default
test_policies = [StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', False, is_deprecated=True),
StoragePolicy(2, 'two', False)]
self.assertRaisesWithMessage(
PolicyError, 'Unable to find default policy',
StoragePolicyCollection, test_policies)
def test_validate_policies_indexes(self):
# duplicate indexes
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False),
StoragePolicy(1, 'two', False)]
self.assertRaises(PolicyError, StoragePolicyCollection,
test_policies)
def test_validate_policy_params(self):
StoragePolicy(0, 'name') # sanity
# bogus indexes
self.assertRaises(PolicyError, FakeStoragePolicy, 'x', 'name')
self.assertRaises(PolicyError, FakeStoragePolicy, -1, 'name')
# non-zero Policy-0
self.assertRaisesWithMessage(PolicyError, 'reserved',
FakeStoragePolicy, 1, 'policy-0')
# deprecate default
self.assertRaisesWithMessage(
PolicyError, 'Deprecated policy can not be default',
FakeStoragePolicy, 1, 'Policy-1', is_default=True,
is_deprecated=True)
# weird names
names = (
'',
'name_foo',
'name\nfoo',
'name foo',
u'name \u062a',
'name \xd8\xaa',
)
for name in names:
self.assertRaisesWithMessage(PolicyError, 'Invalid name',
FakeStoragePolicy, 1, name)
def test_validate_policies_names(self):
# duplicate names
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'zero', False),
StoragePolicy(2, 'two', False)]
self.assertRaises(PolicyError, StoragePolicyCollection,
test_policies)
def test_validate_policies_type_default(self):
# no type specified - make sure the policy is initialized to
# DEFAULT_POLICY_TYPE
test_policy = FakeStoragePolicy(0, 'zero', True)
self.assertEqual(test_policy.policy_type, 'fake')
def test_validate_policies_type_invalid(self):
class BogusStoragePolicy(FakeStoragePolicy):
policy_type = 'bogus'
# unsupported policy type - initialization with FakeStoragePolicy
self.assertRaisesWithMessage(PolicyError, 'Invalid type',
BogusStoragePolicy, 1, 'one')
def test_policies_type_attribute(self):
test_policies = [
StoragePolicy(0, 'zero', is_default=True),
StoragePolicy(1, 'one'),
StoragePolicy(2, 'two'),
StoragePolicy(3, 'three', is_deprecated=True),
ECStoragePolicy(10, 'ten', ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=10, ec_nparity=3),
]
policies = StoragePolicyCollection(test_policies)
self.assertEqual(policies.get_by_index(0).policy_type,
REPL_POLICY)
self.assertEqual(policies.get_by_index(1).policy_type,
REPL_POLICY)
self.assertEqual(policies.get_by_index(2).policy_type,
REPL_POLICY)
self.assertEqual(policies.get_by_index(3).policy_type,
REPL_POLICY)
self.assertEqual(policies.get_by_index(10).policy_type,
EC_POLICY)
def test_names_are_normalized(self):
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'ZERO', False)]
self.assertRaises(PolicyError, StoragePolicyCollection,
test_policies)
policies = StoragePolicyCollection([StoragePolicy(0, 'zEro', True),
StoragePolicy(1, 'One', False)])
pol0 = policies[0]
pol1 = policies[1]
for name in ('zero', 'ZERO', 'zErO', 'ZeRo'):
self.assertEqual(pol0, policies.get_by_name(name))
self.assertEqual(policies.get_by_name(name).name, 'zEro')
for name in ('one', 'ONE', 'oNe', 'OnE'):
self.assertEqual(pol1, policies.get_by_name(name))
self.assertEqual(policies.get_by_name(name).name, 'One')
def test_multiple_names(self):
# checking duplicate on insert
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False, aliases='zero')]
self.assertRaises(PolicyError, StoragePolicyCollection,
test_policies)
# checking correct retrival using other names
test_policies = [StoragePolicy(0, 'zero', True, aliases='cero, kore'),
StoragePolicy(1, 'one', False, aliases='uno, tahi'),
StoragePolicy(2, 'two', False, aliases='dos, rua')]
policies = StoragePolicyCollection(test_policies)
for name in ('zero', 'cero', 'kore'):
self.assertEqual(policies.get_by_name(name), test_policies[0])
for name in ('two', 'dos', 'rua'):
self.assertEqual(policies.get_by_name(name), test_policies[2])
# Testing parsing of conf files/text
good_conf = self._conf("""
[storage-policy:0]
name = one
aliases = uno, tahi
default = yes
""")
policies = parse_storage_policies(good_conf)
self.assertEqual(policies.get_by_name('one'),
policies[0])
self.assertEqual(policies.get_by_name('one'),
policies.get_by_name('tahi'))
name_repeat_conf = self._conf("""
[storage-policy:0]
name = one
aliases = one
default = yes
""")
# Test on line below should not generate errors. Repeat of main
# name under aliases is permitted during construction
# but only because automated testing requires it.
policies = parse_storage_policies(name_repeat_conf)
extra_commas_conf = self._conf("""
[storage-policy:0]
name = one
aliases = ,,one, ,
default = yes
""")
# Extra blank entries should be silently dropped
policies = parse_storage_policies(extra_commas_conf)
bad_conf = self._conf("""
[storage-policy:0]
name = one
aliases = uno, uno
default = yes
""")
self.assertRaisesWithMessage(PolicyError,
'is already assigned to this policy',
parse_storage_policies, bad_conf)
def test_multiple_names_EC(self):
# checking duplicate names on insert
test_policies_ec = [
ECStoragePolicy(
0, 'ec8-2',
aliases='zeus, jupiter',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=8, ec_nparity=2,
object_ring=FakeRing(replicas=8),
is_default=True),
ECStoragePolicy(
1, 'ec10-4',
aliases='ec8-2',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=10, ec_nparity=4,
object_ring=FakeRing(replicas=10))]
self.assertRaises(PolicyError, StoragePolicyCollection,
test_policies_ec)
# checking correct retrival using other names
good_test_policies_EC = [
ECStoragePolicy(0, 'ec8-2', aliases='zeus, jupiter',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=8, ec_nparity=2,
object_ring=FakeRing(replicas=10),
is_default=True),
ECStoragePolicy(1, 'ec10-4', aliases='athena, minerva',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=10, ec_nparity=4,
object_ring=FakeRing(replicas=14)),
ECStoragePolicy(2, 'ec4-2', aliases='poseidon, neptune',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=4, ec_nparity=2,
object_ring=FakeRing(replicas=6)),
ECStoragePolicy(3, 'ec4-2-dup', aliases='uzuki, rin',
ec_type=DEFAULT_TEST_EC_TYPE,
ec_ndata=4, ec_nparity=2,
ec_duplication_factor=2,
object_ring=FakeRing(replicas=12)),
]
ec_policies = StoragePolicyCollection(good_test_policies_EC)
for name in ('ec8-2', 'zeus', 'jupiter'):
self.assertEqual(ec_policies.get_by_name(name), ec_policies[0])
for name in ('ec10-4', 'athena', 'minerva'):
self.assertEqual(ec_policies.get_by_name(name), ec_policies[1])
for name in ('ec4-2', 'poseidon', 'neptune'):
self.assertEqual(ec_policies.get_by_name(name), ec_policies[2])
for name in ('ec4-2-dup', 'uzuki', 'rin'):
self.assertEqual(ec_policies.get_by_name(name), ec_policies[3])
# Testing parsing of conf files/text
good_ec_conf = self._conf("""
[storage-policy:0]
name = ec8-2
aliases = zeus, jupiter
policy_type = erasure_coding
ec_type = %(ec_type)s
default = yes
ec_num_data_fragments = 8
ec_num_parity_fragments = 2
[storage-policy:1]
name = ec10-4
aliases = poseidon, neptune
policy_type = erasure_coding
ec_type = %(ec_type)s
ec_num_data_fragments = 10
ec_num_parity_fragments = 4
[storage-policy:2]
name = ec4-2-dup
aliases = uzuki, rin
policy_type = erasure_coding
ec_type = %(ec_type)s
ec_num_data_fragments = 4
ec_num_parity_fragments = 2
ec_duplication_factor = 2
""" % {'ec_type': DEFAULT_TEST_EC_TYPE})
ec_policies = parse_storage_policies(good_ec_conf)
self.assertEqual(ec_policies.get_by_name('ec8-2'),
ec_policies[0])
self.assertEqual(ec_policies.get_by_name('ec10-4'),
ec_policies.get_by_name('poseidon'))
self.assertEqual(ec_policies.get_by_name('ec4-2-dup'),
ec_policies.get_by_name('uzuki'))
name_repeat_ec_conf = self._conf("""
[storage-policy:0]
name = ec8-2
aliases = ec8-2
policy_type = erasure_coding
ec_type = %(ec_type)s
default = yes
ec_num_data_fragments = 8
ec_num_parity_fragments = 2
""" % {'ec_type': DEFAULT_TEST_EC_TYPE})
# Test on line below should not generate errors. Repeat of main
# name under aliases is permitted during construction
# but only because automated testing requires it.
ec_policies = parse_storage_policies(name_repeat_ec_conf)
bad_ec_conf = self._conf("""
[storage-policy:0]
name = ec8-2
aliases = zeus, zeus
policy_type = erasure_coding
ec_type = %(ec_type)s
default = yes
ec_num_data_fragments = 8
ec_num_parity_fragments = 2
""" % {'ec_type': DEFAULT_TEST_EC_TYPE})
self.assertRaisesWithMessage(PolicyError,
'is already assigned to this policy',
parse_storage_policies, bad_ec_conf)
def test_add_remove_names(self):
test_policies = [StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False),
StoragePolicy(2, 'two', False)]
policies = StoragePolicyCollection(test_policies)
# add names
policies.add_policy_alias(1, 'tahi')
self.assertEqual(policies.get_by_name('tahi'), test_policies[1])
policies.add_policy_alias(2, 'rua', 'dos')
self.assertEqual(policies.get_by_name('rua'), test_policies[2])
self.assertEqual(policies.get_by_name('dos'), test_policies[2])
self.assertRaisesWithMessage(PolicyError, 'Invalid name',
policies.add_policy_alias, 2, 'double\n')
self.assertRaisesWithMessage(PolicyError, 'Invalid name',
policies.add_policy_alias, 2, '')
# try to add existing name
self.assertRaisesWithMessage(PolicyError, 'Duplicate name',
policies.add_policy_alias, 2, 'two')
self.assertRaisesWithMessage(PolicyError, 'Duplicate name',
policies.add_policy_alias, 1, 'two')
# remove name
policies.remove_policy_alias('tahi')
self.assertIsNone(policies.get_by_name('tahi'))
# remove only name
self.assertRaisesWithMessage(PolicyError,
'Policies must have at least one name.',
policies.remove_policy_alias, 'zero')
# remove non-existent name
self.assertRaisesWithMessage(PolicyError,
'No policy with name',
policies.remove_policy_alias, 'three')
# remove default name
policies.remove_policy_alias('two')
self.assertIsNone(policies.get_by_name('two'))
self.assertEqual(policies.get_by_index(2).name, 'rua')
# change default name to a new name
policies.change_policy_primary_name(2, 'two')
self.assertEqual(policies.get_by_name('two'), test_policies[2])
self.assertEqual(policies.get_by_index(2).name, 'two')
# change default name to an existing alias
policies.change_policy_primary_name(2, 'dos')
self.assertEqual(policies.get_by_index(2).name, 'dos')
# change default name to a bad new name
self.assertRaisesWithMessage(PolicyError, 'Invalid name',
policies.change_policy_primary_name,
2, 'bad\nname')
# change default name to a name belonging to another policy
self.assertRaisesWithMessage(PolicyError,
'Other policy',
policies.change_policy_primary_name,
1, 'dos')
def test_deprecated_default(self):
bad_conf = self._conf("""
[storage-policy:1]
name = one
deprecated = yes
default = yes
""")
self.assertRaisesWithMessage(
PolicyError, "Deprecated policy can not be default",
parse_storage_policies, bad_conf)
def test_multiple_policies_with_no_policy_index_zero(self):
bad_conf = self._conf("""
[storage-policy:1]
name = one
default = yes
""")
# Policy-0 will not be implicitly added if other policies are defined
self.assertRaisesWithMessage(
PolicyError, "must specify a storage policy section "
"for policy index 0", parse_storage_policies, bad_conf)
@mock.patch.object(swift.common.storage_policy, 'VALID_EC_TYPES',
                   ['isa_l_rs_vand', 'isa_l_rs_cauchy'])
@mock.patch('swift.common.storage_policy.ECDriver')
def test_known_bad_ec_config(self, mock_driver):
    # isa_l_rs_vand with >= 5 parity fragments is a configuration known
    # to harm data durability (lp bug 1639691): it must be rejected unless
    # the policy is deprecated, in which case only a warning is logged.

    # isa_l_rs_cauchy with 5 parity: fine, no warnings, driver built.
    good_conf = self._conf("""
    [storage-policy:0]
    name = bad-policy
    policy_type = erasure_coding
    ec_type = isa_l_rs_cauchy
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 5
    """)

    with capture_logging('swift.common.storage_policy') as records:
        parse_storage_policies(good_conf)
    mock_driver.assert_called_once()
    mock_driver.reset_mock()
    self.assertFalse([(r.levelname, r.msg) for r in records])

    # isa_l_rs_vand with < 5 parity: still acceptable, no warnings.
    good_conf = self._conf("""
    [storage-policy:0]
    name = bad-policy
    policy_type = erasure_coding
    ec_type = isa_l_rs_vand
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 4
    """)

    with capture_logging('swift.common.storage_policy') as records:
        parse_storage_policies(good_conf)
    mock_driver.assert_called_once()
    mock_driver.reset_mock()
    self.assertFalse([(r.levelname, r.msg) for r in records])

    # isa_l_rs_vand with 5 parity and NOT deprecated: hard failure, and
    # the driver must never be constructed.
    bad_conf = self._conf("""
    [storage-policy:0]
    name = bad-policy
    policy_type = erasure_coding
    ec_type = isa_l_rs_vand
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 5
    """)

    with capture_logging('swift.common.storage_policy') as records, \
            self.assertRaises(PolicyError) as exc_mgr:
        parse_storage_policies(bad_conf)
    self.assertEqual(exc_mgr.exception.args[0],
                     'Storage policy bad-policy uses an EC '
                     'configuration known to harm data durability. This '
                     'policy MUST be deprecated.')
    mock_driver.assert_not_called()
    mock_driver.reset_mock()
    # ...but the rejection is also logged as a WARNING with guidance.
    self.assertEqual([r.levelname for r in records],
                     ['WARNING'])
    for msg in ('known to harm data durability',
                'Any data in this policy should be migrated',
                'https://bugs.launchpad.net/swift/+bug/1639691'):
        self.assertIn(msg, records[0].msg)

    # Same bad EC scheme, but deprecated (with a separate default
    # policy): parsing succeeds, warning still logged.
    slightly_less_bad_conf = self._conf("""
    [storage-policy:0]
    name = bad-policy
    policy_type = erasure_coding
    ec_type = isa_l_rs_vand
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 5
    deprecated = true

    [storage-policy:1]
    name = good-policy
    policy_type = erasure_coding
    ec_type = isa_l_rs_cauchy
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 5
    default = true
    """)

    with capture_logging('swift.common.storage_policy') as records:
        parse_storage_policies(slightly_less_bad_conf)
    # one ECDriver per EC policy
    self.assertEqual(2, mock_driver.call_count)
    mock_driver.reset_mock()
    self.assertEqual([r.levelname for r in records],
                     ['WARNING'])
    for msg in ('known to harm data durability',
                'Any data in this policy should be migrated',
                'https://bugs.launchpad.net/swift/+bug/1639691'):
        self.assertIn(msg, records[0].msg)
def test_no_default(self):
    """Default-policy resolution: an explicit non-zero default works;
    multiple policies with no (usable) default is an error."""
    orig_conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:1]
    name = one
    default = yes
    """)

    policies = parse_storage_policies(orig_conf)
    self.assertEqual(policies.default, policies[1])
    # BUG FIX: this was assertTrue(policies[0].name, 'Policy-0') -- the
    # second argument of assertTrue is the failure *message*, so nothing
    # was ever compared.  Policy 0 is explicitly named 'zero' above.
    self.assertEqual(policies[0].name, 'zero')

    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:1]
    name = one
    deprecated = yes
    """)

    # multiple polices and no explicit default
    self.assertRaisesWithMessage(
        PolicyError, "Unable to find default",
        parse_storage_policies, bad_conf)

    good_conf = self._conf("""
    [storage-policy:0]
    name = Policy-0
    default = yes
    [storage-policy:1]
    name = one
    deprecated = yes
    """)

    policies = parse_storage_policies(good_conf)
    self.assertEqual(policies.default, policies[0])
    # BUG FIX: was assertTrue(policies[1].is_deprecated, True) -- again
    # the second argument was only a failure message; drop it.
    self.assertTrue(policies[1].is_deprecated)
def test_parse_storage_policies(self):
    """Exercise every conf-validation error path of
    parse_storage_policies(), then one good conf to verify indexes,
    names, ring names, defaults and legacy fallback.
    """
    # PolicyError when every policy (including 0) is deprecated
    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    deprecated = yes
    [storage-policy:1]
    name = one
    deprecated = yes
    """)
    self.assertRaisesWithMessage(
        PolicyError, "Unable to find policy that's not deprecated",
        parse_storage_policies, bad_conf)

    # empty index
    bad_conf = self._conf("""
    [storage-policy:]
    name = zero
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid index',
                                 parse_storage_policies, bad_conf)

    # negative index
    bad_conf = self._conf("""
    [storage-policy:-1]
    name = zero
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid index',
                                 parse_storage_policies, bad_conf)

    # non-numeric index
    bad_conf = self._conf("""
    [storage-policy:x]
    name = zero
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid index',
                                 parse_storage_policies, bad_conf)

    # mixed alpha-numeric index
    bad_conf = self._conf("""
    [storage-policy:x-1]
    name = zero
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid index',
                                 parse_storage_policies, bad_conf)

    # NOTE(review): exact duplicate of the 'storage-policy:x' case above;
    # presumably a copy/paste leftover -- harmless, kept as-is.
    bad_conf = self._conf("""
    [storage-policy:x]
    name = zero
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid index',
                                 parse_storage_policies, bad_conf)

    # extra colon in the section name
    bad_conf = self._conf("""
    [storage-policy:x:1]
    name = zero
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid index',
                                 parse_storage_policies, bad_conf)

    # unknown option in a policy section
    bad_conf = self._conf("""
    [storage-policy:1]
    name = zero
    boo = berries
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid option',
                                 parse_storage_policies, bad_conf)

    # empty name
    bad_conf = self._conf("""
    [storage-policy:0]
    name =
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid name',
                                 parse_storage_policies, bad_conf)

    # the reserved legacy name Policy-0 is only valid for index 0
    bad_conf = self._conf("""
    [storage-policy:3]
    name = Policy-0
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid name',
                                 parse_storage_policies, bad_conf)

    # ...checked case-insensitively
    bad_conf = self._conf("""
    [storage-policy:1]
    name = policY-0
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid name',
                                 parse_storage_policies, bad_conf)

    # names must be unique, case-insensitively
    bad_conf = self._conf("""
    [storage-policy:0]
    name = one
    [storage-policy:1]
    name = ONE
    """)
    self.assertRaisesWithMessage(PolicyError, 'Duplicate name',
                                 parse_storage_policies, bad_conf)

    # underscores are not allowed in policy names
    bad_conf = self._conf("""
    [storage-policy:0]
    name = good_stuff
    """)
    self.assertRaisesWithMessage(PolicyError, 'Invalid name',
                                 parse_storage_policies, bad_conf)

    # policy_type = erasure_coding
    # missing ec_type, ec_num_data_fragments and ec_num_parity_fragments
    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:1]
    name = ec10-4
    policy_type = erasure_coding
    """)
    self.assertRaisesWithMessage(PolicyError, 'Missing ec_type',
                                 parse_storage_policies, bad_conf)

    # missing ec_type, but other options valid...
    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:1]
    name = ec10-4
    policy_type = erasure_coding
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 4
    """)
    self.assertRaisesWithMessage(PolicyError, 'Missing ec_type',
                                 parse_storage_policies, bad_conf)

    # ec_type specified, but invalid...
    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    default = yes
    [storage-policy:1]
    name = ec10-4
    policy_type = erasure_coding
    ec_type = garbage_alg
    ec_num_data_fragments = 10
    ec_num_parity_fragments = 4
    """)
    self.assertRaisesWithMessage(PolicyError,
                                 'Wrong ec_type garbage_alg for policy '
                                 'ec10-4, should be one of "%s"' %
                                 (', '.join(VALID_EC_TYPES)),
                                 parse_storage_policies, bad_conf)

    # missing and invalid ec_num_parity_fragments
    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:1]
    name = ec10-4
    policy_type = erasure_coding
    ec_type = %(ec_type)s
    ec_num_data_fragments = 10
    """ % {'ec_type': DEFAULT_TEST_EC_TYPE})
    self.assertRaisesWithMessage(PolicyError,
                                 'Invalid ec_num_parity_fragments',
                                 parse_storage_policies, bad_conf)

    # negative, zero and non-numeric parity counts are all rejected
    for num_parity in ('-4', '0', 'x'):
        bad_conf = self._conf("""
        [storage-policy:0]
        name = zero
        [storage-policy:1]
        name = ec10-4
        policy_type = erasure_coding
        ec_type = %(ec_type)s
        ec_num_data_fragments = 10
        ec_num_parity_fragments = %(num_parity)s
        """ % {'ec_type': DEFAULT_TEST_EC_TYPE,
               'num_parity': num_parity})
        self.assertRaisesWithMessage(PolicyError,
                                     'Invalid ec_num_parity_fragments',
                                     parse_storage_policies, bad_conf)

    # missing and invalid ec_num_data_fragments
    bad_conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:1]
    name = ec10-4
    policy_type = erasure_coding
    ec_type = %(ec_type)s
    ec_num_parity_fragments = 4
    """ % {'ec_type': DEFAULT_TEST_EC_TYPE})
    self.assertRaisesWithMessage(PolicyError,
                                 'Invalid ec_num_data_fragments',
                                 parse_storage_policies, bad_conf)

    for num_data in ('-10', '0', 'x'):
        bad_conf = self._conf("""
        [storage-policy:0]
        name = zero
        [storage-policy:1]
        name = ec10-4
        policy_type = erasure_coding
        ec_type = %(ec_type)s
        ec_num_data_fragments = %(num_data)s
        ec_num_parity_fragments = 4
        """ % {'num_data': num_data, 'ec_type': DEFAULT_TEST_EC_TYPE})
        self.assertRaisesWithMessage(PolicyError,
                                     'Invalid ec_num_data_fragments',
                                     parse_storage_policies, bad_conf)

    # invalid ec_object_segment_size
    for segment_size in ('-4', '0', 'x'):
        bad_conf = self._conf("""
        [storage-policy:0]
        name = zero
        [storage-policy:1]
        name = ec10-4
        policy_type = erasure_coding
        ec_object_segment_size = %(segment_size)s
        ec_type = %(ec_type)s
        ec_num_data_fragments = 10
        ec_num_parity_fragments = 4
        """ % {'segment_size': segment_size,
               'ec_type': DEFAULT_TEST_EC_TYPE})
        self.assertRaisesWithMessage(PolicyError,
                                     'Invalid ec_object_segment_size',
                                     parse_storage_policies, bad_conf)

    # Additional section added to ensure parser ignores other sections
    # (also: a duplicate [storage-policy:6] section -- last one wins)
    conf = self._conf("""
    [some-other-section]
    foo = bar
    [storage-policy:0]
    name = zero
    [storage-policy:5]
    name = one
    default = yes
    [storage-policy:6]
    name = duplicate-sections-are-ignored
    [storage-policy:6]
    name = apple
    """)
    policies = parse_storage_policies(conf)

    self.assertEqual(True, policies.get_by_index(5).is_default)
    self.assertEqual(False, policies.get_by_index(0).is_default)
    self.assertEqual(False, policies.get_by_index(6).is_default)

    # index 0 keeps the legacy unsuffixed ring name
    self.assertEqual("object", policies.get_by_name("zero").ring_name)
    self.assertEqual("object-5", policies.get_by_name("one").ring_name)
    self.assertEqual("object-6", policies.get_by_name("apple").ring_name)

    self.assertEqual(0, int(policies.get_by_name('zero')))
    self.assertEqual(5, int(policies.get_by_name('one')))
    self.assertEqual(6, int(policies.get_by_name('apple')))

    self.assertEqual("zero", policies.get_by_index(0).name)
    self.assertEqual("zero", policies.get_by_index("0").name)
    self.assertEqual("one", policies.get_by_index(5).name)
    self.assertEqual("apple", policies.get_by_index(6).name)
    # None and '' both fall back to policy 0
    self.assertEqual("zero", policies.get_by_index(None).name)
    self.assertEqual("zero", policies.get_by_index('').name)

    self.assertEqual(policies.get_by_index(0), policies.legacy)
def test_reload_invalid_storage_policies(self):
    """A bad conf (duplicate index 0 vs 00) makes reload_storage_policies
    exit via SystemExit with a descriptive message."""
    conf = self._conf("""
    [storage-policy:0]
    name = zero
    [storage-policy:00]
    name = double-zero
    """)
    with NamedTemporaryFile(mode='w+t') as f:
        conf.write(f)
        f.flush()
        with mock.patch('swift.common.utils.SWIFT_CONF_FILE',
                        new=f.name):
            try:
                reload_storage_policies()
            except SystemExit as e:
                err_msg = str(e)
            else:
                self.fail('SystemExit not raised')
    parts = [
        'Invalid Storage Policy Configuration',
        'Duplicate index',
    ]
    for expected in parts:
        # IDIOM FIX: assertIn produces an informative failure message on
        # its own; the old assertTrue(expected in err_msg, ...) hand-rolled
        # the same message.
        self.assertIn(expected, err_msg)
def test_storage_policy_ordering(self):
    """Policies sort by index regardless of construction order, and
    support rich comparison against plain ints."""
    collection = StoragePolicyCollection([
        StoragePolicy(0, 'zero', is_default=True),
        StoragePolicy(503, 'error'),
        StoragePolicy(204, 'empty'),
        StoragePolicy(404, 'missing'),
    ])
    sorted_indexes = [int(p) for p in sorted(list(collection))]
    self.assertEqual([0, 204, 404, 503], sorted_indexes)

    p503 = collection[503]
    self.assertTrue(501 < p503 < 507)
def test_get_object_ring(self):
    # get_object_ring() lazily loads a policy's ring on first use and
    # caches it on the policy thereafter.
    test_policies = [StoragePolicy(0, 'aay', True),
                     StoragePolicy(1, 'bee', False),
                     StoragePolicy(2, 'cee', False)]
    policies = StoragePolicyCollection(test_policies)

    class NamedFakeRing(FakeRing):
        # Remembers the ring_name it was constructed with so the test can
        # verify each policy loads the ring matching its own ring_name.

        def __init__(self, swift_dir, ring_name=None):
            self.ring_name = ring_name
            super(NamedFakeRing, self).__init__()

    with mock.patch('swift.common.storage_policy.Ring',
                    new=NamedFakeRing):
        for policy in policies:
            self.assertFalse(policy.object_ring)
            ring = policies.get_object_ring(int(policy), '/path/not/used')
            self.assertEqual(ring.ring_name, policy.ring_name)
            self.assertTrue(policy.object_ring)
            self.assertTrue(isinstance(policy.object_ring, NamedFakeRing))

    def blow_up(*args, **kwargs):
        raise Exception('kaboom!')

    # Rings are cached: once loaded, Ring() must never be called again,
    # so patching it with blow_up proves the cache is hit.
    with mock.patch('swift.common.storage_policy.Ring', new=blow_up):
        for policy in policies:
            policy.load_ring('/path/not/used')
            expected = policies.get_object_ring(int(policy),
                                                '/path/not/used')
            self.assertEqual(policy.object_ring, expected)

    # bad policy index
    self.assertRaises(PolicyError, policies.get_object_ring, 99,
                      '/path/not/used')
def test_bind_ports_cache(self):
    # BindPortsCache collects, across all policies' rings, the set of
    # ports for devices on this node's IPs.  It must (a) only reload
    # ring files whose mtime changed, (b) tolerate missing ring files,
    # and (c) call whataremyips() exactly once, in the constructor.
    test_policies = [StoragePolicy(0, 'aay', True),
                     StoragePolicy(1, 'bee', False),
                     StoragePolicy(2, 'cee', False)]

    my_ips = ['1.2.3.4', '2.3.4.5']      # IPs considered "ours"
    other_ips = ['3.4.5.6', '4.5.6.7']   # devices on these are ignored
    bind_ip = my_ips[1]
    # First generation of ring device tables (includes None holes and a
    # duplicate port to exercise de-duplication).
    devs_by_ring_name1 = {
        'object': [  # 'aay'
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[0],
             'port': 6006},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[0],
             'port': 6007},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[1],
             'port': 6008},
            None,
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[1],
             'port': 6009}],
        'object-1': [  # 'bee'
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[1],
             'port': 6006},  # dupe
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[0],
             'port': 6010},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[1],
             'port': 6011},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[1],
             'port': 6012}],
        'object-2': [  # 'cee'
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[0],
             'port': 6010},  # on our IP and a not-us IP
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[0],
             'port': 6013},
            None,
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[1],
             'port': 6014},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[1],
             'port': 6015}],
    }
    # Second generation with different ports, used after mtime bumps.
    devs_by_ring_name2 = {
        'object': [  # 'aay'
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[0],
             'port': 6016},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[1],
             'port': 6019}],
        'object-1': [  # 'bee'
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[1],
             'port': 6016},  # dupe
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[1],
             'port': 6022}],
        'object-2': [  # 'cee'
            {'id': 0, 'zone': 0, 'region': 1, 'ip': my_ips[0],
             'port': 6020},
            {'id': 0, 'zone': 0, 'region': 1, 'ip': other_ips[1],
             'port': 6025}],
    }
    ring_files = [ring_name + '.ring.gz'
                  for ring_name in sorted(devs_by_ring_name1)]

    def _fake_load(gz_path, stub_objs, metadata_only=False):
        # Serve RingData for the ring named by the file path, from
        # whichever generation dict is bound via partial() below.
        return RingData(
            devs=stub_objs[os.path.basename(gz_path)[:-8]],
            replica2part2dev_id=[],
            part_shift=24)

    with mock.patch(
        'swift.common.storage_policy.RingData.load'
    ) as mock_ld, \
            patch_policies(test_policies), \
            mock.patch('swift.common.storage_policy.whataremyips') \
            as mock_whataremyips, \
            temptree(ring_files) as tempdir:
        mock_whataremyips.return_value = my_ips

        cache = BindPortsCache(tempdir, bind_ip)

        self.assertEqual([
            mock.call(bind_ip),
        ], mock_whataremyips.mock_calls)
        mock_whataremyips.reset_mock()

        mock_ld.side_effect = partial(_fake_load,
                                      stub_objs=devs_by_ring_name1)
        # First call loads every ring (metadata only) and returns the
        # de-duplicated set of ports for devices on our IPs.
        self.assertEqual(set([
            6006, 6008, 6011, 6010, 6014,
        ]), cache.all_bind_ports_for_node())
        self.assertEqual([
            mock.call(os.path.join(tempdir, ring_files[0]),
                      metadata_only=True),
            mock.call(os.path.join(tempdir, ring_files[1]),
                      metadata_only=True),
            mock.call(os.path.join(tempdir, ring_files[2]),
                      metadata_only=True),
        ], mock_ld.mock_calls)
        mock_ld.reset_mock()

        mock_ld.side_effect = partial(_fake_load,
                                      stub_objs=devs_by_ring_name2)
        # Unchanged mtimes: cached result returned, no reload at all.
        self.assertEqual(set([
            6006, 6008, 6011, 6010, 6014,
        ]), cache.all_bind_ports_for_node())
        self.assertEqual([], mock_ld.mock_calls)

        # but when all the file mtimes are made different, it'll
        # reload
        for gz_file in [os.path.join(tempdir, n)
                        for n in ring_files]:
            os.utime(gz_file, (88, 88))

        self.assertEqual(set([
            6016, 6020,
        ]), cache.all_bind_ports_for_node())
        self.assertEqual([
            mock.call(os.path.join(tempdir, ring_files[0]),
                      metadata_only=True),
            mock.call(os.path.join(tempdir, ring_files[1]),
                      metadata_only=True),
            mock.call(os.path.join(tempdir, ring_files[2]),
                      metadata_only=True),
        ], mock_ld.mock_calls)
        mock_ld.reset_mock()

        # Don't do something stupid like crash if a ring file is missing.
        os.unlink(os.path.join(tempdir, 'object-2.ring.gz'))

        self.assertEqual(set([
            6016, 6020,
        ]), cache.all_bind_ports_for_node())
        self.assertEqual([], mock_ld.mock_calls)

    # whataremyips() is only called in the constructor
    self.assertEqual([], mock_whataremyips.mock_calls)
def test_singleton_passthrough(self):
    """Indexing the global POLICIES singleton returns the same objects
    that were patched in."""
    fake_policies = [StoragePolicy(0, 'aay', True),
                     StoragePolicy(1, 'bee', False),
                     StoragePolicy(2, 'cee', False)]
    with patch_policies(fake_policies):
        for policy in POLICIES:
            self.assertEqual(POLICIES[int(policy)], policy)
def test_quorum_size_replication(self):
    """Replication quorum is a simple majority of the replica count."""
    for replicas, expected in ((1, 1), (2, 1), (3, 2), (4, 2), (5, 3)):
        policy = StoragePolicy(0, 'zero',
                               object_ring=FakeRing(replicas=replicas))
        self.assertEqual(policy.quorum, expected)
def test_quorum_size_erasure_coding(self):
    """EC quorum is (ndata + minimum parity needed by the scheme),
    scaled by the duplication factor."""
    ec_policies = [
        ECStoragePolicy(10, 'ec8-2', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=8, ec_nparity=2),
        ECStoragePolicy(11, 'df10-6', ec_type='flat_xor_hd_4',
                        ec_ndata=10, ec_nparity=6),
        ECStoragePolicy(12, 'ec4-2-dup', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=4, ec_nparity=2, ec_duplication_factor=2),
    ]
    for policy in ec_policies:
        min_parity = policy.pyeclib_driver.min_parity_fragments_needed()
        expected = ((policy.ec_ndata + min_parity)
                    * policy.ec_duplication_factor)
        self.assertEqual(expected, policy.quorum)
def test_validate_ring(self):
    # Loading a ring whose replica count does not equal the policy's
    # total fragment count (unique fragments * duplication factor) must
    # raise RingLoadError with an explanatory message.
    test_policies = [
        ECStoragePolicy(0, 'ec8-2', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=8, ec_nparity=2,
                        is_default=True),
        ECStoragePolicy(1, 'ec10-4', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=10, ec_nparity=4),
        ECStoragePolicy(2, 'ec4-2', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=4, ec_nparity=2),
        ECStoragePolicy(3, 'ec4-2-2dup', ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=4, ec_nparity=2,
                        ec_duplication_factor=2)
    ]
    policies = StoragePolicyCollection(test_policies)

    class MockRingData(object):
        # Minimal stand-in exposing only the replica_count attribute
        # that ring validation inspects.
        def __init__(self, num_replica):
            self.replica_count = num_replica

    def do_test(actual_load_ring_replicas):
        # Pair each policy with a (wrong) replica count and assert the
        # load fails with the exact expected message.
        for policy, ring_replicas in zip(policies,
                                         actual_load_ring_replicas):
            with mock.patch('swift.common.ring.ring.RingData.load',
                            return_value=MockRingData(ring_replicas)):
                necessary_replica_num = (policy.ec_n_unique_fragments *
                                         policy.ec_duplication_factor)
                with mock.patch(
                        'swift.common.ring.ring.validate_configuration'):
                    msg = 'EC ring for policy %s needs to be configured ' \
                          'with exactly %d replicas.' % \
                          (policy.name, necessary_replica_num)
                    self.assertRaisesWithMessage(RingLoadError, msg,
                                                 policy.load_ring, 'mock')

    # first, do something completely different
    do_test([8, 10, 7, 11])
    # then again, closer to true, but fractional
    do_test([9.9, 14.1, 5.99999, 12.000000001])
def test_storage_policy_get_info(self):
    """get_info(config=True/False) returns the full vs. the public view
    of each policy's attributes."""
    test_policies = [
        StoragePolicy(0, 'zero', is_default=True),
        StoragePolicy(1, 'one', is_deprecated=True,
                      aliases='tahi, uno'),
        ECStoragePolicy(10, 'ten',
                        ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=10, ec_nparity=3),
        ECStoragePolicy(11, 'done', is_deprecated=True,
                        ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=10, ec_nparity=3),
        # BUG FIX: the expected-info table below has always contained
        # entries for policy index 12 ('twelve', ec_duplication_factor=2),
        # but no such policy was constructed, so those expectations were
        # dead code.  Add the policy the table describes.
        ECStoragePolicy(12, 'twelve',
                        ec_type=DEFAULT_TEST_EC_TYPE,
                        ec_ndata=10, ec_nparity=3,
                        ec_duplication_factor=2),
    ]
    policies = StoragePolicyCollection(test_policies)
    # Keyed by (policy index, config flag) -> expected get_info() dict.
    expected = {
        # default replication
        (0, True): {
            'name': 'zero',
            'aliases': 'zero',
            'default': True,
            'deprecated': False,
            'policy_type': REPL_POLICY
        },
        (0, False): {
            'name': 'zero',
            'aliases': 'zero',
            'default': True,
        },
        # deprecated replication
        (1, True): {
            'name': 'one',
            'aliases': 'one, tahi, uno',
            'default': False,
            'deprecated': True,
            'policy_type': REPL_POLICY
        },
        (1, False): {
            'name': 'one',
            'aliases': 'one, tahi, uno',
            'deprecated': True,
        },
        # enabled ec
        (10, True): {
            'name': 'ten',
            'aliases': 'ten',
            'default': False,
            'deprecated': False,
            'policy_type': EC_POLICY,
            'ec_type': DEFAULT_TEST_EC_TYPE,
            'ec_num_data_fragments': 10,
            'ec_num_parity_fragments': 3,
            'ec_object_segment_size': DEFAULT_EC_OBJECT_SEGMENT_SIZE,
            'ec_duplication_factor': 1,
        },
        (10, False): {
            'name': 'ten',
            'aliases': 'ten',
        },
        # deprecated ec
        (11, True): {
            'name': 'done',
            'aliases': 'done',
            'default': False,
            'deprecated': True,
            'policy_type': EC_POLICY,
            'ec_type': DEFAULT_TEST_EC_TYPE,
            'ec_num_data_fragments': 10,
            'ec_num_parity_fragments': 3,
            'ec_object_segment_size': DEFAULT_EC_OBJECT_SEGMENT_SIZE,
            'ec_duplication_factor': 1,
        },
        (11, False): {
            'name': 'done',
            'aliases': 'done',
            'deprecated': True,
        },
        # enabled ec with ec_duplication
        (12, True): {
            'name': 'twelve',
            'aliases': 'twelve',
            'default': False,
            'deprecated': False,
            'policy_type': EC_POLICY,
            'ec_type': DEFAULT_TEST_EC_TYPE,
            'ec_num_data_fragments': 10,
            'ec_num_parity_fragments': 3,
            'ec_object_segment_size': DEFAULT_EC_OBJECT_SEGMENT_SIZE,
            'ec_duplication_factor': 2,
        },
        (12, False): {
            'name': 'twelve',
            'aliases': 'twelve',
        },
    }
    self.maxDiff = None
    for policy in policies:
        expected_info = expected[(int(policy), True)]
        self.assertEqual(policy.get_info(config=True), expected_info)
        expected_info = expected[(int(policy), False)]
        self.assertEqual(policy.get_info(config=False), expected_info)
def test_ec_fragment_size_cached(self):
    # policy.fragment_size must be computed once via the pyeclib driver
    # and memoized; repeated reads must not call get_segment_info again.
    policy = ECStoragePolicy(
        0, 'ec2-1', ec_type=DEFAULT_TEST_EC_TYPE,
        ec_ndata=2, ec_nparity=1, object_ring=FakeRing(replicas=3),
        ec_segment_size=DEFAULT_EC_OBJECT_SEGMENT_SIZE, is_default=True)

    # Compute the reference value with an independent driver instance.
    ec_driver = ECDriver(ec_type=DEFAULT_TEST_EC_TYPE,
                         k=2, m=1)
    expected_fragment_size = ec_driver.get_segment_info(
        DEFAULT_EC_OBJECT_SEGMENT_SIZE,
        DEFAULT_EC_OBJECT_SEGMENT_SIZE)['fragment_size']

    with mock.patch.object(
            policy.pyeclib_driver, 'get_segment_info') as fake:
        fake.return_value = {
            'fragment_size': expected_fragment_size}

        for x in range(10):
            self.assertEqual(expected_fragment_size,
                             policy.fragment_size)
            # pyeclib_driver.get_segment_info is called only once
            self.assertEqual(1, fake.call_count)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| matthewoliver/swift | test/unit/common/test_storage_policy.py | Python | apache-2.0 | 57,607 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.diff.tools.fragmented;
import com.intellij.diff.DiffContext;
import com.intellij.diff.actions.AllLinesIterator;
import com.intellij.diff.actions.BufferedLineIterator;
import com.intellij.diff.actions.impl.OpenInEditorWithMouseAction;
import com.intellij.diff.actions.impl.SetEditorSettingsAction;
import com.intellij.diff.comparison.DiffTooBigException;
import com.intellij.diff.contents.DocumentContent;
import com.intellij.diff.fragments.LineFragment;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.tools.util.*;
import com.intellij.diff.tools.util.base.InitialScrollPositionSupport;
import com.intellij.diff.tools.util.base.ListenerDiffViewerBase;
import com.intellij.diff.tools.util.base.TextDiffSettingsHolder.TextDiffSettings;
import com.intellij.diff.tools.util.base.TextDiffViewerUtil;
import com.intellij.diff.tools.util.side.TwosideTextDiffViewer;
import com.intellij.diff.tools.util.text.TwosideTextDiffProvider;
import com.intellij.diff.util.*;
import com.intellij.diff.util.DiffUserDataKeysEx.ScrollToPolicy;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.diff.DiffBundle;
import com.intellij.openapi.diff.LineTokenizer;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.actionSystem.EditorActionManager;
import com.intellij.openapi.editor.actionSystem.ReadonlyFragmentModificationHandler;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.pom.Navigatable;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.TIntFunction;
import org.jetbrains.annotations.*;
import javax.swing.*;
import java.util.*;
import static com.intellij.diff.util.DiffUtil.getLinesContent;
public class UnifiedDiffViewer extends ListenerDiffViewerBase {
@NotNull protected final EditorEx myEditor;    // single editor rendering the combined (unified) diff text
@NotNull protected final Document myDocument;  // scratch document backing myEditor; rebuilt on each rediff
@NotNull private final UnifiedDiffPanel myPanel;

@NotNull private final SetEditorSettingsAction myEditorSettingsAction;

@NotNull private final PrevNextDifferenceIterable myPrevNextDifferenceIterable;
@NotNull private final MyStatusPanel myStatusPanel;

@NotNull private final MyInitialScrollHelper myInitialScrollHelper = new MyInitialScrollHelper();
@NotNull private final MyFoldingModel myFoldingModel;

@NotNull private final TwosideTextDiffProvider.NoIgnore myTextDiffProvider;

// Side whose document backs editing; defaults to RIGHT, adjusted in the
// constructor/context hints when only one side is editable.
@NotNull protected Side myMasterSide = Side.RIGHT;

// Model produced by the last successful rediff; null until then.
@Nullable private ChangedBlockData myChangedBlockData;

private final boolean[] myForceReadOnlyFlags;  // per-side read-only overrides from request/context
private boolean myReadOnlyLockSet = false;

// Re-entrancy guards for the two-way document synchronization.
private boolean myDuringOnesideDocumentModification;
private boolean myDuringTwosideDocumentModification;

private boolean myStateIsOutOfDate; // whether something was changed since last rediff
private boolean mySuppressEditorTyping; // our state is inconsistent. No typing can be handled correctly
/**
 * Builds the unified (one-editor) diff viewer for a two-sided text request.
 * Creates the combined document/editor, wires the diff provider and
 * registers the apply/append shortcut actions.
 */
public UnifiedDiffViewer(@NotNull DiffContext context, @NotNull DiffRequest request) {
  super(context, (ContentDiffRequest)request);

  myPrevNextDifferenceIterable = new MyPrevNextDifferenceIterable();
  myStatusPanel = new MyStatusPanel();
  myForceReadOnlyFlags = TextDiffViewerUtil.checkForceReadOnly(myContext, myRequest);

  // Pick the editable side as master when exactly one side is editable.
  boolean leftEditable = isEditable(Side.LEFT, false);
  boolean rightEditable = isEditable(Side.RIGHT, false);
  if (leftEditable && !rightEditable) myMasterSide = Side.LEFT;
  if (!leftEditable && rightEditable) myMasterSide = Side.RIGHT;

  // One scratch document/editor holds the combined text of both sides.
  myDocument = EditorFactory.getInstance().createDocument("");
  myEditor = DiffUtil.createEditor(myDocument, myProject, true, true);

  // Both side titles point at the same unified editor.
  List<JComponent> titles = DiffUtil.createTextTitles(myRequest, ContainerUtil.list(myEditor, myEditor));
  UnifiedContentPanel contentPanel = new UnifiedContentPanel(titles, myEditor);

  myPanel = new UnifiedDiffPanel(myProject, contentPanel, this, myContext);

  myFoldingModel = new MyFoldingModel(myEditor, this);

  myEditorSettingsAction = new SetEditorSettingsAction(getTextSettings(), getEditors());
  myEditorSettingsAction.applyDefaults();

  myTextDiffProvider = DiffUtil.createNoIgnoreTextDiffProvider(getProject(), getRequest(), getTextSettings(), this::rediff, this);

  new MyOpenInEditorWithMouseAction().install(getEditors());

  TextDiffViewerUtil.checkDifferentDocuments(myRequest);

  // Shortcut actions for applying/appending selected changes in both directions.
  DiffUtil.registerAction(new ReplaceSelectedChangesAction(Side.LEFT, true), myPanel);
  DiffUtil.registerAction(new AppendSelectedChangesAction(Side.LEFT, true), myPanel);
  DiffUtil.registerAction(new ReplaceSelectedChangesAction(Side.RIGHT, true), myPanel);
  DiffUtil.registerAction(new AppendSelectedChangesAction(Side.RIGHT, true), myPanel);
}
@Override
@CalledInAwt
protected void onInit() {
  // Finish wiring once the viewer is attached: listeners, typing support,
  // and the initial loading state of the panel.
  super.onInit();
  installEditorListeners();
  installTypingSupport();
  myPanel.setLoadingContent(); // We need loading panel only for initial rediff()
  myPanel.setPersistentNotifications(DiffUtil.getCustomNotifications(myContext, myRequest));
}
@Override
@CalledInAwt
protected void onDispose() {
  super.onDispose();
  // The editor was created by this viewer, so it must be released here.
  EditorFactory.getInstance().releaseEditor(myEditor);
}
@Override
@CalledInAwt
protected void processContextHints() {
  super.processContextHints();
  // A MASTER_SIDE hint from the request/context overrides the side chosen
  // in the constructor.
  Side side = DiffUtil.getUserData(myRequest, myContext, DiffUserDataKeys.MASTER_SIDE);
  if (side != null) myMasterSide = side;

  myInitialScrollHelper.processContext(myRequest);
}
@Override
@CalledInAwt
protected void updateContextHints() {
  // Persist scroll position and folding state back into the request so
  // they survive viewer recreation.
  super.updateContextHints();
  myInitialScrollHelper.updateContext(myRequest);
  myFoldingModel.updateContext(myRequest, getFoldingModelSettings());
}
@CalledInAwt
protected void updateEditorCanBeTyped() {
  // Viewer (read-only) mode whenever typing is suppressed or the master
  // side is not editable.
  myEditor.setViewer(mySuppressEditorTyping || !isEditable(myMasterSide, true));
}
// Enables editing of the unified document when the master side is editable:
// typed changes are propagated to the master-side document.
private void installTypingSupport() {
  if (!isEditable(myMasterSide, false)) return;

  updateEditorCanBeTyped();
  myEditor.getColorsScheme().setColor(EditorColors.READONLY_FRAGMENT_BACKGROUND_COLOR, null); // guarded blocks
  EditorActionManager.getInstance().setReadonlyFragmentModificationHandler(myDocument, new MyReadonlyFragmentModificationHandler());
  myDocument.putUserData(UndoManager.ORIGINAL_DOCUMENT, getDocument(myMasterSide)); // use undo of master document

  myDocument.addDocumentListener(new MyOnesideDocumentListener());
}
/** Assembles the toolbar: diff-provider actions first, then viewer toggles, then the inherited defaults. */
@NotNull
@Override
@CalledInAwt
public List<AnAction> createToolbarActions() {
  List<AnAction> actions = new ArrayList<>();
  actions.addAll(myTextDiffProvider.getToolbarActions());
  actions.add(new MyToggleExpandByDefaultAction());
  actions.add(new MyReadOnlyLockAction());
  actions.add(myEditorSettingsAction);
  actions.add(Separator.getInstance());
  actions.addAll(super.createToolbarActions());
  return actions;
}
/** Assembles the context-menu actions: provider actions, the folding toggle, then the inherited defaults. */
@NotNull
@Override
@CalledInAwt
public List<AnAction> createPopupActions() {
  List<AnAction> actions = new ArrayList<>();
  actions.addAll(myTextDiffProvider.getPopupActions());
  actions.add(new MyToggleExpandByDefaultAction());
  actions.add(Separator.getInstance());
  actions.addAll(super.createPopupActions());
  return actions;
}
@NotNull
protected List<AnAction> createEditorPopupActions() {
  List<AnAction> group = new ArrayList<>();

  // NOTE(review): the guard checks only Side.RIGHT editability but adds
  // replace actions for both sides -- presumably right-side editability is
  // the relevant condition for this viewer; confirm against callers.
  if (isEditable(Side.RIGHT, false)) {
    group.add(new ReplaceSelectedChangesAction(Side.LEFT, false));
    group.add(new ReplaceSelectedChangesAction(Side.RIGHT, false));
  }

  group.add(Separator.getInstance());
  group.addAll(TextDiffViewerUtil.createEditorPopupActions());

  return group;
}
@CalledInAwt
protected void installEditorListeners() {
  // Attach the editor popup menu to all editors of this viewer.
  new TextDiffViewerUtil.EditorActionsPopup(createEditorPopupActions()).install(getEditors());
}
//
// Diff
//
@Override
@CalledInAwt
protected void onSlowRediff() {
  super.onSlowRediff();
  // Show the busy indicator while the background diff computation runs.
  myStatusPanel.setBusy(true);
}
/**
 * Computes the diff on a background thread and returns a runnable that
 * applies the resulting unified model on the EDT.  Document access is
 * confined to read actions; cancellation is checked between stages.
 * Falls back to "too big" / "error" panel states on failure.
 */
@Override
@NotNull
protected Runnable performRediff(@NotNull final ProgressIndicator indicator) {
  try {
    indicator.checkCanceled();

    final Document document1 = getContent1().getDocument();
    final Document document2 = getContent2().getDocument();

    // Snapshot both texts inside one read action so the comparison sees
    // a consistent pair.
    final CharSequence[] texts = ReadAction.compute(() -> {
      return new CharSequence[]{document1.getImmutableCharSequence(), document2.getImmutableCharSequence()};
    });

    final List<LineFragment> fragments = myTextDiffProvider.compare(texts[0], texts[1], indicator);

    final DocumentContent content1 = getContent1();
    final DocumentContent content2 = getContent2();

    indicator.checkCanceled();
    // Build the combined text, syntax highlighter and range highlighters
    // for the unified document.
    TwosideDocumentData data = ReadAction.compute(() -> {
      indicator.checkCanceled();
      UnifiedFragmentBuilder builder = new UnifiedFragmentBuilder(fragments, document1, document2, myMasterSide);
      builder.exec();

      indicator.checkCanceled();

      EditorHighlighter highlighter = buildHighlighter(myProject, content1, content2,
                                                       texts[0], texts[1], builder.getRanges(),
                                                       builder.getText().length());

      UnifiedEditorRangeHighlighter rangeHighlighter = new UnifiedEditorRangeHighlighter(myProject, document1, document2,
                                                                                         builder.getRanges());

      return new TwosideDocumentData(builder, highlighter, rangeHighlighter);
    });
    UnifiedFragmentBuilder builder = data.getBuilder();

    // Prefer the second side's file type; fall back to the first.
    FileType fileType = content2.getContentType() == null ? content1.getContentType() : content2.getContentType();

    LineNumberConvertor convertor1 = builder.getConvertor1();
    LineNumberConvertor convertor2 = builder.getConvertor2();
    List<LineRange> changedLines = builder.getChangedLines();
    boolean isContentsEqual = builder.isEqual();

    CombinedEditorData editorData = new CombinedEditorData(builder.getText(), data.getHighlighter(), data.getRangeHighlighter(), fileType,
                                                           convertor1.createConvertor(), convertor2.createConvertor());

    return apply(editorData, builder.getBlocks(), convertor1, convertor2, changedLines, isContentsEqual);
  }
  catch (DiffTooBigException e) {
    return () -> {
      clearDiffPresentation();
      myPanel.setTooBigContent();
    };
  }
  catch (ProcessCanceledException e) {
    throw e;
  }
  catch (Throwable e) {
    LOG.error(e);
    return () -> {
      clearDiffPresentation();
      myPanel.setErrorContent();
    };
  }
}
// Resets everything produced by the previous rediff before a new state
// (or an error/too-big placeholder) is applied.
private void clearDiffPresentation() {
  myPanel.resetNotifications();

  myStatusPanel.setBusy(false);
  destroyChangedBlockData();

  myStateIsOutOfDate = false;
  mySuppressEditorTyping = false;
  updateEditorCanBeTyped();
}
/**
 * Disables typing in the unified editor (used when a document change cannot be mapped
 * back to the two-sided documents safely). Cleared by {@code clearDiffPresentation()}.
 */
@CalledInAwt
protected void markSuppressEditorTyping() {
  mySuppressEditorTyping = true;
  updateEditorCanBeTyped();
}
/**
 * Flags the current presentation as stale (the underlying documents changed since the last
 * rediff) and refreshes the gutter actions of every visible change accordingly.
 */
@CalledInAwt
protected void markStateIsOutOfDate() {
  myStateIsOutOfDate = true;
  if (myChangedBlockData == null) return;
  for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) {
    change.updateGutterActions();
  }
}
/**
 * Builds a combined syntax highlighter for the unified document from the two content
 * highlighters. Returns {@code null} when neither side provides a highlighter; when only
 * one side is missing, an empty highlighter substitutes for it.
 */
@Nullable
private EditorHighlighter buildHighlighter(@Nullable Project project,
                                           @NotNull DocumentContent content1,
                                           @NotNull DocumentContent content2,
                                           @NotNull CharSequence text1,
                                           @NotNull CharSequence text2,
                                           @NotNull List<HighlightRange> ranges,
                                           int textLength) {
  EditorHighlighter side1 = DiffUtil.initEditorHighlighter(project, content1, text1);
  EditorHighlighter side2 = DiffUtil.initEditorHighlighter(project, content2, text2);
  if (side1 == null && side2 == null) return null;
  EditorHighlighter left = side1 != null ? side1 : DiffUtil.initEmptyEditorHighlighter(text1);
  EditorHighlighter right = side2 != null ? side2 : DiffUtil.initEmptyEditorHighlighter(text2);
  return new UnifiedEditorHighlighter(myDocument, left, right, ranges, textLength);
}
/**
 * Creates the EDT runnable that installs a freshly computed unified diff into the editor:
 * replaces the document text, sets highlighters, guards read-only ranges, restores the caret
 * and folding, and publishes the new {@code ChangedBlockData}.
 * Statement order is significant: the old caret line must be captured before
 * {@code clearDiffPresentation()} drops the old convertors.
 */
@NotNull
private Runnable apply(@NotNull final CombinedEditorData data,
                       @NotNull final List<ChangedBlock> blocks,
                       @NotNull final LineNumberConvertor convertor1,
                       @NotNull final LineNumberConvertor convertor2,
                       @NotNull final List<LineRange> changedLines,
                       final boolean isContentsEqual) {
  return () -> {
    myFoldingModel.updateContext(myRequest, getFoldingModelSettings());
    // capture the caret as a two-sided position so it can be restored after the text swap
    LineCol oldCaretPosition = LineCol.fromOffset(myDocument, myEditor.getCaretModel().getPrimaryCaret().getOffset());
    Pair<int[], Side> oldCaretLineTwoside = transferLineFromOneside(oldCaretPosition.line);
    clearDiffPresentation();
    if (isContentsEqual) {
      boolean equalCharsets = TextDiffViewerUtil.areEqualCharsets(getContents());
      boolean equalSeparators = TextDiffViewerUtil.areEqualLineSeparators(getContents());
      myPanel.addNotification(DiffNotifications.createEqualContents(equalCharsets, equalSeparators));
    }
    // gutter line numbers: per-content remapping -> unified remapping -> folding remapping
    TIntFunction foldingLineConvertor = myFoldingModel.getLineNumberConvertor();
    TIntFunction contentConvertor1 = DiffUtil.getContentLineConvertor(getContent1());
    TIntFunction contentConvertor2 = DiffUtil.getContentLineConvertor(getContent2());
    myEditor.getGutterComponentEx().setLineNumberConvertor(
      mergeLineConverters(contentConvertor1, data.getLineConvertor1(), foldingLineConvertor),
      mergeLineConverters(contentConvertor2, data.getLineConvertor2(), foldingLineConvertor));
    // flag guards against our own document listener reacting to this programmatic change
    ApplicationManager.getApplication().runWriteAction(() -> {
      myDuringOnesideDocumentModification = true;
      try {
        myDocument.setText(data.getText());
      }
      finally {
        myDuringOnesideDocumentModification = false;
      }
    });
    if (data.getHighlighter() != null) myEditor.setHighlighter(data.getHighlighter());
    DiffUtil.setEditorCodeStyle(myProject, myEditor, data.getFileType());
    if (data.getRangeHighlighter() != null) data.getRangeHighlighter().apply(myProject, myDocument);
    ArrayList<UnifiedDiffChange> diffChanges = new ArrayList<>(blocks.size());
    for (ChangedBlock block : blocks) {
      diffChanges.add(new UnifiedDiffChange(this, block));
    }
    // when editable, guard the non-master side of each change so it cannot be typed into
    List<RangeMarker> guarderRangeBlocks = new ArrayList<>();
    if (!myEditor.isViewer()) {
      for (ChangedBlock block : blocks) {
        LineRange range = myMasterSide.select(block.getRange2(), block.getRange1());
        if (range.isEmpty()) continue;
        TextRange textRange = DiffUtil.getLinesRange(myDocument, range.start, range.end);
        guarderRangeBlocks.add(createGuardedBlock(textRange.getStartOffset(), textRange.getEndOffset()));
      }
      int textLength = myDocument.getTextLength(); // there is a 'fake' newline at the very end
      guarderRangeBlocks.add(createGuardedBlock(textLength, textLength));
    }
    myChangedBlockData = new ChangedBlockData(diffChanges, guarderRangeBlocks, convertor1, convertor2, isContentsEqual);
    // restore the caret: map the remembered two-sided line back into the new unified document
    int newCaretLine = transferLineToOneside(oldCaretLineTwoside.second,
                                             oldCaretLineTwoside.second.select(oldCaretLineTwoside.first));
    myEditor.getCaretModel().moveToOffset(LineCol.toOffset(myDocument, newCaretLine, oldCaretPosition.column));
    myFoldingModel.install(changedLines, myRequest, getFoldingModelSettings());
    myInitialScrollHelper.onRediff();
    myStatusPanel.update();
    myPanel.setGoodContent();
    myEditor.getGutterComponentEx().revalidateMarkup();
  };
}
/**
 * Creates a greedy guarded (read-only) range over [start, end] in the unified document,
 * so edits adjacent to the range are rejected as well.
 */
@NotNull
private RangeMarker createGuardedBlock(int start, int end) {
  RangeMarker guard = myDocument.createGuardedBlock(start, end);
  guard.setGreedyToLeft(true);
  guard.setGreedyToRight(true);
  return guard;
}
/**
 * Chains the per-content, unified-view and folding line-number converters into a single
 * gutter mapping (applied in that order).
 */
private static TIntFunction mergeLineConverters(@Nullable TIntFunction contentConvertor,
                                                @NotNull TIntFunction unifiedConvertor,
                                                @NotNull TIntFunction foldingConvertor) {
  return DiffUtil.mergeLineConverters(DiffUtil.mergeLineConverters(contentConvertor, unifiedConvertor), foldingConvertor);
}
/**
 * Maps a line of the given side's document to the unified document.
 * Returns -1 when no exact mapping exists (or no diff is installed).
 */
@CalledInAwt
public int transferLineToOnesideStrict(@NotNull Side side, int line) {
  ChangedBlockData blockData = myChangedBlockData;
  return blockData == null ? -1 : blockData.getLineNumberConvertor(side).convertInv(line);
}
/**
 * Maps a line of the unified document to the given side's document.
 * Returns -1 when no exact mapping exists (or no diff is installed).
 */
@CalledInAwt
public int transferLineFromOnesideStrict(@NotNull Side side, int line) {
  ChangedBlockData blockData = myChangedBlockData;
  return blockData == null ? -1 : blockData.getLineNumberConvertor(side).convert(line);
}
/**
 * Maps a line of the given side's document to the unified document, falling back to a
 * 'good enough' nearby position when an exact match is impossible.
 */
@CalledInAwt
public int transferLineToOneside(@NotNull Side side, int line) {
  ChangedBlockData blockData = myChangedBlockData;
  return blockData == null ? line : blockData.getLineNumberConvertor(side).convertApproximateInv(line);
}
/*
 * This convertor returns 'good enough' position, even if exact matching is impossible
 */
/**
 * Maps a unified-document line to a pair of two-sided lines plus the side the line
 * belongs to. When only one side has an exact mapping, that side wins; when neither does,
 * both are approximated and the master side is reported.
 *
 * @return pair of {leftLine, rightLine} and the preferred side
 */
@CalledInAwt
@NotNull
public Pair<int[], Side> transferLineFromOneside(int line) {
  int[] lines = new int[2];
  if (myChangedBlockData == null) {
    // no diff installed - identity mapping on both sides
    lines[0] = line;
    lines[1] = line;
    return Pair.create(lines, myMasterSide);
  }
  LineNumberConvertor lineConvertor1 = myChangedBlockData.getLineNumberConvertor(Side.LEFT);
  LineNumberConvertor lineConvertor2 = myChangedBlockData.getLineNumberConvertor(Side.RIGHT);
  Side side = myMasterSide;
  lines[0] = lineConvertor1.convert(line);
  lines[1] = lineConvertor2.convert(line);
  if (lines[0] == -1 && lines[1] == -1) {
    // no exact match on either side - approximate both, keep the master side
    lines[0] = lineConvertor1.convertApproximate(line);
    lines[1] = lineConvertor2.convertApproximate(line);
  }
  else if (lines[0] == -1) {
    // only the right side matched exactly - prefer it
    lines[0] = lineConvertor1.convertApproximate(line);
    side = Side.RIGHT;
  }
  else if (lines[1] == -1) {
    // only the left side matched exactly - prefer it
    lines[1] = lineConvertor2.convertApproximate(line);
    side = Side.LEFT;
  }
  return Pair.create(lines, side);
}
/**
 * Tears down the installed diff: disposes change highlighters, removes guarded blocks
 * from the unified document, erases range highlighters and folding, then refreshes the
 * status panel. No-op when nothing is installed.
 */
@CalledInAwt
private void destroyChangedBlockData() {
  if (myChangedBlockData == null) return;
  for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) {
    change.destroyHighlighter();
  }
  for (RangeMarker block : myChangedBlockData.getGuardedRangeBlocks()) {
    myDocument.removeGuardedBlock(block);
  }
  myChangedBlockData = null;
  UnifiedEditorRangeHighlighter.erase(myProject, myDocument);
  myFoldingModel.destroy();
  myStatusPanel.update();
}
//
// Typing
//
/**
 * Propagates typing in the unified editor back into the master side's document.
 * Translates the changed unified-document range into the master document, replays the
 * replacement there, shifts the visible changes and line converters accordingly, and
 * marks the presentation stale so a rediff gets scheduled.
 *
 * Fix: the debug-info header line was missing its trailing newline, fusing it with the
 * following line in the log output.
 */
private class MyOnesideDocumentListener implements DocumentListener {
  @Override
  public void beforeDocumentChange(@NotNull DocumentEvent e) {
    if (myDuringOnesideDocumentModification) return;
    if (myChangedBlockData == null) {
      LOG.warn("oneside beforeDocumentChange - myChangedBlockData == null");
      return;
    }
    // TODO: modify Document guard range logic - we can handle case, when whole read-only block is modified (e.g. by replacing a selection).
    try {
      myDuringTwosideDocumentModification = true;
      Document twosideDocument = getDocument(myMasterSide);
      LineCol onesideStartPosition = LineCol.fromOffset(myDocument, e.getOffset());
      LineCol onesideEndPosition = LineCol.fromOffset(myDocument, e.getOffset() + e.getOldLength());
      int line1 = onesideStartPosition.line;
      int line2 = onesideEndPosition.line + 1;
      int shift = DiffUtil.countLinesShift(e);
      int twosideStartLine = transferLineFromOnesideStrict(myMasterSide, onesideStartPosition.line);
      int twosideEndLine = transferLineFromOnesideStrict(myMasterSide, onesideEndPosition.line);
      if (twosideStartLine == -1 || twosideEndLine == -1) {
        // this should never happen
        logDebugInfo(e, onesideStartPosition, onesideEndPosition, twosideStartLine, twosideEndLine);
        markSuppressEditorTyping();
        return;
      }
      int twosideStartOffset = twosideDocument.getLineStartOffset(twosideStartLine) + onesideStartPosition.column;
      int twosideEndOffset = twosideDocument.getLineStartOffset(twosideEndLine) + onesideEndPosition.column;
      twosideDocument.replaceString(twosideStartOffset, twosideEndOffset, e.getNewFragment());
      for (UnifiedDiffChange change : myChangedBlockData.getDiffChanges()) {
        change.processChange(line1, line2, shift);
      }
      LineNumberConvertor masterConvertor = myChangedBlockData.getLineNumberConvertor(myMasterSide);
      LineNumberConvertor slaveConvertor = myChangedBlockData.getLineNumberConvertor(myMasterSide.other());
      masterConvertor.handleMasterChange(line1, line2, shift, true);
      slaveConvertor.handleMasterChange(line1, line2, shift, false);
    }
    finally {
      // TODO: we can avoid marking state out-of-date in some simple cases (like in SimpleDiffViewer)
      // but this will greatly increase complexity, so let's wait if it's actually required by users
      markStateIsOutOfDate();
      scheduleRediff();
      myDuringTwosideDocumentModification = false;
    }
  }
  /** Dumps the viewer state when a unified-line could not be mapped back strictly. */
  private void logDebugInfo(DocumentEvent e,
                            LineCol onesideStartPosition, LineCol onesideEndPosition,
                            int twosideStartLine, int twosideEndLine) {
    StringBuilder info = new StringBuilder();
    Document document1 = getDocument(Side.LEFT);
    Document document2 = getDocument(Side.RIGHT);
    info.append("==== UnifiedDiffViewer Debug Info ====").append('\n');
    info.append("myMasterSide - ").append(myMasterSide).append('\n');
    info.append("myLeftDocument.length() - ").append(document1.getTextLength()).append('\n');
    info.append("myRightDocument.length() - ").append(document2.getTextLength()).append('\n');
    info.append("myDocument.length() - ").append(myDocument.getTextLength()).append('\n');
    info.append("e.getOffset() - ").append(e.getOffset()).append('\n');
    info.append("e.getNewLength() - ").append(e.getNewLength()).append('\n');
    info.append("e.getOldLength() - ").append(e.getOldLength()).append('\n');
    info.append("onesideStartPosition - ").append(onesideStartPosition).append('\n');
    info.append("onesideEndPosition - ").append(onesideEndPosition).append('\n');
    info.append("twosideStartLine - ").append(twosideStartLine).append('\n');
    info.append("twosideEndLine - ").append(twosideEndLine).append('\n');
    Pair<int[], Side> pair1 = transferLineFromOneside(onesideStartPosition.line);
    Pair<int[], Side> pair2 = transferLineFromOneside(onesideEndPosition.line);
    info.append("non-strict transferStartLine - ").append(pair1.first[0]).append("-").append(pair1.first[1])
      .append(":").append(pair1.second).append('\n');
    info.append("non-strict transferEndLine - ").append(pair2.first[0]).append("-").append(pair2.first[1])
      .append(":").append(pair2.second).append('\n');
    info.append("---- UnifiedDiffViewer Debug Info ----");
    LOG.warn(info.toString());
  }
}
/**
 * Reacts to external changes of the two-sided documents: marks the presentation stale,
 * suppresses typing until the next rediff, and schedules that rediff.
 * Ignored while this viewer itself is replaying a change into the two-sided document.
 */
@Override
protected void onDocumentChange(@NotNull DocumentEvent e) {
  if (myDuringTwosideDocumentModification) return;
  markStateIsOutOfDate();
  markSuppressEditorTyping();
  scheduleRediff();
}
//
// Modification operations
//
/**
 * Base action for applying the changes currently selected in the unified editor to one
 * side's document. Subclasses implement {@link #apply(List)} with the concrete operation
 * (replace or append).
 */
private abstract class ApplySelectedChangesActionBase extends AnAction implements DumbAware {
  // side whose document is modified by this action
  @NotNull protected final Side myModifiedSide;
  // true when the action is bound to a keyboard shortcut (affects enablement, see update())
  protected final boolean myShortcut;
  ApplySelectedChangesActionBase(@NotNull Side modifiedSide, boolean shortcut) {
    myModifiedSide = modifiedSide;
    myShortcut = shortcut;
  }
  @Override
  public void update(@NotNull AnActionEvent e) {
    if (myShortcut) {
      // consume shortcut even if there are nothing to do - avoid calling some other action
      e.getPresentation().setEnabledAndVisible(true);
      return;
    }
    Editor editor = e.getData(CommonDataKeys.EDITOR);
    if (editor != getEditor()) {
      e.getPresentation().setEnabledAndVisible(false);
      return;
    }
    if (!isEditable(myModifiedSide, true) || isStateIsOutOfDate()) {
      e.getPresentation().setEnabledAndVisible(false);
      return;
    }
    e.getPresentation().setVisible(true);
    e.getPresentation().setEnabled(isSomeChangeSelected());
  }
  @Override
  public void actionPerformed(@NotNull final AnActionEvent e) {
    final List<UnifiedDiffChange> selectedChanges = getSelectedChanges();
    if (selectedChanges.isEmpty()) return;
    // re-check preconditions - state may have changed since update()
    if (!isEditable(myModifiedSide, true)) return;
    if (isStateIsOutOfDate()) return;
    String title = e.getPresentation().getText() + " selected changes";
    DiffUtil.executeWriteCommand(getDocument(myModifiedSide), e.getProject(), title, () -> {
      // state is invalidated during apply(), but changes are in reverse order, so they should not conflict with each other
      apply(ContainerUtil.reverse(selectedChanges));
      scheduleRediff();
    });
  }
  /** True when the editor selection intersects at least one diff change. */
  protected boolean isSomeChangeSelected() {
    if (myChangedBlockData == null) return false;
    List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges();
    if (changes.isEmpty()) return false;
    return DiffUtil.isSomeRangeSelected(getEditor(), lines -> {
      return ContainerUtil.exists(changes, change -> isChangeSelected(change, lines));
    });
  }
  /** Changes whose line range intersects the current editor selection. */
  @NotNull
  @CalledInAwt
  private List<UnifiedDiffChange> getSelectedChanges() {
    if (myChangedBlockData == null) return Collections.emptyList();
    final BitSet lines = DiffUtil.getSelectedLines(myEditor);
    List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges();
    return ContainerUtil.filter(changes, change -> isChangeSelected(change, lines));
  }
  private boolean isChangeSelected(@NotNull UnifiedDiffChange change, @NotNull BitSet lines) {
    return DiffUtil.isSelectedByLine(lines, change.getLine1(), change.getLine2());
  }
  /** Applies the operation to the given changes; called under the write lock. */
  @CalledWithWriteLock
  protected abstract void apply(@NotNull List<UnifiedDiffChange> changes);
}
/**
 * Replaces the selected changes on the side opposite to the focused one with the focused
 * side's content ("Accept"/"Revert").
 */
private class ReplaceSelectedChangesAction extends ApplySelectedChangesActionBase {
  ReplaceSelectedChangesAction(@NotNull Side focusedSide, boolean shortcut) {
    super(focusedSide.other(), shortcut);
    String shortcutActionId = focusedSide.select("Diff.ApplyLeftSide", "Diff.ApplyRightSide");
    setShortcutSet(ActionManager.getInstance().getAction(shortcutActionId).getShortcutSet());
    getTemplatePresentation().setText(focusedSide.select("Revert", "Accept"));
    getTemplatePresentation().setIcon(focusedSide.select(AllIcons.Diff.Remove, AllIcons.Actions.Checked));
  }
  @Override
  protected void apply(@NotNull List<UnifiedDiffChange> changes) {
    for (UnifiedDiffChange selected : changes) {
      replaceChange(selected, myModifiedSide.other());
    }
  }
}
/**
 * Appends the focused side's content of the selected changes to the opposite side
 * (keeps the existing content there).
 */
private class AppendSelectedChangesAction extends ApplySelectedChangesActionBase {
  AppendSelectedChangesAction(@NotNull Side focusedSide, boolean shortcut) {
    super(focusedSide.other(), shortcut);
    String shortcutActionId = focusedSide.select("Diff.AppendLeftSide", "Diff.AppendRightSide");
    setShortcutSet(ActionManager.getInstance().getAction(shortcutActionId).getShortcutSet());
    getTemplatePresentation().setText("Append");
    getTemplatePresentation().setIcon(DiffUtil.getArrowDownIcon(focusedSide));
  }
  @Override
  protected void apply(@NotNull List<UnifiedDiffChange> changes) {
    for (UnifiedDiffChange selected : changes) {
      appendChange(selected, myModifiedSide.other());
    }
  }
}
/**
 * Replaces the change's range on the side opposite to {@code sourceSide} with the source
 * side's content of the same change. Must be called under the write lock.
 */
@CalledWithWriteLock
public void replaceChange(@NotNull UnifiedDiffChange change, @NotNull Side sourceSide) {
  Side outputSide = sourceSide.other();
  Document document1 = getDocument(Side.LEFT);
  Document document2 = getDocument(Side.RIGHT);
  LineFragment lineFragment = change.getLineFragment();
  DiffUtil.applyModification(outputSide.select(document1, document2),
                             outputSide.getStartLine(lineFragment), outputSide.getEndLine(lineFragment),
                             sourceSide.select(document1, document2),
                             sourceSide.getStartLine(lineFragment), sourceSide.getEndLine(lineFragment));
  // no need to mark myStateIsOutOfDate - it will be made by DocumentListener
  // TODO: we can apply change manually, without marking state out-of-date. But we'll have to schedule rediff anyway.
}
/**
 * Inserts the source side's content of the change at the end of the change's range on the
 * opposite side, keeping the existing content. No-op when the source range is empty.
 * Must be called under the write lock.
 */
@CalledWithWriteLock
public void appendChange(@NotNull UnifiedDiffChange change, @NotNull final Side sourceSide) {
  Side outputSide = sourceSide.other();
  Document document1 = getDocument(Side.LEFT);
  Document document2 = getDocument(Side.RIGHT);
  LineFragment lineFragment = change.getLineFragment();
  if (sourceSide.getStartLine(lineFragment) == sourceSide.getEndLine(lineFragment)) return;
  // start == end on the output side -> pure insertion at the end of the output range
  DiffUtil.applyModification(outputSide.select(document1, document2),
                             outputSide.getEndLine(lineFragment), outputSide.getEndLine(lineFragment),
                             sourceSide.select(document1, document2),
                             sourceSide.getStartLine(lineFragment), sourceSide.getEndLine(lineFragment));
}
//
// Impl
//
/** Text-diff settings (highlighting policy, whitespace handling, ...) for this context. */
@NotNull
public TextDiffSettings getTextSettings() {
  return TextDiffViewerUtil.getTextSettings(myContext);
}
/** Folding settings (context range, expand-by-default) for this context. */
@NotNull
public FoldingModelSupport.Settings getFoldingModelSettings() {
  return TextDiffViewerUtil.getFoldingModelSettings(myContext);
}
//
// Getters
//
/** The side whose document is edited through the unified editor. */
@NotNull
public Side getMasterSide() {
  return myMasterSide;
}
/** The single unified editor of this viewer. */
@NotNull
public EditorEx getEditor() {
  return myEditor;
}
/** All editors of this viewer - always a singleton list with the unified editor. */
@NotNull
protected List<? extends EditorEx> getEditors() {
  return Collections.singletonList(myEditor);
}
/** The two request contents; the request is known to hold DocumentContent instances. */
@NotNull
protected List<? extends DocumentContent> getContents() {
  //noinspection unchecked
  return (List<? extends DocumentContent>)(List)myRequest.getContents();
}
/** The content for the given side. */
@NotNull
protected DocumentContent getContent(@NotNull Side side) {
  return side.select(getContents());
}
/** The left-side content. */
@NotNull
protected DocumentContent getContent1() {
  return getContent(Side.LEFT);
}
/** The right-side content. */
@NotNull
protected DocumentContent getContent2() {
  return getContent(Side.RIGHT);
}
/** The currently installed diff changes, or {@code null} when no diff is installed. */
@CalledInAwt
@Nullable
protected List<UnifiedDiffChange> getDiffChanges() {
  ChangedBlockData blockData = myChangedBlockData;
  if (blockData == null) return null;
  return blockData.getDiffChanges();
}
/** Root Swing component of the viewer. */
@NotNull
@Override
public JComponent getComponent() {
  return myPanel;
}
/** Editor component to focus, or {@code null} while an error/too-big state is shown. */
@Nullable
@Override
public JComponent getPreferredFocusedComponent() {
  return myPanel.isGoodContent() ? myEditor.getContentComponent() : null;
}
/** Panel showing the difference count / busy state. */
@NotNull
@Override
protected JComponent getStatusPanel() {
  return myStatusPanel;
}
/**
 * Whether the given side's document may be modified, honoring the user's read-only lock
 * (when {@code respectReadOnlyLock}) and the per-side forced read-only flags.
 */
@CalledInAwt
public boolean isEditable(@NotNull Side side, boolean respectReadOnlyLock) {
  boolean locked = (myReadOnlyLockSet && respectReadOnlyLock) || side.select(myForceReadOnlyFlags);
  return !locked && DiffUtil.canMakeWritable(getDocument(side));
}
/** The document backing the given side's content. */
@NotNull
public Document getDocument(@NotNull Side side) {
  return getContent(side).getDocument();
}
/** True when the documents changed since the last rediff (presentation is stale). */
protected boolean isStateIsOutOfDate() {
  return myStateIsOutOfDate;
}
//
// Misc
//
/** Navigatable at the current caret position, or {@code null} if none. */
@Nullable
@Override
protected Navigatable getNavigatable() {
  return getNavigatable(LineCol.fromCaret(myEditor));
}
/** The diff change whose line range contains the caret, or {@code null} if none. */
@CalledInAwt
@Nullable
protected UnifiedDiffChange getCurrentChange() {
  if (myChangedBlockData == null) return null;
  int caretLine = myEditor.getCaretModel().getLogicalPosition().line;
  return myChangedBlockData.getDiffChanges().stream()
    .filter(change -> DiffUtil.isSelectedByLine(caretLine, change.getLine1(), change.getLine2()))
    .findFirst()
    .orElse(null);
}
/**
 * Navigatable for a unified-document position: maps the line to both sides and prefers
 * the side chosen by {@code transferLineFromOneside}, falling back to whichever side
 * actually provides a navigatable.
 */
@CalledInAwt
@Nullable
protected Navigatable getNavigatable(@NotNull LineCol position) {
  Pair<int[], Side> pair = transferLineFromOneside(position.line);
  int line1 = pair.first[0];
  int line2 = pair.first[1];
  Navigatable navigatable1 = getContent1().getNavigatable(new LineCol(line1, position.column));
  Navigatable navigatable2 = getContent2().getNavigatable(new LineCol(line2, position.column));
  if (navigatable1 == null) return navigatable2;
  if (navigatable2 == null) return navigatable1;
  return pair.second.select(navigatable1, navigatable2);
}
/** Same applicability rules as the two-sided text viewer. */
public static boolean canShowRequest(@NotNull DiffContext context, @NotNull DiffRequest request) {
  return TwosideTextDiffViewer.canShowRequest(context, request);
}
//
// Actions
//
/** Supplies prev/next-difference navigation over the installed changes. */
private class MyPrevNextDifferenceIterable extends PrevNextDifferenceIterableBase<UnifiedDiffChange> {
  @NotNull
  @Override
  protected List<UnifiedDiffChange> getChanges() {
    // empty list when no diff is installed
    return ContainerUtil.notNullize(getDiffChanges());
  }
  @NotNull
  @Override
  protected EditorEx getEditor() {
    return myEditor;
  }
  @Override
  protected int getStartLine(@NotNull UnifiedDiffChange change) {
    return change.getLine1();
  }
  @Override
  protected int getEndLine(@NotNull UnifiedDiffChange change) {
    return change.getLine2();
  }
}
/** "Open in editor" by mouse click: resolves clicks inside the unified editor only. */
private class MyOpenInEditorWithMouseAction extends OpenInEditorWithMouseAction {
  @Override
  protected Navigatable getNavigatable(@NotNull Editor editor, int line) {
    return editor == myEditor ? UnifiedDiffViewer.this.getNavigatable(new LineCol(line)) : null;
  }
}
/** Toggles the 'collapse unchanged fragments' setting and expands/collapses folding. */
private class MyToggleExpandByDefaultAction extends TextDiffViewerUtil.ToggleExpandByDefaultAction {
  MyToggleExpandByDefaultAction() {
    super(getTextSettings());
  }
  @Override
  protected void expandAll(boolean expand) {
    myFoldingModel.expandAll(expand);
  }
}
/** Read-only lock toggle: blocks editing of both sides regardless of writability. */
private class MyReadOnlyLockAction extends TextDiffViewerUtil.ReadOnlyLockAction {
  MyReadOnlyLockAction() {
    super(getContext());
    applyDefaults();
  }
  @Override
  protected void doApply(boolean readOnly) {
    myReadOnlyLockSet = readOnly;
    if (myChangedBlockData != null) {
      // gutter apply/append icons depend on editability - refresh them
      for (UnifiedDiffChange unifiedDiffChange : myChangedBlockData.getDiffChanges()) {
        unifiedDiffChange.updateGutterActions();
      }
    }
    updateEditorCanBeTyped();
  }
  @Override
  protected boolean canEdit() {
    // lock makes sense only when at least one side is writable at all
    return !myForceReadOnlyFlags[0] && DiffUtil.canMakeWritable(getContent1().getDocument()) ||
           !myForceReadOnlyFlags[1] && DiffUtil.canMakeWritable(getContent2().getDocument());
  }
}
//
// Scroll from annotate
//
/**
 * Iterates over the inserted lines of all changes (right-side content), one change per
 * block, for matching an annotation navigation context against the diff.
 */
private class ChangedLinesIterator extends BufferedLineIterator {
  @NotNull private final List<UnifiedDiffChange> myChanges;
  // index of the next change to load
  private int myIndex = 0;
  private ChangedLinesIterator(@NotNull List<UnifiedDiffChange> changes) {
    myChanges = changes;
    init();
  }
  @Override
  public boolean hasNextBlock() {
    return myIndex < myChanges.size();
  }
  @Override
  public void loadNextBlock() {
    // changes must match the current documents - callers ensure the state is up to date
    LOG.assertTrue(!myStateIsOutOfDate);
    UnifiedDiffChange change = myChanges.get(myIndex);
    myIndex++;
    LineFragment lineFragment = change.getLineFragment();
    Document document = getContent2().getDocument();
    CharSequence insertedText = getLinesContent(document, lineFragment.getStartLine2(), lineFragment.getEndLine2());
    int lineNumber = lineFragment.getStartLine2();
    LineTokenizer tokenizer = new LineTokenizer(insertedText.toString());
    for (String line : tokenizer.execute()) {
      addLine(lineNumber, line);
      lineNumber++;
    }
  }
}
//
// Helpers
//
/** Exposes viewer-specific data (difference iterable, current editor, current change range). */
@Nullable
@Override
public Object getData(@NotNull @NonNls String dataId) {
  if (DiffDataKeys.PREV_NEXT_DIFFERENCE_ITERABLE.is(dataId)) {
    return myPrevNextDifferenceIterable;
  }
  if (DiffDataKeys.CURRENT_EDITOR.is(dataId)) {
    return myEditor;
  }
  if (DiffDataKeys.CURRENT_CHANGE_RANGE.is(dataId)) {
    UnifiedDiffChange current = getCurrentChange();
    if (current != null) return new LineRange(current.getLine1(), current.getLine2());
  }
  return super.getData(dataId);
}
/** Status text: difference count, or a note when all differences were ignored by policy. */
private class MyStatusPanel extends StatusPanel {
  @Nullable
  @Override
  protected String getMessage() {
    if (myChangedBlockData == null) return null;
    int changesCount = myChangedBlockData.getDiffChanges().size();
    if (changesCount == 0 && !myChangedBlockData.isContentsEqual()) {
      // contents differ, but every difference was filtered out (e.g. whitespace-only)
      return DiffBundle.message("diff.all.differences.ignored.text");
    }
    return DiffBundle.message("diff.count.differences.status.text", changesCount);
  }
}
/**
 * Immutable result of the background rediff computation: the fragment builder plus the
 * (optional) highlighters to install on the unified document.
 */
private static class TwosideDocumentData {
  @NotNull private final UnifiedFragmentBuilder myBuilder;
  @Nullable private final EditorHighlighter myHighlighter;
  @Nullable private final UnifiedEditorRangeHighlighter myRangeHighlighter;
  TwosideDocumentData(@NotNull UnifiedFragmentBuilder builder,
                      @Nullable EditorHighlighter highlighter,
                      @Nullable UnifiedEditorRangeHighlighter rangeHighlighter) {
    myBuilder = builder;
    myHighlighter = highlighter;
    myRangeHighlighter = rangeHighlighter;
  }
  @NotNull
  public UnifiedFragmentBuilder getBuilder() {
    return myBuilder;
  }
  @Nullable
  public EditorHighlighter getHighlighter() {
    return myHighlighter;
  }
  @Nullable
  public UnifiedEditorRangeHighlighter getRangeHighlighter() {
    return myRangeHighlighter;
  }
}
/**
 * Immutable state of an installed diff: the visible changes, the guarded (read-only)
 * ranges of the unified document, the line-number converters for both sides, and whether
 * the contents were equal after filtering.
 *
 * Fix: the constructor parameter was misspelled 'guarderRangeBlocks' while the field is
 * 'myGuardedRangeBlocks'; renamed for consistency (parameter names are not part of the
 * callable interface).
 */
private static class ChangedBlockData {
  @NotNull private final List<UnifiedDiffChange> myDiffChanges;
  @NotNull private final List<RangeMarker> myGuardedRangeBlocks;
  @NotNull private final LineNumberConvertor myLineNumberConvertor1;
  @NotNull private final LineNumberConvertor myLineNumberConvertor2;
  private final boolean myIsContentsEqual;
  ChangedBlockData(@NotNull List<UnifiedDiffChange> diffChanges,
                   @NotNull List<RangeMarker> guardedRangeBlocks,
                   @NotNull LineNumberConvertor lineNumberConvertor1,
                   @NotNull LineNumberConvertor lineNumberConvertor2,
                   boolean isContentsEqual) {
    myDiffChanges = diffChanges;
    myGuardedRangeBlocks = guardedRangeBlocks;
    myLineNumberConvertor1 = lineNumberConvertor1;
    myLineNumberConvertor2 = lineNumberConvertor2;
    myIsContentsEqual = isContentsEqual;
  }
  @NotNull
  public List<UnifiedDiffChange> getDiffChanges() {
    return myDiffChanges;
  }
  @NotNull
  public List<RangeMarker> getGuardedRangeBlocks() {
    return myGuardedRangeBlocks;
  }
  /** Converter mapping the given side's document lines to/from unified-document lines. */
  @NotNull
  public LineNumberConvertor getLineNumberConvertor(@NotNull Side side) {
    return side.select(myLineNumberConvertor1, myLineNumberConvertor2);
  }
  public boolean isContentsEqual() {
    return myIsContentsEqual;
  }
}
/**
 * Immutable bundle of everything needed to (re)populate the unified editor: the combined
 * text, optional highlighters, the file type for code style, and the two gutter
 * line-number converters.
 */
private static class CombinedEditorData {
  @NotNull private final CharSequence myText;
  @Nullable private final EditorHighlighter myHighlighter;
  @Nullable private final UnifiedEditorRangeHighlighter myRangeHighlighter;
  @Nullable private final FileType myFileType;
  @NotNull private final TIntFunction myLineConvertor1;
  @NotNull private final TIntFunction myLineConvertor2;
  CombinedEditorData(@NotNull CharSequence text,
                     @Nullable EditorHighlighter highlighter,
                     @Nullable UnifiedEditorRangeHighlighter rangeHighlighter,
                     @Nullable FileType fileType,
                     @NotNull TIntFunction convertor1,
                     @NotNull TIntFunction convertor2) {
    myText = text;
    myHighlighter = highlighter;
    myRangeHighlighter = rangeHighlighter;
    myFileType = fileType;
    myLineConvertor1 = convertor1;
    myLineConvertor2 = convertor2;
  }
  @NotNull
  public CharSequence getText() {
    return myText;
  }
  @Nullable
  public EditorHighlighter getHighlighter() {
    return myHighlighter;
  }
  @Nullable
  public UnifiedEditorRangeHighlighter getRangeHighlighter() {
    return myRangeHighlighter;
  }
  @Nullable
  public FileType getFileType() {
    return myFileType;
  }
  @NotNull
  public TIntFunction getLineConvertor1() {
    return myLineConvertor1;
  }
  @NotNull
  public TIntFunction getLineConvertor2() {
    return myLineConvertor2;
  }
}
/**
 * Restores the initial scroll/caret position after a rediff, honoring (in priority order)
 * a requested change, caret position, explicit line, or annotation navigation context.
 */
private class MyInitialScrollHelper extends InitialScrollPositionSupport.TwosideInitialScrollHelper {
  @NotNull
  @Override
  protected List<? extends Editor> getEditors() {
    return UnifiedDiffViewer.this.getEditors();
  }
  @Override
  protected void disableSyncScroll(boolean value) {
    // single editor - nothing to synchronize
  }
  @Override
  public void onSlowRediff() {
    // Will not happen for initial rediff
  }
  /**
   * Current caret as a pair of two-sided positions (left, right); unmapped side falls
   * back to (0, 0) via getPosition().
   */
  @Nullable
  @Override
  protected LogicalPosition[] getCaretPositions() {
    LogicalPosition position = myEditor.getCaretModel().getLogicalPosition();
    Pair<int[], Side> pair = transferLineFromOneside(position.line);
    LogicalPosition[] carets = new LogicalPosition[2];
    carets[0] = getPosition(pair.first[0], position.column);
    carets[1] = getPosition(pair.first[1], position.column);
    return carets;
  }
  @Override
  protected boolean doScrollToPosition() {
    if (myCaretPosition == null) return false;
    LogicalPosition twosidePosition = myMasterSide.selectNotNull(myCaretPosition);
    int onesideLine = transferLineToOneside(myMasterSide, twosidePosition.line);
    LogicalPosition position = new LogicalPosition(onesideLine, twosidePosition.column);
    myEditor.getCaretModel().moveToLogicalPosition(position);
    if (myEditorsPosition != null && myEditorsPosition.isSame(position)) {
      // same logical position as before - restore the exact scroll offset too
      DiffUtil.scrollToPoint(myEditor, myEditorsPosition.myPoints[0], false);
    }
    else {
      DiffUtil.scrollToCaret(myEditor, false);
    }
    return true;
  }
  @NotNull
  private LogicalPosition getPosition(int line, int column) {
    // -1 means the line has no mapping on that side
    if (line == -1) return new LogicalPosition(0, 0);
    return new LogicalPosition(line, column);
  }
  private void doScrollToLine(@NotNull Side side, @NotNull LogicalPosition position) {
    int onesideLine = transferLineToOneside(side, position.line);
    DiffUtil.scrollEditor(myEditor, onesideLine, position.column, false);
  }
  @Override
  protected boolean doScrollToLine() {
    if (myScrollToLine == null) return false;
    doScrollToLine(myScrollToLine.first, new LogicalPosition(myScrollToLine.second, 0));
    return true;
  }
  private boolean doScrollToChange(@NotNull ScrollToPolicy scrollToChangePolicy) {
    if (myChangedBlockData == null) return false;
    List<UnifiedDiffChange> changes = myChangedBlockData.getDiffChanges();
    UnifiedDiffChange targetChange = scrollToChangePolicy.select(changes);
    if (targetChange == null) return false;
    DiffUtil.scrollEditor(myEditor, targetChange.getLine1(), false);
    return true;
  }
  @Override
  protected boolean doScrollToChange() {
    if (myScrollToChange == null) return false;
    return doScrollToChange(myScrollToChange);
  }
  @Override
  protected boolean doScrollToFirstChange() {
    return doScrollToChange(ScrollToPolicy.FIRST_CHANGE);
  }
  @Override
  protected boolean doScrollToContext() {
    if (myNavigationContext == null) return false;
    if (myChangedBlockData == null) return false;
    ChangedLinesIterator changedLinesIterator = new ChangedLinesIterator(myChangedBlockData.getDiffChanges());
    int line = myNavigationContext.contextMatchCheck(changedLinesIterator);
    if (line == -1) {
      // this will work for the case, when spaces changes are ignored, and corresponding fragments are not reported as changed
      // just try to find target line -> +-
      AllLinesIterator allLinesIterator = new AllLinesIterator(getContent2().getDocument());
      line = myNavigationContext.contextMatchCheck(allLinesIterator);
    }
    if (line == -1) return false;
    doScrollToLine(Side.RIGHT, new LogicalPosition(line, 0));
    return true;
  }
}
/** Folding support for the single unified editor; folds the unchanged line ranges. */
private static class MyFoldingModel extends FoldingModelSupport {
  MyFoldingModel(@NotNull EditorEx editor, @NotNull Disposable disposable) {
    super(new EditorEx[]{editor}, disposable);
  }
  public void install(@Nullable List<LineRange> changedLines,
                      @NotNull UserDataHolder context,
                      @NotNull FoldingModelSupport.Settings settings) {
    // adapt LineRange objects to the [start, end] int pairs the base class expects
    Iterator<int[]> it = map(changedLines, line -> new int[]{
      line.start,
      line.end
    });
    install(it, context, settings);
  }
  /** Maps editor lines through the folded regions for the gutter line numbering. */
  @NotNull
  public TIntFunction getLineNumberConvertor() {
    return getLineConvertor(0);
  }
}
/**
 * Silently suppresses attempts to edit guarded (read-only) fragments instead of showing
 * the default error message.
 */
private static class MyReadonlyFragmentModificationHandler implements ReadonlyFragmentModificationHandler {
  @Override
  public void handle(ReadOnlyFragmentModificationException e) {
    // do nothing - typing into a guarded block is simply ignored
  }
}
}
| goodwinnk/intellij-community | platform/diff-impl/src/com/intellij/diff/tools/fragmented/UnifiedDiffViewer.java | Java | apache-2.0 | 47,029 |
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* Base class for backup ProtectionIntent.
*
* @extends models['Resource']
*/
class ProtectionIntentResource extends models['Resource'] {
  /**
   * Create a ProtectionIntentResource.
   * @member {object} [properties] ProtectionIntentResource properties
   * @member {string} [properties.backupManagementType] Type of backup
   * management for the backed up item. Possible values include: 'Invalid',
   * 'AzureIaasVM', 'MAB', 'DPM', 'AzureBackupServer', 'AzureSql',
   * 'AzureStorage', 'AzureWorkload', 'DefaultBackup'
   * @member {string} [properties.sourceResourceId] ARM ID of the resource to
   * be backed up.
   * @member {string} [properties.itemId] ID of the item which is getting
   * protected, In case of Azure Vm , it is ProtectedItemId
   * @member {string} [properties.policyId] ID of the backup policy with which
   * this item is backed up.
   * @member {string} [properties.protectionState] Backup state of this backup
   * item. Possible values include: 'Invalid', 'NotProtected', 'Protecting',
   * 'Protected', 'ProtectionFailed'
   * @member {string} [properties.protectionIntentItemType] Polymorphic
   * Discriminator
   */
  constructor() {
    // All state is inherited from Resource; this class only contributes mapper metadata.
    super();
  }

  /**
   * Defines the metadata of ProtectionIntentResource
   *
   * @returns {object} metadata of ProtectionIntentResource
   *
   */
  mapper() {
    return {
      required: false,
      serializedName: 'ProtectionIntentResource',
      type: {
        name: 'Composite',
        className: 'ProtectionIntentResource',
        modelProperties: {
          id: {
            required: false,
            readOnly: true,
            serializedName: 'id',
            type: {
              name: 'String'
            }
          },
          name: {
            required: false,
            readOnly: true,
            serializedName: 'name',
            type: {
              name: 'String'
            }
          },
          type: {
            required: false,
            readOnly: true,
            serializedName: 'type',
            type: {
              name: 'String'
            }
          },
          location: {
            required: false,
            serializedName: 'location',
            type: {
              name: 'String'
            }
          },
          tags: {
            required: false,
            serializedName: 'tags',
            type: {
              name: 'Dictionary',
              value: {
                  required: false,
                  serializedName: 'StringElementType',
                  type: {
                    name: 'String'
                  }
              }
            }
          },
          eTag: {
            required: false,
            serializedName: 'eTag',
            type: {
              name: 'String'
            }
          },
          properties: {
            required: false,
            serializedName: 'properties',
            type: {
              name: 'Composite',
              // The runtime selects the concrete ProtectionIntent subclass based
              // on this discriminator field in the wire payload.
              polymorphicDiscriminator: {
                serializedName: 'protectionIntentItemType',
                clientName: 'protectionIntentItemType'
              },
              uberParent: 'ProtectionIntent',
              className: 'ProtectionIntent'
            }
          }
        }
      }
    };
  }
}

module.exports = ProtectionIntentResource;
| xingwu1/azure-sdk-for-node | lib/services/recoveryServicesBackupManagement/lib/models/protectionIntentResource.js | JavaScript | apache-2.0 | 3,687 |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.lang.properties.provider;
import com.intellij.lang.properties.psi.Property;
import com.intellij.lang.properties.psi.PropertyKeyIndex;
import com.intellij.openapi.project.Project;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.properties.provider.PropertiesProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
public class PropertiesProviderImpl implements PropertiesProvider {
  /**
   * Looks up the first indexed property with the given key inside {@code scope}
   * and returns its value. Returns {@code null} when the scope has no project,
   * the key is not found, or the first match has no value.
   */
  @Override
  public @Nullable String getPropertyValue(@NotNull String propertyKey, @NotNull GlobalSearchScope scope) {
    Project project = scope.getProject();
    if (project == null) {
      return null;
    }

    Collection<Property> candidates = PropertyKeyIndex.getInstance().get(propertyKey, project, scope);
    if (candidates == null) {
      return null;
    }

    Property first = ContainerUtil.getFirstItem(candidates);
    if (first == null) {
      return null;
    }
    return first.getValue();
  }
}
| jwren/intellij-community | plugins/properties/properties-psi-api/src/com/intellij/lang/properties/provider/PropertiesProviderImpl.java | Java | apache-2.0 | 1,127 |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_interface.java
// Do not modify
package org.projectfloodlight.openflow.protocol;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import io.netty.buffer.ByteBuf;
/**
 * Async-config "experimenter master" property of the OpenFlow protocol.
 * Generated by LoxiGen from the protocol definition — regenerate rather than
 * editing by hand (see the file header above).
 */
public interface OFAsyncConfigPropExperimenterMaster extends OFObject, OFAsyncConfigProp {
    int getType();
    OFVersion getVersion();

    /** Writes this property's wire representation into the given buffer. */
    void writeTo(ByteBuf channelBuffer);

    Builder createBuilder();

    /** Builder producing OFAsyncConfigPropExperimenterMaster instances. */
    public interface Builder extends OFAsyncConfigProp.Builder {
        OFAsyncConfigPropExperimenterMaster build();
        int getType();
        OFVersion getVersion();
    }
}
| mehdi149/OF_COMPILER_0.1 | gen-src/main/java/org/projectfloodlight/openflow/protocol/OFAsyncConfigPropExperimenterMaster.java | Java | apache-2.0 | 1,789 |
/*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.ComponentModel.DataAnnotations;
using InBloomClient.Enum;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace InBloomClient.Entities
{
    /// <summary>
    /// A named grouping of students (e.g. an academic intervention group or a
    /// classroom breakout) owned by an education organization.
    /// </summary>
    public class Cohort
    {
        // Entity id; no validation attributes are applied to it.
        public string id { get; set; }

        [Required(ErrorMessage = "educationOrgId is required")]
        public string educationOrgId { get; set; }

        /// <summary>
        /// The academic subject associated with an academic intervention.
        /// </summary>
        [JsonConverter(typeof(StringEnumConverter))]
        public AcademicSubjectType academicSubject { get; set; }

        /// <summary>
        /// The description of the cohort and its purpose.
        /// </summary>
        [StringLength(1024)]
        public string cohortDescription { get; set; }

        /// <summary>
        /// The name or ID for the cohort.
        /// </summary>
        [StringLength(20)]
        [Required(ErrorMessage = "Cohort name is required")]
        public string cohortIdentifier { get; set; }

        //District, School, Classroom, Teacher, Principal, Counselor, Statewide
        [JsonConverter(typeof(StringEnumConverter))]
        public CohortScopeType cohortScope { get; set; }

        //Academic Intervention, Attendance Intervention, Discipline Intervention, Classroom Pullout, Extracurricular Activity, Field Trip,
        //Principal Watch List, Counselor List, In-school Suspension, Study Hall, Other
        /// <summary>
        /// The type of cohort (e.g., academic intervention, classroom breakout)
        /// </summary>
        [Required(ErrorMessage = "Cohort type is required")]
        [JsonConverter(typeof(StringEnumConverter))]
        public CohortType cohortType { get; set; }

        ///// <summary>
        ///// The education organization associated with and owner of the cohort.
        ///// </summary>
        //public EducationOrganization educationOrg { get; set; }

        ///// <summary>
        ///// The (optional) program associated with this cohort (e.g., special education)
        ///// </summary>
        //public List<Program> programs { get; set; }
    }
} | teamMnM/slc-student-grouping-tool | InBloomClient/InBloomClient/Entities/Cohort.cs | C# | apache-2.0 | 2,824 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.lb.dao;
import java.util.List;
import com.cloud.network.ElasticLbVmMapVO;
import com.cloud.network.LoadBalancerVO;
import com.cloud.utils.db.GenericDao;
import com.cloud.vm.DomainRouterVO;
/**
 * DAO for {@code ElasticLbVmMapVO} rows: the mapping between load balancers /
 * public IPs and the elastic-load-balancer ("ELB") VMs that serve them.
 */
public interface ElasticLbVmMapDao extends GenericDao<ElasticLbVmMapVO, Long> {
    /** Mapping for a specific (load balancer, ELB VM) pair. */
    ElasticLbVmMapVO findOneByLbIdAndElbVmId(long lbId, long elbVmId);

    /** Mapping for a specific (public IP, ELB VM) pair. */
    ElasticLbVmMapVO findOneByIpIdAndElbVmId(long ipId, long elbVmId);

    /** Mapping for a public IP, regardless of which ELB VM backs it. */
    ElasticLbVmMapVO findOneByIp(long ipId);

    /** All mappings served by the given ELB VM. */
    List<ElasticLbVmMapVO> listByElbVmId(long elbVmId);

    /** All mappings for the given load balancer. */
    List<ElasticLbVmMapVO> listByLbId(long lbId);

    /** Removes mappings for the given load balancer; presumably returns the affected row count — confirm against the implementation. */
    int deleteLB(long lbId);

    /** ELB VMs that currently back no load balancer (cleanup candidates, presumably — verify with callers). */
    List<DomainRouterVO> listUnusedElbVms();

    /** Load balancers served by the given ELB VM. */
    List<LoadBalancerVO> listLbsForElbVm(long elbVmId);
}
| argv0/cloudstack | plugins/network-elements/elastic-loadbalancer/src/com/cloud/network/lb/dao/ElasticLbVmMapDao.java | Java | apache-2.0 | 1,527 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.Locale;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.DateField;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Ping Request Handler for reporting SolrCore health to a Load Balancer.
*
* <p>
* This handler is designed to be used as the endpoint for an HTTP
* Load-Balancer to use when checking the "health" or "up status" of a
* Solr server.
* </p>
*
* <p>
* In it's simplest form, the PingRequestHandler should be
* configured with some defaults indicating a request that should be
* executed. If the request succeeds, then the PingRequestHandler
* will respond back with a simple "OK" status. If the request fails,
* then the PingRequestHandler will respond back with the
* corrisponding HTTP Error code. Clients (such as load balancers)
* can be configured to poll the PingRequestHandler monitoring for
* these types of responses (or for a simple connection failure) to
* know if there is a problem with the Solr server.
* </p>
*
* <pre class="prettyprint">
* <requestHandler name="/admin/ping" class="solr.PingRequestHandler">
* <lst name="invariants">
* <str name="qt">/search</str><!-- handler to delegate to -->
* <str name="q">some test query</str>
* </lst>
* </requestHandler>
* </pre>
*
* <p>
* A more advanced option available, is to configure the handler with a
* "healthcheckFile" which can be used to enable/disable the PingRequestHandler.
* </p>
*
* <pre class="prettyprint">
* <requestHandler name="/admin/ping" class="solr.PingRequestHandler">
* <!-- relative paths are resolved against the data dir -->
* <str name="healthcheckFile">server-enabled.txt</str>
* <lst name="invariants">
* <str name="qt">/search</str><!-- handler to delegate to -->
* <str name="q">some test query</str>
* </lst>
* </requestHandler>
* </pre>
*
* <ul>
* <li>If the health check file exists, the handler will execute the
* delegated query and return status as described above.
* </li>
* <li>If the health check file does not exist, the handler will return
* an HTTP error even if the server is working fine and the delegated
* query would have succeeded
* </li>
* </ul>
*
* <p>
* This health check file feature can be used as a way to indicate
* to some Load Balancers that the server should be "removed from
* rotation" for maintenance, or upgrades, or whatever reason you may
* wish.
* </p>
*
* <p>
* The health check file may be created/deleted by any external
* system, or the PingRequestHandler itself can be used to
* create/delete the file by specifying an "action" param in a
* request:
* </p>
*
* <ul>
* <li><code>http://.../ping?action=enable</code>
* - creates the health check file if it does not already exist
* </li>
* <li><code>http://.../ping?action=disable</code>
* - deletes the health check file if it exists
* </li>
* <li><code>http://.../ping?action=status</code>
* - returns a status code indicating if the healthcheck file exists
* ("<code>enabled</code>") or not ("<code>disabled</code>")
* </li>
* </ul>
*
* @since solr 1.3
*/
public class PingRequestHandler extends RequestHandlerBase implements SolrCoreAware
{
  public static Logger log = LoggerFactory.getLogger(PingRequestHandler.class);

  /** Init-arg name for the optional healthcheck flag file (see class docs). */
  public static final String HEALTHCHECK_FILE_PARAM = "healthcheckFile";

  /** Actions accepted via the request's "action" param; PING is the default. */
  protected enum ACTIONS {STATUS, ENABLE, DISABLE, PING};

  /** Raw healthcheckFile init-arg value; null when the feature is not configured. */
  private String healthFileName = null;
  /** Resolved flag file (absolute; relative paths resolve against the core data dir). */
  private File healthcheck = null;

  @Override
  public void init(NamedList args) {
    super.init(args);
    Object tmp = args.get(HEALTHCHECK_FILE_PARAM);
    healthFileName = (null == tmp ? null : tmp.toString());
  }

  @Override
  public void inform( SolrCore core ) {
    if (null != healthFileName) {
      healthcheck = new File(healthFileName);
      if ( ! healthcheck.isAbsolute()) {
        // Relative paths are resolved against the core's data dir, per the class docs.
        healthcheck = new File(core.getDataDir(), healthFileName);
        healthcheck = healthcheck.getAbsoluteFile();
      }

      if ( ! healthcheck.getParentFile().canWrite()) {
        // this is not fatal, users may not care about enable/disable via
        // solr request, file might be touched/deleted by an external system
        log.warn("Directory for configured healthcheck file is not writable by solr, PingRequestHandler will not be able to control enable/disable: {}",
                 healthcheck.getParentFile().getAbsolutePath());
      }
    }
  }

  /**
   * Returns true if the healthcheck flag-file is enabled but does not exist,
   * otherwise (no file configured, or file configured and exists)
   * returns false.
   */
  public boolean isPingDisabled() {
    return (null != healthcheck && ! healthcheck.exists() );
  }

  @Override
  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
  {
    SolrParams params = req.getParams();

    // in this case, we want to default distrib to false so
    // we only ping the single node
    Boolean distrib = params.getBool("distrib");
    if (distrib == null)   {
      ModifiableSolrParams mparams = new ModifiableSolrParams(params);
      mparams.set("distrib", false);
      req.setParams(mparams);
    }

    String actionParam = params.get("action");
    ACTIONS action = null;
    if (actionParam == null){
      action = ACTIONS.PING;
    }
    else {
      try {
        // Locale.ROOT keeps the parse locale-independent (e.g. Turkish 'i').
        action = ACTIONS.valueOf(actionParam.toUpperCase(Locale.ROOT));
      }
      catch (IllegalArgumentException iae){
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
          "Unknown action: " + actionParam);
      }
    }
    switch(action){
      case PING:
        if( isPingDisabled() ) {
          // Disabled: report SERVICE_UNAVAILABLE via the response (load balancers key off this).
          SolrException e = new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
                                              "Service disabled");
          rsp.setException(e);
          return;
        }
        handlePing(req, rsp);
        break;
      case ENABLE:
        handleEnable(true);
        break;
      case DISABLE:
        handleEnable(false);
        break;
      case STATUS:
        if( healthcheck == null ){
          SolrException e = new SolrException
            (SolrException.ErrorCode.SERVICE_UNAVAILABLE,
             "healthcheck not configured");
          rsp.setException(e);
        } else {
          rsp.add( "status", isPingDisabled() ? "disabled" : "enabled" );
        }
    }
  }

  /**
   * Delegates the configured ping query to the target request handler and adds
   * {@code status=OK} on success; any failure is rethrown as a SERVER_ERROR.
   */
  protected void handlePing(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception
  {
    SolrParams params = req.getParams();
    SolrCore core = req.getCore();

    // Get the RequestHandler
    String qt = params.get( CommonParams.QT );//optional; you get the default otherwise
    SolrRequestHandler handler = core.getRequestHandler( qt );
    if( handler == null ) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
        "Unknown RequestHandler (qt): "+qt );
    }

    if( handler instanceof PingRequestHandler ) {
      // Guard against infinite self-delegation.
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
        "Cannot execute the PingRequestHandler recursively" );
    }

    // Execute the ping query and catch any possible exception
    Throwable ex = null;
    try {
      SolrQueryResponse pingrsp = new SolrQueryResponse();
      core.execute(handler, req, pingrsp );
      ex = pingrsp.getException();
    }
    catch( Exception e ) {
      ex = e;
    }

    // Send an error or an 'OK' message (response code will be 200)
    if( ex != null ) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
        "Ping query caused exception: "+ex.getMessage(), ex );
    }
    rsp.add( "status", "OK" );
  }

  /**
   * Creates (enable=true) or deletes (enable=false) the healthcheck flag file.
   * The created file contains the creation timestamp. Throws when no
   * healthcheck file is configured, the write fails, or the delete fails.
   */
  protected void handleEnable(boolean enable) throws SolrException {
    if (healthcheck == null) {
      throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
        "No healthcheck file defined.");
    }
    if ( enable ) {
      try {
        // write out when the file was created
        FileUtils.write(healthcheck,
                        DateField.formatExternal(new Date()), "UTF-8");
      } catch (IOException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Unable to write healthcheck flag file", e);
      }
    } else {
      if (healthcheck.exists() && !healthcheck.delete()){
        throw new SolrException(SolrException.ErrorCode.NOT_FOUND,
          "Did not successfully delete healthcheck file: "
          +healthcheck.getAbsolutePath());
      }
    }
  }

  //////////////////////// SolrInfoMBeans methods //////////////////////

  @Override
  public String getDescription() {
    return "Reports application health to a load-balancer";
  }

  @Override
  public String getSource() {
    return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_4_7/solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java $";
  }
}
| pengzong1111/solr4 | solr/core/src/java/org/apache/solr/handler/PingRequestHandler.java | Java | apache-2.0 | 10,590 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.kerberos.shared.replay;
import java.io.Serializable;
import javax.security.auth.kerberos.KerberosPrincipal;
import net.sf.ehcache.Cache;
import net.sf.ehcache.Element;
import net.sf.ehcache.store.AbstractPolicy;
import org.apache.directory.shared.kerberos.KerberosTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* "The replay cache will store at least the server name, along with the client name,
* time, and microsecond fields from the recently-seen authenticators, and if a
* matching tuple is found, the KRB_AP_ERR_REPEAT error is returned."
*
* We will store the entries in Ehacache instance
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class ReplayCacheImpl implements ReplayCache
{
    private static final Logger LOG = LoggerFactory.getLogger( ReplayCacheImpl.class );

    /** ehcache based storage to store the entries */
    private Cache cache;

    /** default clock skew */
    private static final long DEFAULT_CLOCK_SKEW = 5 * KerberosTime.MINUTE;

    /** The clock skew */
    private long clockSkew = DEFAULT_CLOCK_SKEW;

    /**
     * A structure to hold an entry
     */
    public class ReplayCacheEntry implements Serializable
    {
        private static final long serialVersionUID = 1L;

        /** The server principal */
        private KerberosPrincipal serverPrincipal;

        /** The client principal */
        private KerberosPrincipal clientPrincipal;

        /** The client time */
        private KerberosTime clientTime;

        /** The client micro seconds */
        private int clientMicroSeconds;


        /**
         * Creates a new instance of ReplayCacheEntry.
         *
         * @param serverPrincipal the target server principal from the authenticator
         * @param clientPrincipal the requesting client principal
         * @param clientTime the client timestamp
         * @param clientMicroSeconds the microsecond part of the client timestamp
         */
        public ReplayCacheEntry( KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal,
            KerberosTime clientTime, int clientMicroSeconds )
        {
            this.serverPrincipal = serverPrincipal;
            this.clientPrincipal = clientPrincipal;
            this.clientTime = clientTime;
            this.clientMicroSeconds = clientMicroSeconds;
        }


        /**
         * Returns whether this {@link ReplayCacheEntry} is equal to another {@link ReplayCacheEntry}.
         * {@link ReplayCacheEntry}'s are equal when the server name, client name, client time, and
         * the client microseconds are equal.
         *
         * NOTE(review): this OVERLOADS equals with a ReplayCacheEntry parameter rather
         * than overriding {@code Object.equals(Object)}; collection-based equality will
         * not use it. Callers in this class never rely on it, but keep that in mind.
         *
         * @param that the entry to compare against
         * @return true if the ReplayCacheEntry's are equal.
         */
        public boolean equals( ReplayCacheEntry that )
        {
            return serverPrincipal.equals( that.serverPrincipal ) && clientPrincipal.equals( that.clientPrincipal )
                && clientTime.equals( that.clientTime ) && clientMicroSeconds == that.clientMicroSeconds;
        }


        /**
         * Returns whether this {@link ReplayCacheEntry} is older than a given time.
         *
         * @param clockSkew the allowed skew, in milliseconds
         * @return true if the {@link ReplayCacheEntry}'s client time is outside the clock skew time.
         */
        public boolean isOutsideClockSkew( long clockSkew )
        {
            return !clientTime.isInClockSkew( clockSkew );
        }


        /**
         * Builds the cache lookup key: "client#server#time#microseconds".
         * Note the key embeds ALL four tuple fields, including the client principal.
         *
         * @return create a key to be used while storing in the cache
         */
        private String createKey()
        {
            StringBuilder sb = new StringBuilder();
            sb.append( ( clientPrincipal == null ) ? "null" : clientPrincipal.getName() );
            sb.append( '#' );
            sb.append( ( serverPrincipal == null ) ? "null" : serverPrincipal.getName() );
            sb.append( '#' );
            sb.append( ( clientTime == null ) ? "null" : clientTime.getDate() );
            sb.append( '#' );
            sb.append( clientMicroSeconds );

            return sb.toString();
        }
    }

    /**
     * an expiration policy based on the clockskew
     */
    private class ClockskewExpirationPolicy extends AbstractPolicy
    {
        /**
         * {@inheritDoc}
         */
        public String getName()
        {
            return "CLOCK-SKEW";
        }


        /**
         * {@inheritDoc}
         *
         * Ignores element1: an element is considered evictable purely because its
         * entry has drifted outside the configured clock skew.
         */
        public boolean compare( Element element1, Element element2 )
        {
            ReplayCacheEntry entry = ( ReplayCacheEntry ) element2.getValue();

            if ( entry.isOutsideClockSkew( clockSkew ) )
            {
                return true;
            }

            return false;
        }
    }

    /**
     * Creates a new instance of InMemoryReplayCache. Sets the
     * delay between each cleaning run to 5 seconds.
     */
    public ReplayCacheImpl( Cache cache )
    {
        this.cache = cache;
        this.cache.setMemoryStoreEvictionPolicy( new ClockskewExpirationPolicy() );
    }


    /**
     * Creates a new instance of InMemoryReplayCache. Sets the
     * delay between each cleaning run to 5 seconds. Sets the
     * clockSkew to the given value
     *
     * @param clockSkew the allowed skew (milliseconds)
     */
    public ReplayCacheImpl( Cache cache, long clockSkew )
    {
        this.cache = cache;
        this.clockSkew = clockSkew;
        this.cache.setMemoryStoreEvictionPolicy( new ClockskewExpirationPolicy() );
    }


    /**
     * Sets the clock skew.
     *
     * @param clockSkew the allowed skew, in milliseconds
     */
    public void setClockSkew( long clockSkew )
    {
        this.clockSkew = clockSkew;
    }


    /**
     * Check if an entry is a replay or not.
     *
     * The lookup key already embeds the client principal (see createKey), which is
     * why the explicit field comparison below does not need to re-check it.
     */
    public synchronized boolean isReplay( KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal,
        KerberosTime clientTime, int clientMicroSeconds )
    {
        ReplayCacheEntry entry = new ReplayCacheEntry( serverPrincipal, clientPrincipal, clientTime, clientMicroSeconds );

        Element element = cache.get( entry.createKey() );

        if ( element == null )
        {
            return false;
        }

        entry = ( ReplayCacheEntry ) element.getValue();

        if ( serverPrincipal.equals( entry.serverPrincipal ) &&
             clientTime.equals( entry.clientTime ) &&
             ( clientMicroSeconds == entry.clientMicroSeconds ) )
        {
            return true;
        }

        return false;
    }


    /**
     * Add a new entry into the cache. A thread will clean all the timed out
     * entries.
     */
    public synchronized void save( KerberosPrincipal serverPrincipal, KerberosPrincipal clientPrincipal,
        KerberosTime clientTime, int clientMicroSeconds )
    {
        ReplayCacheEntry entry = new ReplayCacheEntry( serverPrincipal, clientPrincipal, clientTime, clientMicroSeconds );

        Element element = new Element( entry.createKey(), entry );

        cache.put( element );
    }


    /**
     * {@inheritDoc}
     */
    public void clear()
    {
        LOG.debug( "removing all the elements from cache" );
        cache.removeAll();
    }
}
| lucastheisen/apache-directory-server | kerberos-codec/src/main/java/org/apache/directory/server/kerberos/shared/replay/ReplayCacheImpl.java | Java | apache-2.0 | 7,871 |
namespace JVM {

  export class JvmController {
    tabs: Nav.HawtioTab[];

    constructor(workspace: Jmx.Workspace) {
      'ngInject';
      // "Remote" is always offered; "Local" and "Discover" appear only when the
      // corresponding MBeans are present in the workspace.
      const tabs: Nav.HawtioTab[] = [new Nav.HawtioTab('Remote', '/jvm/connect')];
      if (hasLocalMBean(workspace)) {
        tabs.push(new Nav.HawtioTab('Local', '/jvm/local'));
      }
      if (hasDiscoveryMBean(workspace)) {
        tabs.push(new Nav.HawtioTab('Discover', '/jvm/discover'));
      }
      this.tabs = tabs;
    }
  }

  /** Component wrapping the tab layout around the controller's tabs. */
  export const jvmComponent: angular.IComponentOptions = {
    template: '<hawtio-tabs-layout tabs="$ctrl.tabs"></hawtio-tabs-layout>',
    controller: JvmController
  };

}
| hawtio/hawtio-integration | plugins/jvm/ts/jvm.component.ts | TypeScript | apache-2.0 | 622 |
// Copyright 2011 Avi Levi
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Linq;
using System.Reflection;
namespace ByContext.ModelBinders
{
public class DefaultModelBinderFactory : IModelBinderFactory
{
public IModelBinder Create(Type modelType)
{
var propertyInfos = modelType.GetProperties(BindingFlags.Instance | BindingFlags.Public).Select(pi => pi).ToDictionary(p => p.Name, p => p);
return new DefaultModelBinder(propertyInfos);
}
}
} | Avi-Levi/ByContext | ByContext/ModelBinders/DefaultModelBinderFactory.cs | C# | apache-2.0 | 1,035 |
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package personalize
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol/jsonrpc"
)
// Personalize provides the API operation methods for making requests to
// Amazon Personalize. See this package's package overview docs
// for details on the service.
//
// Personalize methods are safe to use concurrently. It is not safe to
// modify any of the struct's properties though.
type Personalize struct {
	*client.Client
}

// Used for custom client initialization logic
var initClient func(*client.Client)

// Used for custom request initialization logic
var initRequest func(*request.Request)

// Service information constants
const (
	ServiceName = "Personalize" // Name of service.
	EndpointsID = "personalize" // ID to lookup a service endpoint with.
	ServiceID   = "Personalize" // ServiceID is a unique identifier of a specific service.
)
// New creates a new instance of the Personalize client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
//     // Create a Personalize client from just a session.
//     svc := personalize.New(mySession)
//
//     // Create a Personalize client with additional configuration
//     svc := personalize.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *Personalize {
	c := p.ClientConfig(EndpointsID, cfgs...)
	if c.SigningNameDerived || len(c.SigningName) == 0 {
		// Fall back to the service's canonical signing name when the resolved
		// endpoint configuration did not supply an explicit one.
		c.SigningName = "personalize"
	}
	return newClient(*c.Config, c.Handlers, c.Endpoint, c.SigningRegion, c.SigningName)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion, signingName string) *Personalize {
	svc := &Personalize{
		Client: client.New(
			cfg,
			metadata.ClientInfo{
				ServiceName:   ServiceName,
				ServiceID:     ServiceID,
				SigningName:   signingName,
				SigningRegion: signingRegion,
				Endpoint:      endpoint,
				APIVersion:    "2018-05-22",
				JSONVersion:   "1.1",
				TargetPrefix:  "AmazonPersonalize",
			},
			handlers,
		),
	}

	// Handlers: install the SigV4 signer and the JSON-RPC (AWS JSON 1.1)
	// marshal/unmarshal pipeline.
	svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
	svc.Handlers.Build.PushBackNamed(jsonrpc.BuildHandler)
	svc.Handlers.Unmarshal.PushBackNamed(jsonrpc.UnmarshalHandler)
	svc.Handlers.UnmarshalMeta.PushBackNamed(jsonrpc.UnmarshalMetaHandler)
	svc.Handlers.UnmarshalError.PushBackNamed(jsonrpc.UnmarshalErrorHandler)

	// Run custom client initialization if present
	if initClient != nil {
		initClient(svc.Client)
	}

	return svc
}
// newRequest creates a new request for a Personalize operation and runs any
// custom request initialization.
func (c *Personalize) newRequest(op *request.Operation, params, data interface{}) *request.Request {
	req := c.NewRequest(op, params, data)

	// Run custom request initialization if present
	// (initRequest is only set by service customizations, when they exist).
	if initRequest != nil {
		initRequest(req)
	}

	return req
}
| HotelsDotCom/kube-aws | vendor/github.com/aws/aws-sdk-go/service/personalize/service.go | GO | apache-2.0 | 3,260 |
# -*- coding: utf-8 -*-
# compute the times of action(rec|click|msg) for each user
from math import sqrt
def getActionScore(action):
    """Map an action name to its ordinal score: rec -> 0, click -> 1, anything else -> 2."""
    return {"rec": 0, "click": 1}.get(action, 2)
def compute_interaction(data):
    """Count how many records mention each (userB, action) pair.

    Each record in ``data`` is a space-separated line of the form
    ``userA userB times action``, optionally newline-terminated.

    Returns a dict mapping "userB action" -> number of records.
    """
    interaction = {}
    for line in data:
        # Strip a trailing newline (if any) instead of blindly chopping the last
        # character of ``action`` -- the original ``action[:-1]`` corrupted the
        # final line of a file that lacked a terminating '\n'.
        (userA, userB, times, action) = line.rstrip('\n').split(' ')
        key = userB + " " + action
        interaction.setdefault(key, 0)
        interaction[key] += 1
    return interaction
def compute_user_history_interaction(trainFile):
    """Read the training file, aggregate per-(user, action) counts, and return
    them as a list of (user, action, times) tuples.

    Side effect: also dumps the raw counts to 'user_interaction.txt' in the
    current working directory.

    Note: uses the Python 2-only ``file()`` builtin, like the rest of this module.
    """
    records = []
    lineList = []
    lineNum = 1
    result = []
    lineList = [line for line in file(trainFile)]
    for line in lineList:
        if lineNum == 1: #ignore the title in first line
            lineNum += 1
            continue
        records.append(line)
        lineNum += 1
    interaction = compute_interaction(records)
    # Dump "userB action times" per line for inspection/debugging.
    out = file('user_interaction.txt', 'w')
    for (key, times) in interaction.items():
        out.write('%s %d' % (key, times))
        out.write('\n')
    for (key, times) in interaction.items():
        user, action = key.split(' ');
        result.append((user, action, times))
    return result
#get the weight for each type of action
def get_action_weight(action):
    """Weight used when scoring popularity: rec=1, click=10, msg=100, unknown=0."""
    weights = {"rec": 1, "click": 10, "msg": 100}
    return weights.get(action, 0)
#trainFile line like: [userA, userB, action_times, action_type(rec|click|msg)]
def compute_user_popularity(trainFile, user_popularity_file):
    # Aggregate a weighted popularity score per user
    # (get_action_weight(action) * times), rank users by descending score,
    # write "user score" lines to user_popularity_file (side effect), and
    # return the ranked (user, popularity) list.
    # NOTE(review): Python 2 only -- uses print statements and file().
    popDict = {}
    rankedscores = []
    result = []
    print "-----compute_user_history_interaction ... "
    interaction = compute_user_history_interaction(trainFile)
    print "-----compute_user_popularity ... "
    for (user, action, times) in interaction[0:len(interaction)]:
        popDict.setdefault(user, 0)
        popDict[user] += get_action_weight(action) * times
    # (popularity, user) tuples sort by score first, then user id; the
    # reverse() yields descending popularity.
    ranked_popularity = [(popularity, user) for (user, popularity) in popDict.items()]
    ranked_popularity.sort()
    ranked_popularity.reverse()
    print "-----ranking_user_popularity ... "
    result = [(user, popularity) for (popularity, user) in ranked_popularity[0:len(ranked_popularity)]]
    print "-----output user_popularity ... "
    out = file(user_popularity_file, 'w')
    for (user, popularity) in result[0:len(result)]:
        out.write('%s %d\n' % (user, popularity))
    print "-----Ending ... "
    return result | bingtianbaihua/MachineLearning | 世纪佳缘会员推荐之投票加权/compute_user_popularity.py | Python | apache-2.0 | 2399
<?php
/**
* User: zach
* Date: 5/6/13
* Time: 11:00 PM
*/
namespace Elasticsearch\Connections;
use Elasticsearch\Common\Exceptions\Curl\CouldNotConnectToHost;
use Elasticsearch\Common\Exceptions\Curl\CouldNotResolveHostException;
use Elasticsearch\Common\Exceptions\Curl\OperationTimeoutException;
use Elasticsearch\Common\Exceptions\TransportException;
use Psr\Log\LoggerInterface;
/**
 * Abstract Class AbstractConnection
 *
 * Base class for concrete connection implementations. Owns the host string,
 * the request/trace loggers, and the liveness bookkeeping (ping timing and
 * consecutive failed-ping count) shared by all transports.
 *
 * @category Elasticsearch
 * @package Elasticsearch\Connections
 * @author Zachary Tong <zachary.tong@elasticsearch.com>
 * @license http://www.apache.org/licenses/LICENSE-2.0 Apache2
 * @link http://elasticsearch.org
 */
abstract class AbstractConnection implements ConnectionInterface
{
    /**
     * @var string Scheme used when composing the host URI
     */
    protected $transportSchema = 'http';

    /**
     * @var string Full host URI (schema://host:port)
     */
    protected $host;

    /**
     * @var LoggerInterface Application log
     */
    protected $log;

    /**
     * @var LoggerInterface Trace log (curl-style request reproductions)
     */
    protected $trace;

    /**
     * @var array Connection-specific parameters
     */
    protected $connectionParams;

    /** @var bool Result of the most recent liveness check */
    protected $isAlive = false;

    /** @var float Timeout (seconds) used for ping/sniff requests */
    private $pingTimeout = 1; //TODO expose this

    /** @var int Unix timestamp of the last ping attempt */
    private $lastPing = 0;

    /** @var int Number of consecutive failed pings */
    private $failedPings = 0;

    /**
     * Perform an HTTP request against this node.
     *
     * @param $method
     * @param $uri
     * @param null $params
     * @param null $body
     * @param array $options
     *
     * @return mixed
     */
    abstract public function performRequest($method, $uri, $params = null, $body = null, $options = array());

    /** @return string */
    abstract public function getTransportSchema();

    /**
     * Constructor
     *
     * @param string $host Host string
     * @param string $port Host port
     * @param array $connectionParams Array of connection-specific parameters
     * @param \Psr\Log\LoggerInterface $log Logger object
     * @param \Psr\Log\LoggerInterface $trace
     */
    public function __construct($host, $port, $connectionParams, LoggerInterface $log, LoggerInterface $trace)
    {
        $this->host = $this->transportSchema . '://' . $host . ':' . $port;
        $this->log = $log;
        $this->trace = $trace;
        $this->connectionParams = $connectionParams;
    }

    /**
     * Log a successful request
     *
     * @param string $method
     * @param string $fullURI
     * @param string $body
     * @param array $headers
     * @param string $statusCode
     * @param string $response
     * @param string $duration
     *
     * @return void
     */
    public function logRequestSuccess($method, $fullURI, $body, $headers, $statusCode, $response, $duration)
    {
        $this->log->debug('Request Body', array($body));
        $this->log->info(
            'Request Success:',
            array(
                'method'    => $method,
                'uri'       => $fullURI,
                'headers'   => $headers,
                'HTTP code' => $statusCode,
                'duration'  => $duration,
            )
        );
        $this->log->debug('Response', array($response));

        // Build the curl command for Trace.
        $curlCommand = $this->buildCurlCommand($method, $fullURI, $body);
        $this->trace->info($curlCommand);
        $this->trace->debug(
            'Response:',
            array(
                'response'  => $response,
                'method'    => $method,
                'uri'       => $fullURI,
                'HTTP code' => $statusCode,
                'duration'  => $duration,
            )
        );
    }

    /**
     * Log a failed request
     *
     * @param string $method
     * @param string $fullURI
     * @param string $body
     * @param array $headers
     * @param string $duration
     * @param null|string $statusCode
     * @param null|string $response
     * @param null|string $exception
     *
     * @return void
     */
    public function logRequestFail(
        $method,
        $fullURI,
        $body,
        $headers,
        $duration,
        $statusCode = null,
        $response = null,
        $exception = null
    ) {
        $this->log->debug('Request Body', array($body));
        // BUG FIX: failures were previously logged under the heading
        // 'Request Success:', making them indistinguishable from successes.
        $this->log->info(
            'Request Failure:',
            array(
                'method'    => $method,
                'uri'       => $fullURI,
                'headers'   => $headers,
                'HTTP code' => $statusCode,
                'duration'  => $duration,
                'error'     => $exception,
            )
        );
        $this->log->debug('Response', array($response));

        // Build the curl command for Trace.
        $curlCommand = $this->buildCurlCommand($method, $fullURI, $body);
        $this->trace->info($curlCommand);
        $this->trace->debug(
            'Response:',
            array(
                'response'  => $response,
                'method'    => $method,
                'uri'       => $fullURI,
                'HTTP code' => $statusCode,
                'duration'  => $duration,
            )
        );
    }

    /**
     * HEAD the node root to check liveness; updates alive/dead bookkeeping.
     *
     * @return bool
     */
    public function ping()
    {
        $this->lastPing = time();
        $options = array('timeout' => $this->pingTimeout);
        try {
            $response = $this->performRequest('HEAD', '', null, null, $options);
        } catch (TransportException $exception) {
            $this->markDead();
            return false;
        }

        if ($response['status'] === 200) {
            $this->markAlive();
            return true;
        } else {
            $this->markDead();
            return false;
        }
    }

    /**
     * Ask the cluster for its node list (used for connection-pool sniffing).
     *
     * @return array
     */
    public function sniff()
    {
        $options = array('timeout' => $this->pingTimeout);
        return $this->performRequest('GET', '/_cluster/nodes', null, null, $options);
    }

    /**
     * @return bool
     */
    public function isAlive()
    {
        return $this->isAlive;
    }

    public function markAlive()
    {
        $this->failedPings = 0;
        $this->isAlive = true;
    }

    public function markDead()
    {
        $this->isAlive = false;
        $this->failedPings += 1;
    }

    /**
     * @return int
     */
    public function getLastPing()
    {
        return $this->lastPing;
    }

    /**
     * @return int
     */
    public function getPingFailures()
    {
        return $this->failedPings;
    }

    /**
     * Translate a cURL error number into the matching typed exception.
     *
     * @param $curlErrorNumber
     * @param $message
     *
     * @throws \Elasticsearch\Common\Exceptions\TransportException
     * @throws \Elasticsearch\Common\Exceptions\Curl\CouldNotResolveHostException
     * @throws \Elasticsearch\Common\Exceptions\Curl\CouldNotConnectToHost
     */
    protected function throwCurlException($curlErrorNumber, $message)
    {
        switch ($curlErrorNumber) {
            case 6:
                throw new CouldNotResolveHostException($message);
            case 7:
                throw new CouldNotConnectToHost($message);
            case 28:
                throw new OperationTimeoutException($message);
            default:
                throw new TransportException($message);
        }
    }

    /**
     * Construct a string cURL command
     *
     * @param string $method HTTP method
     * @param string $uri Full URI of request
     * @param string $body Request body
     *
     * @return string
     */
    private function buildCurlCommand($method, $uri, $body)
    {
        // Append pretty=true so traced commands produce readable output.
        if (strpos($uri, '?') === false) {
            $uri .= '?pretty=true';
        } else {
            // BUG FIX: str_replace() returns the modified string; the result
            // was previously discarded, so the flag was never added here.
            $uri = str_replace('?', '?pretty=true', $uri);
        }

        $curlCommand = 'curl -X' . strtoupper($method);
        $curlCommand .= " '" . $uri . "'";

        if (isset($body) === true && $body !== '') {
            $curlCommand .= " -d '" . $body . "'";
        }

        return $curlCommand;
    }
} | eddiejaoude/elasticsearch-php | src/Elasticsearch/Connections/AbstractConnection.php | PHP | apache-2.0 | 8055
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
 * Class representing a ProductionOrStagingEndpointInfo.
 *
 * AutoRest-generated model describing where a LUIS application version is
 * published (production or staging) and the endpoint details for it. The
 * mapper() metadata drives the runtime serializer; do not edit by hand.
 *
 * @extends models['EndpointInfo']
 */
class ProductionOrStagingEndpointInfo extends models['EndpointInfo'] {
  /**
   * Create a ProductionOrStagingEndpointInfo.
   */
  constructor() {
    super();
  }

  /**
   * Defines the metadata of ProductionOrStagingEndpointInfo
   *
   * Every property is optional and is transported under its serializedName
   * key; all fields are plain strings except the isStaging boolean flag.
   *
   * @returns {object} metadata of ProductionOrStagingEndpointInfo
   *
   */
  mapper() {
    return {
      required: false,
      serializedName: 'ProductionOrStagingEndpointInfo',
      type: {
        name: 'Composite',
        className: 'ProductionOrStagingEndpointInfo',
        modelProperties: {
          versionId: {
            required: false,
            serializedName: 'versionId',
            type: {
              name: 'String'
            }
          },
          isStaging: {
            required: false,
            serializedName: 'isStaging',
            type: {
              name: 'Boolean'
            }
          },
          endpointUrl: {
            required: false,
            serializedName: 'endpointUrl',
            type: {
              name: 'String'
            }
          },
          region: {
            required: false,
            serializedName: 'region',
            type: {
              name: 'String'
            }
          },
          assignedEndpointKey: {
            required: false,
            serializedName: 'assignedEndpointKey',
            type: {
              name: 'String'
            }
          },
          endpointRegion: {
            required: false,
            serializedName: 'endpointRegion',
            type: {
              name: 'String'
            }
          },
          failedRegions: {
            required: false,
            serializedName: 'failedRegions',
            type: {
              name: 'String'
            }
          },
          publishedDateTime: {
            required: false,
            serializedName: 'publishedDateTime',
            type: {
              name: 'String'
            }
          }
        }
      }
    };
  }
}

module.exports = ProductionOrStagingEndpointInfo;
| xingwu1/azure-sdk-for-node | lib/services/luis/authoring/lib/models/productionOrStagingEndpointInfo.js | JavaScript | apache-2.0 | 2,500 |
package com.ccb.project.vo;
import java.sql.Date;
/**
 * Plain value object for a project plan entry: the owning user, the
 * project id, and the begin/end dates of the plan period.
 */
public class planvo {
    /** Id of the user the plan belongs to. */
    private String uid;
    /** Id of the project the plan refers to. */
    private String prjId;
    /** First day covered by the plan. */
    private Date bgnDate;
    /** Last day covered by the plan. */
    private Date endDate;

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public String getPrjId() {
        return prjId;
    }

    public void setPrjId(String prjId) {
        this.prjId = prjId;
    }

    public Date getBgnDate() {
        return bgnDate;
    }

    public void setBgnDate(Date bgnDate) {
        this.bgnDate = bgnDate;
    }

    public Date getEndDate() {
        return endDate;
    }

    public void setEndDate(Date endDate) {
        this.endDate = endDate;
    }
}
| longjl/JFinal_Authority | jfinal-authority/src/main/java/com/ccb/project/vo/planvo.java | Java | apache-2.0 | 772 |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using OpenQA.Selenium;
using OpenQA.Selenium.Internal;
namespace Selenium.Internal.SeleniumEmulation
{
internal static class JavaScriptLibrary
{
    private const string InjectableSeleniumResourceName = "injectableSelenium.js";
    private const string HtmlUtilsResourceName = "htmlutils.js";

    /// <summary>
    /// Invokes a function of the embedded injectable-Selenium "browserbot"
    /// object against the given element, discarding the result.
    /// </summary>
    public static void CallEmbeddedSelenium(IWebDriver driver, string functionName, IWebElement element, params object[] values)
    {
        CallEmbedded(driver, InjectableSeleniumResourceName, "browserbot", functionName, element, values);
    }

    /// <summary>
    /// Invokes a function of the embedded "htmlutils" object against the
    /// given element and returns its result.
    /// </summary>
    public static object CallEmbeddedHtmlUtils(IWebDriver driver, string functionName, IWebElement element, params object[] values)
    {
        return CallEmbedded(driver, HtmlUtilsResourceName, "htmlutils", functionName, element, values);
    }

    /// <summary>
    /// Executes arbitrary JavaScript via the driver, or throws when the
    /// driver cannot execute script.
    /// </summary>
    public static object ExecuteScript(IWebDriver driver, string script, params object[] args)
    {
        IJavaScriptExecutor executor = driver as IJavaScriptExecutor;
        if (executor == null)
        {
            throw new InvalidOperationException("The underlying WebDriver instance does not support executing javascript");
        }

        return executor.ExecuteScript(script, args);
    }

    /// <summary>
    /// Shared implementation for the CallEmbedded* helpers (the two public
    /// methods previously duplicated this logic): loads the script resource,
    /// appends a trampoline that applies <paramref name="functionName"/> on
    /// <paramref name="objectName"/>, and executes it with the element plus
    /// any extra values as arguments.
    /// </summary>
    private static object CallEmbedded(IWebDriver driver, string resourceName, string objectName, string functionName, IWebElement element, object[] values)
    {
        StringBuilder builder = new StringBuilder(ReadScript(resourceName));
        builder.Append("return ").Append(objectName).Append('.').Append(functionName).Append(".apply(").Append(objectName).Append(", arguments);");

        List<object> args = new List<object>();
        args.Add(element);
        args.AddRange(values);

        return ((IJavaScriptExecutor)driver).ExecuteScript(builder.ToString(), args.ToArray());
    }

    /// <summary>
    /// Reads an embedded script resource into a string.
    /// </summary>
    private static string ReadScript(string script)
    {
        string extractedScript = string.Empty;
        Stream resourceStream = ResourceUtilities.GetResourceStream(script, script);
        using (TextReader reader = new StreamReader(resourceStream))
        {
            extractedScript = reader.ReadToEnd();
        }

        return extractedScript;
    }
}
| zerodiv/CTM-Windows-Agent | Continuum_Windows_Testing_Agent/Vendor/selenium/WebdriverBackedSelenium/Internal/SeleniumEmulation/JavaScriptLibrary.cs | C# | apache-2.0 | 2,460 |
package datafactory
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// TriggerRunsClient is the Azure Data Factory V2 management API provides a RESTful set of web services that
// interact with Azure Data Factory V2 services.
type TriggerRunsClient struct {
	BaseClient
}

// NewTriggerRunsClient creates an instance of the TriggerRunsClient client
// against the default Azure Resource Manager endpoint.
func NewTriggerRunsClient(subscriptionID string) TriggerRunsClient {
	return NewTriggerRunsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewTriggerRunsClientWithBaseURI creates an instance of the TriggerRunsClient
// client against a custom endpoint (e.g. an Azure sovereign cloud).
func NewTriggerRunsClientWithBaseURI(baseURI string, subscriptionID string) TriggerRunsClient {
	return TriggerRunsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// QueryByFactory query trigger runs.
// Parameters:
// resourceGroupName - the resource group name.
// factoryName - the factory name.
// filterParameters - parameters to filter the pipeline run.
func (client TriggerRunsClient) QueryByFactory(ctx context.Context, resourceGroupName string, factoryName string, filterParameters RunFilterParameters) (result TriggerRunsQueryResponse, err error) {
	// When tracing is enabled, wrap the call in a span; the deferred closure
	// records the final HTTP status code (-1 if no response was received).
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/TriggerRunsClient.QueryByFactory")
		defer func() {
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Client-side validation of ARM naming constraints and required filter
	// timestamps, performed before any network traffic is issued.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: resourceGroupName,
			Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
				{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
				{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}},
		{TargetValue: factoryName,
			Constraints: []validation.Constraint{{Target: "factoryName", Name: validation.MaxLength, Rule: 63, Chain: nil},
				{Target: "factoryName", Name: validation.MinLength, Rule: 3, Chain: nil},
				{Target: "factoryName", Name: validation.Pattern, Rule: `^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`, Chain: nil}}},
		{TargetValue: filterParameters,
			Constraints: []validation.Constraint{{Target: "filterParameters.LastUpdatedAfter", Name: validation.Null, Rule: true, Chain: nil},
				{Target: "filterParameters.LastUpdatedBefore", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
		return result, validation.NewError("datafactory.TriggerRunsClient", "QueryByFactory", err.Error())
	}

	// Standard autorest three-step pipeline: prepare, send, respond.
	req, err := client.QueryByFactoryPreparer(ctx, resourceGroupName, factoryName, filterParameters)
	if err != nil {
		err = autorest.NewErrorWithError(err, "datafactory.TriggerRunsClient", "QueryByFactory", nil, "Failure preparing request")
		return
	}

	resp, err := client.QueryByFactorySender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "datafactory.TriggerRunsClient", "QueryByFactory", resp, "Failure sending request")
		return
	}

	result, err = client.QueryByFactoryResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "datafactory.TriggerRunsClient", "QueryByFactory", resp, "Failure responding to request")
	}

	return
}
// QueryByFactoryPreparer prepares the QueryByFactory request.
func (client TriggerRunsClient) QueryByFactoryPreparer(ctx context.Context, resourceGroupName string, factoryName string, filterParameters RunFilterParameters) (*http.Request, error) {
	// URL path segments; values are URL-escaped by autorest.Encode.
	pathParameters := map[string]interface{}{
		"factoryName":       autorest.Encode("path", factoryName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2018-06-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// POST the filter parameters as a JSON body to the queryTriggerRuns
	// resource under the target factory.
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns", pathParameters),
		autorest.WithJSON(filterParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// QueryByFactorySender sends the QueryByFactory request. The method will close the
// http.Response Body if it receives an error.
func (client TriggerRunsClient) QueryByFactorySender(req *http.Request) (*http.Response, error) {
	// Combine the context's send decorators with ARM provider-registration
	// retry before dispatching through the client.
	sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
	return autorest.SendWithSender(client, req, sd...)
}
// QueryByFactoryResponder handles the response to the QueryByFactory request. The method always
// closes the http.Response Body.
func (client TriggerRunsClient) QueryByFactoryResponder(resp *http.Response) (result TriggerRunsQueryResponse, err error) {
	// Only HTTP 200 is treated as success; the JSON body is unmarshalled
	// into the result before the body is closed.
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
| enj/origin | vendor/github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory/triggerruns.go | GO | apache-2.0 | 6,162 |
// Copyright 2015 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tumble
import (
"bytes"
"context"
"crypto/sha1"
"encoding/binary"
"encoding/gob"
"fmt"
"math"
"reflect"
"strings"
"sync"
"time"
"go.chromium.org/luci/appengine/meta"
"go.chromium.org/luci/common/clock"
"go.chromium.org/luci/common/logging"
"go.chromium.org/luci/common/tsmon/field"
"go.chromium.org/luci/common/tsmon/metric"
ds "go.chromium.org/luci/gae/service/datastore"
"go.chromium.org/luci/gae/service/info"
)
// registry maps a Mutation implementation's reflect.Type string (as produced
// by reflect.Type.String()) to its type, so that serialized mutations can be
// reconstructed by name in GetMutation. Populated via Register at init time.
var registry = map[string]reflect.Type{}

// metricCreated counts mutations written to datastore, broken down by the
// namespace of each mutation's target root.
var metricCreated = metric.NewCounter(
	"luci/tumble/mutations/created",
	"The number of mutations created in tumble",
	nil,
	field.String("namespace"),
)
// Register allows |mut| to be played by the tumble backend. This should be
// called at init() time once for every Mutation implementation.
//
// This will also gob.Register your mutation implementation.
//
// Example:
//   Register((*MyMutationImpl)(nil))
func Register(mut Mutation) {
	typ := reflect.TypeOf(mut)
	registry[typ.String()] = typ
	gob.Register(mut)
}
// Mutation is the interface that your tumble mutations must implement.
//
// Mutation implementations can be registered with the Register function.
// Implementations must also be encodable by encoding/gob, since tumble
// serializes them into datastore entities.
type Mutation interface {
	// Root returns a datastore.Key which will be used to derive the Key for the
	// entity group which this Mutation will operate on. This is used to batch
	// together Entries for more efficient processing.
	//
	// Returning nil is an error.
	Root(c context.Context) *ds.Key

	// RollForward performs the action of the Mutation.
	//
	// It is only considered successful if it returns nil. If it returns non-nil,
	// then it will be retried at a later time. If it never returns nil, then it
	// will never be flushed from tumble's queue, and you'll have to manually
	// delete it or fix the code so that it can be handled without error.
	//
	// This method runs inside of a single-group transaction. It must modify only
	// the entity group specified by Root().
	//
	// As a side effect, RollForward may return new arbitrary Mutations. These
	// will be committed in the same transaction as RollForward.
	//
	// The context contains an implementation of "luci/gae/service/datastore",
	// using the "luci/gae/filter/txnBuf" transaction buffer. This means that
	// all functionality (including additional transactions) is available, with
	// the limitations mentioned by that package (notably, no cursors are
	// allowed).
	RollForward(c context.Context) ([]Mutation, error)
}
// DelayedMutation is a Mutation which allows you to defer its processing
// until a certain absolute time.
//
// As a side effect, tumble will /mostly/ process items in their chronological
// ProcessAfter order, instead of the undefined order.
//
// Your tumble configuration must have DelayedMutations set, and you must have
// added the appropriate index to use these. If DelayedMutations is not set,
// then tumble will ignore the ProcessAfter and HighPriorty values here, and
// process mutations as quickly as possible in no particular order.
//
// Scheduling honors these values in newRealMutation.
type DelayedMutation interface {
	Mutation

	// ProcessAfter will be called once when scheduling this Mutation. The
	// mutation will be recorded to datastore immediately, but tumble will skip it
	// for processing until at least the time that's returned here. Multiple calls
	// to this method should always return the same time.
	//
	// A Time value in the past will get reset to "next available time slot",
	// unless HighPriority() returns true.
	ProcessAfter() time.Time

	// HighPriority indicates that this mutation should be processed before
	// others, if possible, and must be set in conjunction with a ProcessAfter
	// timestamp that occurs in the past.
	//
	// Tumble works by processing Mutations in the order of their creation, or
	// ProcessAfter times, whichever is later. If HighPriority is true, then a
	// ProcessAfter time in the past will take precedence over Mutations which
	// may actually have been recorded after this one, in the event that tumble
	// is processing tasks slower than they're being created.
	HighPriority() bool
}
// realMutation is the datastore entity that wraps a single serialized user
// Mutation, together with the scheduling metadata tumble needs to shard and
// order it.
type realMutation struct {
	// TODO(riannucci): add functionality to luci/gae/service/datastore so that
	// GetMeta/SetMeta may be overridden by the struct.
	_kind  string  `gae:"$kind,tumble.Mutation"`
	ID     string  `gae:"$id"`
	Parent *ds.Key `gae:"$parent"`

	// ExpandedShard is derived from a sha1 of the target root key, spreading
	// mutations uniformly over the full int64 range (see newRealMutation).
	ExpandedShard int64
	// ProcessAfter is the earliest time this mutation should be processed.
	ProcessAfter time.Time
	// TargetRoot is the root entity group this mutation will operate on.
	TargetRoot *ds.Key

	// Version is the comparable app version that created this mutation.
	Version string
	// Type is the registered reflect.Type string of the wrapped Mutation.
	Type string
	// Data is the gob-encoded Mutation payload.
	Data []byte `gae:",noindex"`
}
// shard maps this mutation onto one of the configured task shards by
// bucketing its ExpandedShard value (already spread over the full int64
// range) uniformly across the namespace's shard count.
func (r *realMutation) shard(cfg *Config) taskShard {
	shardCount := cfg.TotalShardCount(r.TargetRoot.Namespace())

	expandedShardsPerShard := math.MaxUint64 / shardCount
	// Shift from [MinInt64, MaxInt64] into [0, MaxUint64] before dividing.
	ret := uint64(r.ExpandedShard-math.MinInt64) / expandedShardsPerShard
	// account for rounding errors on the last shard.
	if ret >= shardCount {
		ret = shardCount - 1
	}

	return taskShard{ret, mkTimestamp(cfg, r.ProcessAfter)}
}
// putMutations persists muts to datastore as realMutation entities parented
// under fromRoot, returning the set of task shards that now have pending
// work and the keys of the written entities.
//
// IDs embed the entity group version and round index so that a replay of the
// same transaction produces the same keys.
func putMutations(c context.Context, cfg *Config, fromRoot *ds.Key, muts []Mutation, round uint64) (
	shardSet map[taskShard]struct{}, mutKeys []*ds.Key, err error) {
	if len(muts) == 0 {
		return
	}

	version, err := meta.GetEntityGroupVersion(c, fromRoot)
	if err != nil {
		return
	}

	now := clock.Now(c).UTC()

	shardSet = map[taskShard]struct{}{}
	toPut := make([]*realMutation, len(muts))
	mutKeys = make([]*ds.Key, len(muts))
	for i, m := range muts {
		id := fmt.Sprintf("%016x_%08x_%08x", version, round, i)
		toPut[i], err = newRealMutation(c, cfg, id, fromRoot, m, now)
		if err != nil {
			logging.Errorf(c, "error creating real mutation for %v: %s", m, err)
			return
		}
		mutKeys[i] = ds.KeyForObj(c, toPut[i])

		shardSet[toPut[i].shard(cfg)] = struct{}{}
	}

	if err = ds.Put(c, toPut); err != nil {
		logging.Errorf(c, "error putting %d new mutations: %s", len(toPut), err)
	} else {
		// Only count mutations that were actually written.
		metricCreated.Add(c, int64(len(toPut)), fromRoot.Namespace())
	}
	return
}
// appVersion lazily caches the comparable application version string for the
// lifetime of the process.
var appVersion = struct {
	sync.Once
	version string
}{}

// getAppVersion returns the app.yaml-level version of the running app,
// computed once and cached.
func getAppVersion(c context.Context) string {
	appVersion.Do(func() {
		appVersion.version = info.VersionID(c)

		// AppEngine version is <app-yaml-version>.<unique-upload-id>
		//
		// The upload ID prevents version consistency between different AppEngine
		// modules, which will necessarily have different IDs, so we base our
		// comparable version off of the app.yaml-supplied value.
		if idx := strings.LastIndex(appVersion.version, "."); idx > 0 {
			appVersion.version = appVersion.version[:idx]
		}
	})

	return appVersion.version
}
// newRealMutation serializes m into a realMutation entity with the given id
// and parent key. The effective ProcessAfter time is `now`, unless delayed
// mutations are enabled and m requests a later (or high-priority earlier)
// time. Returns an error if m's type was never Register()ed or cannot be
// gob-encoded.
func newRealMutation(c context.Context, cfg *Config, id string, parent *ds.Key, m Mutation, now time.Time) (*realMutation, error) {
	when := now
	if cfg.DelayedMutations {
		if dm, ok := m.(DelayedMutation); ok {
			targetTime := dm.ProcessAfter()
			if dm.HighPriority() || targetTime.After(now) {
				when = targetTime
			}
		}
	}

	t := reflect.TypeOf(m).String()
	if _, ok := registry[t]; !ok {
		return nil, fmt.Errorf("Attempting to add unregistered mutation %v: %v", t, m)
	}

	buf := &bytes.Buffer{}
	err := gob.NewEncoder(buf).Encode(m)
	if err != nil {
		return nil, err
	}

	// Hash the root key to spread mutations uniformly across the int64
	// ExpandedShard space (see realMutation.shard).
	root := m.Root(c).Root()

	hash := sha1.Sum([]byte(root.Encode()))
	eshard := int64(binary.BigEndian.Uint64(hash[:]))

	return &realMutation{
		ID:     id,
		Parent: parent,

		ExpandedShard: eshard,
		ProcessAfter:  when,
		TargetRoot:    root,

		Version: getAppVersion(c),
		Type:    t,
		Data:    buf.Bytes(),
	}, nil
}
// GetMutation decodes the gob-serialized payload back into its registered
// concrete Mutation type. Fails if the type was not Register()ed in this
// process.
func (r *realMutation) GetMutation() (Mutation, error) {
	typ, ok := registry[r.Type]
	if !ok {
		return nil, fmt.Errorf("unable to load reflect.Type for %q", r.Type)
	}

	ret := reflect.New(typ)
	if err := gob.NewDecoder(bytes.NewBuffer(r.Data)).DecodeValue(ret); err != nil {
		return nil, err
	}

	return ret.Elem().Interface().(Mutation), nil
}
| luci/luci-go | tumble/model_mutation.go | GO | apache-2.0 | 8,301 |
package extractor // import "code.cloudfoundry.org/archiver/extractor"
| getcarina/dvm | vendor/github.com/pivotal-golang/archiver/extractor/package.go | GO | apache-2.0 | 71 |
<?php
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/* LANGUAGE */
// Latin-American Spanish (es-la) strings for the CometChat file-transfer
// plugin, indexed by message id. The values are user-facing runtime text;
// keep any embedded HTML markup intact when editing translations.
$filetransfer_language[0] = 'Enviar un archivo';
$filetransfer_language[1] = '¿Qué archivo te gustaría enviar?';
$filetransfer_language[2] = 'Selecciona un archivo mediante el botón de abajo.';
$filetransfer_language[3] = '<b>ADVERTENCIA:</b> No envíes material con copyright para el cual no poseas los derechos o tengas permiso del propietario.';
$filetransfer_language[4] = 'Enviar archivo';
$filetransfer_language[5] = 'te ha enviado un archivo';
$filetransfer_language[6] = 'Clic aquí para descargar el archivo';
$filetransfer_language[7] = 'ha enviado un archivo con éxito';
$filetransfer_language[8] = 'Archivo enviado con éxito. Cerrando ventana.';
$filetransfer_language[9] = 'ha compartido un archivo';
$filetransfer_language[10] = 'Lo sentimos, no podemos encontrar el archivo.';
$filetransfer_language[11] = 'Guardar';
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// | networksoft/erp.vitale | cometchat/plugins/filetransfer/lang/es-la.php | PHP | apache-2.0 | 1,138 |
(function() {
  // Controller backing the album view: exposes the fixture album data and
  // the shared SongPlayer service to the template.
  function AlbumCtrl(Fixtures, SongPlayer) {
    this.albumData = [Fixtures.getAlbum()];
    this.songPlayer = SongPlayer;
  }

  angular
    .module('blocJams')
    .controller('AlbumCtrl', ['Fixtures', 'SongPlayer', AlbumCtrl]);
})();
| javierforero/bloc-jams-angular | dist/scripts/controllers/AlbumCtrl.js | JavaScript | apache-2.0 | 303 |
/**
*
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.mail;
import javax.mail.Address;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.URLName;
/**
 * A {@code Transport} that accepts every connection attempt and silently
 * discards every message handed to it; useful as a do-nothing default
 * transport.
 *
 * @version $Rev$ $Date$
 */
public class NullTransport extends Transport {
    public NullTransport(Session session, URLName urlName) {
        super(session, urlName);
    }

    // Intentionally a no-op: messages sent through this transport go nowhere.
    public void sendMessage(Message message, Address[] addresses) throws MessagingException {
        // do nothing
    }

    // Always reports success so the session treats the transport as connected.
    protected boolean protocolConnect(String host, int port, String user, String password) throws MessagingException {
        return true; // always connect
    }
}
| meetdestiny/geronimo-trader | modules/mail/src/java/org/apache/geronimo/mail/NullTransport.java | Java | apache-2.0 | 1,308 |
/*
* Copyright 2018 Rundeck, Inc. (http://rundeck.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtolabs.rundeck.core.cluster;
import org.rundeck.app.spi.AppService;
/**
 * Provides info about cluster configuration
 */
public interface ClusterInfoService
        extends AppService
{
    /**
     * True if cluster mode is enabled
     *
     * @return whether this installation is running as part of a cluster
     */
    boolean isClusterModeEnabled();

    /**
     * This cluster member's UUID
     *
     * @return the unique identifier of this server within the cluster
     */
    String getServerUUID();
}
| variacode/rundeck | core/src/main/java/com/dtolabs/rundeck/core/cluster/ClusterInfoService.java | Java | apache-2.0 | 988 |
package org.ovirt.engine.ui.uicommon.models.datacenters;
import java.util.Collections;
import org.ovirt.engine.core.compat.*;
import org.ovirt.engine.ui.uicompat.*;
import org.ovirt.engine.core.common.businessentities.*;
import org.ovirt.engine.core.common.vdscommands.*;
import org.ovirt.engine.core.common.queries.*;
import org.ovirt.engine.core.common.action.*;
import org.ovirt.engine.ui.frontend.*;
import org.ovirt.engine.ui.uicommon.*;
import org.ovirt.engine.ui.uicommon.models.*;
import org.ovirt.engine.core.common.*;
import org.ovirt.engine.ui.uicommon.models.common.*;
import org.ovirt.engine.ui.uicompat.*;
import org.ovirt.engine.core.common.interfaces.*;
import org.ovirt.engine.core.common.businessentities.*;
import org.ovirt.engine.core.common.queries.*;
import org.ovirt.engine.ui.uicommon.*;
import org.ovirt.engine.ui.uicommon.models.*;
@SuppressWarnings("unused")
public class DataCenterNetworkListModel extends SearchableListModel implements IFrontendMultipleQueryAsyncCallback
{
	// Name of the built-in management network.
	private static final String ENGINE_NETWORK = "engine";

	// Command bound to the "New" action in the view.
	private UICommand privateNewCommand;

	public UICommand getNewCommand()
	{
		return privateNewCommand;
	}

	private void setNewCommand(UICommand value)
	{
		privateNewCommand = value;
	}

	// Command bound to the "Edit" action in the view.
	private UICommand privateEditCommand;

	public UICommand getEditCommand()
	{
		return privateEditCommand;
	}

	private void setEditCommand(UICommand value)
	{
		privateEditCommand = value;
	}

	// Command bound to the "Remove" action in the view.
	private UICommand privateRemoveCommand;

	public UICommand getRemoveCommand()
	{
		return privateRemoveCommand;
	}

	private void setRemoveCommand(UICommand value)
	{
		privateRemoveCommand = value;
	}
	// The data center whose networks this model lists; storage is delegated
	// to the base class entity slot, narrowed here to storage_pool.
	public storage_pool getEntity()
	{
		return (storage_pool)super.getEntity();
	}

	public void setEntity(storage_pool value)
	{
		super.setEntity(value);
	}
	// The currently displayed dialog model, if any; setters raise
	// PropertyChanged so the view can react when a dialog opens or closes.
	private Model window;

	public Model getWindow()
	{
		return window;
	}

	public void setWindow(Model value)
	{
		if (window != value)
		{
			window = value;
			OnPropertyChanged(new PropertyChangedEventArgs("Window"));
		}
	}

	// The currently displayed confirmation dialog model, if any.
	private Model confirmWindow;

	public Model getConfirmWindow()
	{
		return confirmWindow;
	}

	public void setConfirmWindow(Model value)
	{
		if (confirmWindow != value)
		{
			confirmWindow = value;
			OnPropertyChanged(new PropertyChangedEventArgs("ConfirmWindow"));
		}
	}
private java.util.ArrayList<VDSGroup> privateClusterList;
public java.util.ArrayList<VDSGroup> getClusterList()
{
return privateClusterList;
}
public void setClusterList(java.util.ArrayList<VDSGroup> value)
{
privateClusterList = value;
}
private java.util.ArrayList<SelectionTreeNodeModel> privateSelectionNodeList;
public java.util.ArrayList<SelectionTreeNodeModel> getSelectionNodeList()
{
return privateSelectionNodeList;
}
public void setSelectionNodeList(java.util.ArrayList<SelectionTreeNodeModel> value)
{
privateSelectionNodeList = value;
}
public DataCenterNetworkListModel()
{
setTitle("Logical Networks");
setNewCommand(new UICommand("New", this));
setEditCommand(new UICommand("Edit", this));
setRemoveCommand(new UICommand("Remove", this));
UpdateActionAvailability();
}
@Override
protected void OnEntityChanged()
{
super.OnEntityChanged();
getSearchCommand().Execute();
}
@Override
public void Search()
{
if (getEntity() != null)
{
super.Search();
}
}
@Override
protected void SyncSearch()
{
super.SyncSearch();
AsyncQuery _asyncQuery = new AsyncQuery();
_asyncQuery.setModel(this);
_asyncQuery.asyncCallback = new INewAsyncCallback() { public void OnSuccess(Object model, Object ReturnValue)
{
SearchableListModel searchableListModel = (SearchableListModel)model;
searchableListModel.setItems((java.util.ArrayList<network>)((VdcQueryReturnValue)ReturnValue).getReturnValue());
}};
Frontend.RunQuery(VdcQueryType.GetAllNetworks, new GetAllNetworkQueryParamenters(getEntity().getId()), _asyncQuery);
}
@Override
protected void AsyncSearch()
{
super.AsyncSearch();
setAsyncResult(Frontend.RegisterQuery(VdcQueryType.GetAllNetworks, new GetAllNetworkQueryParamenters(getEntity().getId())));
setItems(getAsyncResult().getData());
}
public void remove()
{
if (getWindow() != null)
{
return;
}
ConfirmationModel model = new ConfirmationModel();
setWindow(model);
model.setTitle("Remove Logical Network(s)");
model.setHashName("remove_logical_network");
model.setMessage("Logical Network(s)");
java.util.ArrayList<String> list = new java.util.ArrayList<String>();
for (network a : Linq.<network>Cast(getSelectedItems()))
{
list.add(a.getname());
}
model.setItems(list);
UICommand tempVar = new UICommand("OnRemove", this);
tempVar.setTitle("OK");
tempVar.setIsDefault(true);
model.getCommands().add(tempVar);
UICommand tempVar2 = new UICommand("Cancel", this);
tempVar2.setTitle("Cancel");
tempVar2.setIsCancel(true);
model.getCommands().add(tempVar2);
}
public void OnRemove()
{
java.util.ArrayList<VdcActionParametersBase> pb = new java.util.ArrayList<VdcActionParametersBase>();
for (network a : Linq.<network>Cast(getSelectedItems()))
{
pb.add((VdcActionParametersBase)new AddNetworkStoragePoolParameters(getEntity().getId(), a));
}
Frontend.RunMultipleAction(VdcActionType.RemoveNetwork, pb);
Cancel();
}
public void Edit()
{
network network = (network)getSelectedItem();
if (getWindow() != null)
{
return;
}
DataCenterNetworkModel model = new DataCenterNetworkModel();
setWindow(model);
model.setTitle("Edit Logical Network");
model.setHashName("edit_logical_network");
model.getName().setEntity(network.getname());
model.getDescription().setEntity(network.getdescription());
model.setIsStpEnabled(network.getstp());
model.setHasVLanTag(network.getvlan_id() != null);
model.getVLanTag().setEntity((network.getvlan_id() == null ? 0 : network.getvlan_id()));
setClusterList(DataProvider.GetClusterList(getEntity().getId()));
setSelectionNodeList(new java.util.ArrayList<SelectionTreeNodeModel>());
java.util.ArrayList<VdcQueryParametersBase> parametersList = new java.util.ArrayList<VdcQueryParametersBase>();
java.util.ArrayList<VdcQueryType> queryTypeList = new java.util.ArrayList<VdcQueryType>();
for (VDSGroup vdsGroup : getClusterList())
{
queryTypeList.add(VdcQueryType.GetAllNetworksByClusterId);
parametersList.add(new VdsGroupQueryParamenters(vdsGroup.getID()));
SelectionTreeNodeModel tempVar = new SelectionTreeNodeModel();
tempVar.setIsSelectedNullable(false);
tempVar.setEntity(vdsGroup);
tempVar.setDescription(vdsGroup.getname());
getSelectionNodeList().add(tempVar);
}
Frontend.RunMultipleQueries(queryTypeList, parametersList, this);
model.setDetachAllCommand(new UICommand("DetachClusters", this));
//cannot detach engine networks from clusters
if (StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK))
{
for (SelectionTreeNodeModel nodeModel : getSelectionNodeList())
{
nodeModel.setIsChangable(false);
}
model.getDetachAllCommand().setIsAvailable(false);
model.getName().setIsChangable(false);
model.setMessage("Cannot detach Management Network from Clusters");
}
}
public void New()
{
if (getWindow() != null)
{
return;
}
DataCenterNetworkModel model = new DataCenterNetworkModel();
setWindow(model);
model.setTitle("New Logical Network");
model.setHashName("new_logical_network");
model.setIsNew(true);
model.setClusters(DataProvider.GetClusterList(getEntity().getId()));
UICommand tempVar = new UICommand("OnSave", this);
tempVar.setTitle("OK");
tempVar.setIsDefault(true);
model.getCommands().add(tempVar);
model.setDetachAllCommand(new UICommand("DetachClusters", this));
model.getDetachAllAvailable().setEntity(false);
UICommand tempVar2 = new UICommand("Cancel", this);
tempVar2.setTitle("Cancel");
tempVar2.setIsCancel(true);
model.getCommands().add(tempVar2);
}
public void OnSave()
{
DataCenterNetworkModel model = (DataCenterNetworkModel)getWindow();
if (getEntity() == null || (!model.getIsNew() && getSelectedItem() == null))
{
Cancel();
return;
}
model.setcurrentNetwork(model.getIsNew() ? new network() : (network)Cloner.clone(getSelectedItem()));
if (!model.Validate())
{
return;
}
//Save changes.
model.getcurrentNetwork().setstorage_pool_id(getEntity().getId());
model.getcurrentNetwork().setname((String)model.getName().getEntity());
model.getcurrentNetwork().setstp(model.getIsStpEnabled());
model.getcurrentNetwork().setdescription((String)model.getDescription().getEntity());
model.getcurrentNetwork().setvlan_id(null);
if (model.getHasVLanTag())
{
model.getcurrentNetwork().setvlan_id(Integer.parseInt(model.getVLanTag().getEntity().toString()));
}
model.setnewClusters(new java.util.ArrayList<VDSGroup>());
for (SelectionTreeNodeModel selectionTreeNodeModel : model.getClusterTreeNodes())
{
//C# TO JAVA CONVERTER TODO TASK: Comparisons involving nullable type instances are not converted to null-value logic:
if (selectionTreeNodeModel.getIsSelectedNullable() != null && selectionTreeNodeModel.getIsSelectedNullable().equals(true))
{
model.getnewClusters().add((VDSGroup)selectionTreeNodeModel.getEntity());
}
}
java.util.ArrayList<VDSGroup> detachNetworkFromClusters = Linq.Except(model.getOriginalClusters(), model.getnewClusters());
java.util.ArrayList<VdcActionParametersBase> actionParameters = new java.util.ArrayList<VdcActionParametersBase>();
for (VDSGroup detachNetworkFromCluster : detachNetworkFromClusters)
{
actionParameters.add((VdcActionParametersBase) new AttachNetworkToVdsGroupParameter(detachNetworkFromCluster, model.getcurrentNetwork()));
}
model.StartProgress(null);
Frontend.RunMultipleAction(VdcActionType.DetachNetworkToVdsGroup, actionParameters,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void Executed(FrontendMultipleActionAsyncResult result) {
DataCenterNetworkModel networkModel = (DataCenterNetworkModel)result.getState();
network network = networkModel.getcurrentNetwork();
VdcReturnValueBase returnValue;
if (networkModel.getIsNew())
{
returnValue = Frontend.RunAction(VdcActionType.AddNetwork, new AddNetworkStoragePoolParameters(getEntity().getId(), network));
}
else
{
if ((Boolean)networkModel.getIsEnabled().getEntity())
{
returnValue = Frontend.RunAction(VdcActionType.UpdateNetwork, new AddNetworkStoragePoolParameters(getEntity().getId(), network));
}
else
{
VdcReturnValueBase tempVar = new VdcReturnValueBase();
tempVar.setSucceeded(true);
returnValue = tempVar;
}
}
if (returnValue != null && returnValue.getSucceeded())
{
Guid networkId = networkModel.getIsNew() ? (Guid)returnValue.getActionReturnValue() : network.getId();
java.util.ArrayList<VDSGroup> attachNetworkToClusters = Linq.Except(networkModel.getnewClusters(), networkModel.getOriginalClusters());
java.util.ArrayList<VdcActionParametersBase> actionParameters1 = new java.util.ArrayList<VdcActionParametersBase>();
for (VDSGroup attachNetworkToCluster : attachNetworkToClusters)
{
network tempVar2 = new network();
tempVar2.setId(networkId);
tempVar2.setname(network.getname());
actionParameters1.add((VdcActionParametersBase) new AttachNetworkToVdsGroupParameter(attachNetworkToCluster, tempVar2));
}
Frontend.RunMultipleAction(VdcActionType.AttachNetworkToVdsGroup, actionParameters1);
}
if (returnValue != null && returnValue.getSucceeded())
{
Cancel();
}
networkModel.StopProgress();
}
}, model);
}
public void DetachClusters()
{
ConfirmationModel confirmModel = new ConfirmationModel();
setConfirmWindow(confirmModel);
confirmModel.setTitle("Detach Network from ALL Clusters");
confirmModel.setHashName("detach_network_from_all_clusters");
confirmModel.setMessage("You are about to detach the Network from all of the Clusters to which it is currentlyattached.\nAs a result, the Clusters' Hosts might become unreachable.\n\nAre you sure you want to continue?");
confirmModel.getLatch().setIsAvailable(true);
UICommand tempVar = new UICommand("OnDetachClusters", this);
tempVar.setTitle("OK");
tempVar.setIsDefault(true);
confirmModel.getCommands().add(tempVar);
UICommand tempVar2 = new UICommand("CancelConfirmation", this);
tempVar2.setTitle("Cancel");
tempVar2.setIsCancel(true);
confirmModel.getCommands().add(tempVar2);
}
public void CancelConfirmation()
{
setConfirmWindow(null);
}
public void OnDetachClusters()
{
ConfirmationModel confirmationModel = (ConfirmationModel)getConfirmWindow();
if (!confirmationModel.Validate())
{
return;
}
DataCenterNetworkModel model = (DataCenterNetworkModel)getWindow();
network network = (network)getSelectedItem();
java.util.ArrayList<VdcActionParametersBase> actionParameters = new java.util.ArrayList<VdcActionParametersBase>();
for (SelectionTreeNodeModel selectionTreeNodeModel : model.getClusterTreeNodes())
{
//C# TO JAVA CONVERTER TODO TASK: Comparisons involving nullable type instances are not converted to null-value logic:
if (selectionTreeNodeModel.getIsSelectedNullable() != null && selectionTreeNodeModel.getIsSelectedNullable().equals(true))
{
selectionTreeNodeModel.setIsSelectedNullable(false);
actionParameters.add((VdcActionParametersBase)new AttachNetworkToVdsGroupParameter((VDSGroup)selectionTreeNodeModel.getEntity(), network));
}
}
java.util.ArrayList<VdcReturnValueBase> returnValueList = Frontend.RunMultipleAction(VdcActionType.DetachNetworkToVdsGroup, actionParameters);
boolean isSucceded = true;
for (VdcReturnValueBase vdcReturnValueBase : returnValueList)
{
isSucceded &= vdcReturnValueBase.getSucceeded();
}
CancelConfirmation();
if (isSucceded)
{
model.setOriginalClusters(new java.util.ArrayList<VDSGroup>());
model.getIsEnabled().setEntity(true);
model.getDetachAllAvailable().setEntity(!(Boolean)model.getIsEnabled().getEntity());
}
else
{
Cancel();
}
}
public void Cancel()
{
setWindow(null);
}
@Override
protected void OnSelectedItemChanged()
{
super.OnSelectedItemChanged();
UpdateActionAvailability();
}
@Override
protected void SelectedItemsChanged()
{
super.SelectedItemsChanged();
UpdateActionAvailability();
}
private void UpdateActionAvailability()
{
java.util.List tempVar = getSelectedItems();
java.util.ArrayList selectedItems = (java.util.ArrayList)((tempVar != null) ? tempVar : new java.util.ArrayList());
boolean anyEngine = false;
for (Object item : selectedItems)
{
network network = (network)item;
if (StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK))
{
anyEngine = true;
break;
}
}
getEditCommand().setIsExecutionAllowed(selectedItems.size() == 1);
getRemoveCommand().setIsExecutionAllowed(selectedItems.size() > 0 && !anyEngine);
}
@Override
public void ExecuteCommand(UICommand command)
{
super.ExecuteCommand(command);
if (command == getNewCommand())
{
New();
}
else if (command == getEditCommand())
{
Edit();
}
else if (command == getRemoveCommand())
{
remove();
}
else if (StringHelper.stringsEqual(command.getName(), "OnSave"))
{
OnSave();
}
else if (StringHelper.stringsEqual(command.getName(), "Cancel"))
{
Cancel();
}
else if (StringHelper.stringsEqual(command.getName(), "OnRemove"))
{
OnRemove();
}
else if(StringHelper.stringsEqual(command.getName(), "DetachClusters"))
{
DetachClusters();
}
else if(StringHelper.stringsEqual(command.getName(), "OnDetachClusters"))
{
OnDetachClusters();
}
else if (StringHelper.stringsEqual(command.getName(), "CancelConfirmation"))
{
CancelConfirmation();
}
}
public void Executed(FrontendMultipleQueryAsyncResult result)
{
network network = (network)getSelectedItem();
java.util.List<VdcQueryReturnValue> returnValueList = result.getReturnValues();
DataCenterNetworkModel model = (DataCenterNetworkModel)getWindow();
java.util.ArrayList<network> clusterNetworkList = null;
boolean networkHasAttachedClusters = false;
for (int i = 0; i < returnValueList.size(); i++)
{
VdcQueryReturnValue returnValue = returnValueList.get(i);
if (returnValue.getSucceeded() && returnValue.getReturnValue() != null)
{
clusterNetworkList = (java.util.ArrayList<network>)returnValue.getReturnValue();
for (network clusterNetwork : clusterNetworkList)
{
if (clusterNetwork.getId().equals(network.getId()))
{
model.getOriginalClusters().add((VDSGroup)getSelectionNodeList().get(i).getEntity());
getSelectionNodeList().get(i).setIsSelectedNullable(true);
networkHasAttachedClusters = true;
break;
}
}
}
}
if (networkHasAttachedClusters)
{
model.getIsEnabled().setEntity(false);
if (!StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK))
{
model.getDetachAllAvailable().setEntity(!(Boolean)model.getIsEnabled().getEntity());
}
}
model.setClusterTreeNodes(getSelectionNodeList());
if (StringHelper.stringsEqual(network.getname(), ENGINE_NETWORK) && getSelectionNodeList().size() > 0)
{
UICommand tempVar = new UICommand("Cancel", this);
tempVar.setTitle("Close");
tempVar.setIsDefault(true);
tempVar.setIsCancel(true);
model.getCommands().add(tempVar);
}
else
{
UICommand tempVar2 = new UICommand("OnSave", this);
tempVar2.setTitle("OK");
tempVar2.setIsDefault(true);
model.getCommands().add(tempVar2);
UICommand tempVar3 = new UICommand("Cancel", this);
tempVar3.setTitle("Cancel");
tempVar3.setIsCancel(true);
model.getCommands().add(tempVar3);
}
}
} | raksha-rao/gluster-ovirt | frontend/webadmin/modules/uicommon/src/main/java/org/ovirt/engine/ui/uicommon/models/datacenters/DataCenterNetworkListModel.java | Java | apache-2.0 | 17,841 |
package org.fusesource.fabric.openshift.commands.support;
import com.openshift.client.IOpenShiftConnection;
public class OpenshiftConnectionListener {
private IOpenShiftConnection connection;
public IOpenShiftConnection getConnection() {
return connection;
}
public void bindConnection(IOpenShiftConnection connection) {
this.connection = connection;
}
public void unbindConnection(IOpenShiftConnection connection) {
this.connection = null;
}
}
| janstey/fuse | fabric/fabric-openshift/src/main/java/org/fusesource/fabric/openshift/commands/support/OpenshiftConnectionListener.java | Java | apache-2.0 | 503 |
package com.mit.dstore.ui.chat;
import java.util.List;
/**
 * @Description Data holder for one photo album (bucket) in the image picker.
 * @author Nana
 * @date 2014-5-9
 */
public class ImageBucket {
    public int count = 0; // number of images in this album
    public String bucketName; // album (bucket) name
    public List<ImageItem> imageList; // the images contained in this album
}
| luchuangbin/test1 | src/com/mit/dstore/ui/chat/ImageBucket.java | Java | apache-2.0 | 279 |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.mail;
import com.thoughtworks.go.util.command.StreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
/**
 * A {@code ProcessOutputStreamConsumer} that sends both the standard and the
 * error stream of a process straight to {@code System.out}, line by line.
 */
public class SysOutStreamConsumer extends ProcessOutputStreamConsumer {
    public SysOutStreamConsumer() {
        // Use one stdout-printing consumer for each of the two streams.
        super(new StdOutConsumer(), new StdOutConsumer());
    }

    /** Prints every consumed line to System.out. */
    private static class StdOutConsumer implements StreamConsumer {
        @Override
        public void consumeLine(String line) {
            System.out.println(line);
        }
    }
}
| kierarad/gocd | test/test-utils/src/main/java/com/thoughtworks/go/mail/SysOutStreamConsumer.java | Java | apache-2.0 | 1,103 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.imaging.common;
import java.text.DecimalFormat;
import java.text.NumberFormat;
/**
 * An immutable rational number (numerator / divisor) backed by two ints.
 * A divisor of 0 is representable and marks an invalid value (see
 * {@link #isValid()}); no arithmetic exception is thrown on construction.
 */
public class RationalNumber extends Number
{
    private static final long serialVersionUID = -1;

    /** Numerator of the fraction; may be negative. */
    public final int numerator;
    /** Denominator of the fraction; 0 marks an invalid rational. */
    public final int divisor;

    public RationalNumber(int numerator, int divisor)
    {
        this.numerator = numerator;
        this.divisor = divisor;
    }

    /**
     * Safer than the constructor: accepts long values outside the int range
     * (truncating them, imprecisely, until they fit) and reduces the
     * fraction by the greatest common divisor.
     *
     * @throws NumberFormatException if the divisor collapses to 0 while an
     *         out-of-range value is being truncated
     */
    public static final RationalNumber factoryMethod(long n, long d)
    {
        // safer than constructor - handles values outside min/max range.
        // also does some simple finding of common denominators.

        if (n > Integer.MAX_VALUE || n < Integer.MIN_VALUE
                || d > Integer.MAX_VALUE || d < Integer.MIN_VALUE)
        {
            while ((n > Integer.MAX_VALUE || n < Integer.MIN_VALUE
                    || d > Integer.MAX_VALUE || d < Integer.MIN_VALUE)
                    && (Math.abs(n) > 1) && (Math.abs(d) > 1))
            {
                // brutal, imprecise truncation =(
                // use the sign-preserving right shift operator.
                n >>= 1;
                d >>= 1;
            }

            if (d == 0)
                throw new NumberFormatException("Invalid value, numerator: "
                        + n + ", divisor: " + d);
        }

        long gcd = gcd(n, d);
        d = d / gcd;
        n = n / gcd;

        return new RationalNumber((int) n, (int) d);
    }

    /**
     * Return the greatest common divisor (Euclid's algorithm). The sign of
     * the result follows the inputs, which normalizes the reduced fraction.
     */
    private static long gcd(long a, long b)
    {
        if (b == 0)
            return a;
        else
            return gcd(b, a % b);
    }

    /** Returns a new rational with the numerator's sign flipped. */
    public RationalNumber negate()
    {
        return new RationalNumber(-numerator, divisor);
    }

    @Override
    public double doubleValue()
    {
        return (double) numerator / (double) divisor;
    }

    @Override
    public float floatValue()
    {
        return (float) numerator / (float) divisor;
    }

    @Override
    public int intValue()
    {
        // Integer division: truncates toward zero.
        return numerator / divisor;
    }

    @Override
    public long longValue()
    {
        return (long) numerator / (long) divisor;
    }

    /** @return false iff the divisor is 0. */
    public boolean isValid()
    {
        return divisor != 0;
    }

    @Override
    public String toString()
    {
        if (divisor == 0)
            return "Invalid rational (" + numerator + "/" + divisor + ")";

        // NumberFormat instances are documented as not thread-safe, so use a
        // fresh locale-default instance per call rather than a shared static.
        NumberFormat nf = NumberFormat.getInstance();

        if ((numerator % divisor) == 0)
            return nf.format(numerator / divisor);

        return numerator + "/" + divisor + " ("
                + nf.format((double) numerator / divisor) + ")";
    }

    /**
     * Locale-formatted value with at most three fraction digits; exact
     * integers are printed without a decimal part.
     */
    public String toDisplayString()
    {
        if ((numerator % divisor) == 0)
            return "" + (numerator / divisor);

        NumberFormat nf = NumberFormat.getInstance();
        nf.setMaximumFractionDigits(3);
        return nf.format((double) numerator / (double) divisor);
    }
}
package io.cattle.platform.host.service;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.core.dao.DataDao;
import io.cattle.platform.ssh.common.SshKeyGen;
import io.cattle.platform.token.CertSet;
import io.cattle.platform.token.impl.RSAKeyProvider;
import io.cattle.platform.token.impl.RSAPrivateKeyHolder;
import io.cattle.platform.util.exception.ExceptionUtils;
import io.cattle.platform.util.type.InitializationTask;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.security.KeyPair;
import java.security.PublicKey;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.security.interfaces.RSAPrivateKey;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.inject.Inject;
import org.bouncycastle.openssl.jcajce.JcaPEMWriter;
import com.netflix.config.DynamicBooleanProperty;
/**
 * RSAKeyProvider backed by the data table (via {@link DataDao}): lazily
 * generates one RSA key pair and a self-signed root CA certificate, persists
 * them PEM-encoded under the data keys "host.api.key" / "host.api.key.cert",
 * and parses them back on every access.
 */
public class HostApiRSAKeyProvider implements RSAKeyProvider, InitializationTask {
    // When true, the key pair and CA cert are generated eagerly at startup
    // instead of on first use (see start()).
    private static final DynamicBooleanProperty GEN_ON_STARTUP = ArchaiusUtil.getBoolean("host.api.keygen.on.startup");
    // Data-table keys under which the PEM-encoded key pair and CA cert live.
    private static final String KEY = "host.api.key";
    private static final String CERT = "host.api.key.cert";
    // Key-id used for the single key pair this provider manages.
    private static final String DEFAULT = "default";
    DataDao dataDao;
    @Override
    public RSAPrivateKeyHolder getPrivateKey() {
        KeyPair kp = getKeyPair();
        if (kp == null) {
            return null;
        }
        return new RSAPrivateKeyHolder(DEFAULT, (RSAPrivateKey) kp.getPrivate());
    }
    @Override
    public void start() {
        // Optionally pre-generate (and persist) the key pair and CA cert so
        // the first request doesn't pay the generation cost.
        if (GEN_ON_STARTUP.get()) {
            getPrivateKey();
            getCACertificate();
        }
    }
    /**
     * Loads the key pair from the data table, generating and persisting it
     * (PEM) on first call. Throws a runtime exception if the stored PEM
     * cannot be parsed.
     */
    protected KeyPair getKeyPair() {
        String encoded = dataDao.getOrCreate(KEY, false, new Callable<String>() {
            @Override
            public String call() throws Exception {
                KeyPair kp = SshKeyGen.generateKeyPair();
                return SshKeyGen.toPEM(kp);
            }
        });
        try {
            return SshKeyGen.readKeyPair(encoded);
        } catch (Exception e) {
            ExceptionUtils.throwRuntime("Failed to read key pair from PEM", e);
            /* Won't hit next line */
            return null;
        }
    }
    @Override
    public X509Certificate getCACertificate() {
        // Self-signed root CA derived from the provider's key pair; created
        // once and persisted PEM-encoded alongside the key.
        final KeyPair kp = getKeyPair();
        String encoded = dataDao.getOrCreate(CERT, false, new Callable<String>() {
            @Override
            public String call() throws Exception {
                X509Certificate cert = SshKeyGen.createRootCACert(kp);
                return SshKeyGen.toPEM(cert);
            }
        });
        try {
            return SshKeyGen.readCACert(encoded);
        } catch (Exception e) {
            ExceptionUtils.throwRuntime("Failed to CA cert from PEM", e);
            /* Won't hit next line */
            return null;
        }
    }
    /**
     * Issues a fresh client certificate (new key pair) for the given subject
     * and subject-alternative-names, signed by this provider's CA.
     */
    @Override
    public CertSet generateCertificate(String subject, String... sans) throws Exception {
        KeyPair caKp = getKeyPair();
        X509Certificate caCert = getCACertificate();
        KeyPair clientKp = SshKeyGen.generateKeyPair();
        X509Certificate clientCert = SshKeyGen.generateClientCert(subject, clientKp.getPublic(), caKp.getPrivate(), caCert, sans);
        CertSet result = new CertSet(caCert, clientCert, clientKp.getPrivate());
        return result;
    }
    @Override
    public PublicKey getDefaultPublicKey() {
        return getPublicKeys().get(DEFAULT);
    }
    @Override
    public Map<String, PublicKey> getPublicKeys() {
        // Only one key pair is managed, exposed under the "default" id.
        Map<String, PublicKey> result = new HashMap<>();
        KeyPair defaultKp = getKeyPair();
        if (defaultKp != null) {
            result.put(DEFAULT, defaultKp.getPublic());
        }
        return result;
    }
    public DataDao getDataDao() {
        return dataDao;
    }
    @Inject
    public void setDataDao(DataDao dataDao) {
        this.dataDao = dataDao;
    }
    /** PEM-encodes the given certificate using BouncyCastle's PEM writer. */
    @Override
    public byte[] toBytes(Certificate cert) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (JcaPEMWriter writer = new JcaPEMWriter(new OutputStreamWriter(baos))) {
            writer.writeObject(cert);
        }
        return baos.toByteArray();
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.jee.jba.cmp;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "where")
public class Where {
    // Raw text content of the <where> element — presumably the WHERE-clause
    // body of a JBoss CMP query; confirm against the jbosscmp-jdbc DTD.
    @XmlValue
    protected String content;
    /**
     * Gets the value of the content property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getContent() {
        return content;
    }
    /**
     * Sets the value of the content property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setContent(String value) {
        this.content = value;
    }
}
| apache/openejb | container/openejb-jee/src/main/java/org/apache/openejb/jee/jba/cmp/Where.java | Java | apache-2.0 | 2,101 |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include "tensorflow/contrib/lite/toco/graph_transformations/graph_transformations.h"
#include "tensorflow/contrib/lite/toco/graph_transformations/quantization_util.h"
#include "tensorflow/contrib/lite/toco/model.h"
#include "tensorflow/contrib/lite/toco/tooling_util.h"
#include "tensorflow/core/platform/logging.h"
namespace toco {
template <ArrayDataType A>
void GetBoundsForQuantizedDataType(float* min, float* max) {
using limits = std::numeric_limits<DataType<A>>;
*min = limits::min();
*max = limits::max();
}
// Runtime dispatch over the quantized data type to the templated bounds
// helper above. Fatally aborts on types that are not quantizable.
void GetBoundsForQuantizedDataType(ArrayDataType quantized_data_type,
                                   float* min, float* max) {
  // It is important for matching accuracy between TF training and TFLite
  // inference, that the min and max values are float to match TF's
  // FakeQuantWithMinMaxVarsFunctor.
  switch (quantized_data_type) {
    case ArrayDataType::kUint8:
      return GetBoundsForQuantizedDataType<ArrayDataType::kUint8>(min, max);
    case ArrayDataType::kInt8:
      return GetBoundsForQuantizedDataType<ArrayDataType::kInt8>(min, max);
    case ArrayDataType::kUint16:
      return GetBoundsForQuantizedDataType<ArrayDataType::kUint16>(min, max);
    case ArrayDataType::kInt16:
      return GetBoundsForQuantizedDataType<ArrayDataType::kInt16>(min, max);
    case ArrayDataType::kUint32:
      return GetBoundsForQuantizedDataType<ArrayDataType::kUint32>(min, max);
    case ArrayDataType::kInt32:
      return GetBoundsForQuantizedDataType<ArrayDataType::kInt32>(min, max);
    case ArrayDataType::kUint64:
      return GetBoundsForQuantizedDataType<ArrayDataType::kUint64>(min, max);
    case ArrayDataType::kInt64:
      return GetBoundsForQuantizedDataType<ArrayDataType::kInt64>(min, max);
    default:
      LOG(FATAL) << "unhandled quantized data type";
  }
}
// Graph transformation: folds a FakeQuant op whose input is a constant float
// array by applying the fake-quantization numerics directly to the constant
// buffer, then removing the op. Yields (returns OK with *modified = false)
// until the op's MinMax is resolved and its input is a constant parameter.
::tensorflow::Status ResolveConstantFakeQuant::Run(Model* model,
                                                   std::size_t op_index,
                                                   bool* modified) {
  *modified = false;
  const auto fakequant_it = model->operators.begin() + op_index;
  const auto* fakequant_base_op = fakequant_it->get();
  if (fakequant_base_op->type != OperatorType::kFakeQuant) {
    return ::tensorflow::Status::OK();
  }
  const auto* fakequant_op =
      static_cast<const FakeQuantOperator*>(fakequant_base_op);
  // Yield until the fakequant MinMax has been resolved.
  if (!fakequant_op->minmax) {
    return ::tensorflow::Status::OK();
  }
  // This transformation only applies when the input array is constant.
  if (!IsConstantParameterArray(*model, fakequant_op->inputs[0])) {
    return ::tensorflow::Status::OK();
  }
  const auto& input_array = model->GetArray(fakequant_op->inputs[0]);
  CHECK(input_array.data_type == ArrayDataType::kFloat);
  // Determine the final data type in the same way as PropagateFakeQuantNumBits.
  ArrayDataType quantized_data_type = input_array.final_data_type;
  if (!InferQuantizedDataTypeFromFakeQuant(*fakequant_op,
                                           &quantized_data_type)) {
    AddMessageF("Unsupported FakeQuant num_bits=%d", fakequant_op->num_bits);
    return ::tensorflow::Status::OK();
  }
  AddMessageF("Resolving constant %s", LogName(*fakequant_op));
  auto& output_array = model->GetArray(fakequant_op->outputs[0]);
  CHECK(input_array.data_type == ArrayDataType::kFloat);
  // The output stays float: fake quantization simulates quantization error
  // while keeping float storage.
  output_array.data_type = ArrayDataType::kFloat;
  // We'll set the final data type to what the fake quant indicates we should
  // have (and would have been set if this stayed around until
  // PropagateFakeQuantNumBits).
  if (propagate_fake_quant_num_bits()) {
    output_array.final_data_type = quantized_data_type;
  }
  CHECK(!output_array.buffer);
  const auto& input_buffer = input_array.GetBuffer<ArrayDataType::kFloat>();
  output_array.GetOrCreateMinMax() = *fakequant_op->minmax;
  auto& output_buffer = output_array.GetMutableBuffer<ArrayDataType::kFloat>();
  const int size = input_buffer.data.size();
  output_buffer.data.resize(size);
  QuantizationParams qparams;
  ChooseQuantizationParamsForArrayAndQuantizedDataType(
      output_array, quantized_data_type, &qparams);
  float quantized_min, quantized_max;
  GetBoundsForQuantizedDataType(quantized_data_type, &quantized_min,
                                &quantized_max);
  if (fakequant_op->narrow_range) {
    // Narrow range excludes the lowest quantized value (e.g. int8 uses
    // [-127, 127] instead of [-128, 127]).
    quantized_min++;
    output_array.narrow_range = true;
  }
  // It is important for matching accuracy between TF training and TFLite
  // inference, that the following variables are float to match TF's
  // FakeQuantWithMinMaxVarsFunctor.
  const float scale = qparams.scale;
  const float nudged_min = (quantized_min - qparams.zero_point) * scale;
  const float nudged_max = (quantized_max - qparams.zero_point) * scale;
  tflite::FakeQuantizeArray(scale, nudged_min, nudged_max,
                            input_buffer.data.data(), output_buffer.data.data(),
                            size);
  // Drop the constant input if nothing else consumes it, then remove the op.
  if (IsDiscardableArray(*model, fakequant_op->inputs[0]) &&
      CountOpsWithInput(*model, fakequant_op->inputs[0]) == 1) {
    model->EraseArray(fakequant_op->inputs[0]);
  }
  model->operators.erase(fakequant_it);
  *modified = true;
  return ::tensorflow::Status::OK();
}
| girving/tensorflow | tensorflow/contrib/lite/toco/graph_transformations/resolve_constant_fake_quant.cc | C++ | apache-2.0 | 6,030 |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.particles;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL13;
import org.terasology.asset.Assets;
import org.terasology.config.Config;
import org.terasology.entitySystem.entity.EntityManager;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.lifecycleEvents.BeforeDeactivateComponent;
import org.terasology.entitySystem.entity.lifecycleEvents.OnActivatedComponent;
import org.terasology.entitySystem.event.ReceiveEvent;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.entitySystem.systems.RegisterMode;
import org.terasology.entitySystem.systems.RegisterSystem;
import org.terasology.entitySystem.systems.RenderSystem;
import org.terasology.entitySystem.systems.UpdateSubscriberSystem;
import org.terasology.logic.location.LocationComponent;
import org.terasology.logic.particles.BlockParticleEffectComponent.Particle;
import org.terasology.math.geom.Vector2f;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.math.geom.Vector4f;
import org.terasology.registry.In;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.assets.texture.Texture;
import org.terasology.rendering.logic.NearestSortingList;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.utilities.random.FastRandom;
import org.terasology.utilities.random.Random;
import org.terasology.world.WorldProvider;
import org.terasology.world.biomes.Biome;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.BlockPart;
import org.terasology.world.block.loader.WorldAtlas;
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.Iterator;
import static org.lwjgl.opengl.GL11.GL_ONE;
import static org.lwjgl.opengl.GL11.GL_ONE_MINUS_SRC_ALPHA;
import static org.lwjgl.opengl.GL11.GL_QUADS;
import static org.lwjgl.opengl.GL11.GL_SRC_ALPHA;
import static org.lwjgl.opengl.GL11.glBegin;
import static org.lwjgl.opengl.GL11.glBindTexture;
import static org.lwjgl.opengl.GL11.glBlendFunc;
import static org.lwjgl.opengl.GL11.glCallList;
import static org.lwjgl.opengl.GL11.glDeleteLists;
import static org.lwjgl.opengl.GL11.glDisable;
import static org.lwjgl.opengl.GL11.glEnable;
import static org.lwjgl.opengl.GL11.glEnd;
import static org.lwjgl.opengl.GL11.glEndList;
import static org.lwjgl.opengl.GL11.glGenLists;
import static org.lwjgl.opengl.GL11.glNewList;
import static org.lwjgl.opengl.GL11.glPopMatrix;
import static org.lwjgl.opengl.GL11.glPushMatrix;
import static org.lwjgl.opengl.GL11.glScalef;
import static org.lwjgl.opengl.GL11.glTranslated;
import static org.lwjgl.opengl.GL11.glTranslatef;
/**
 * Simulates and renders block particle effects (for example the debris shown
 * when a block is broken). Each update the system ages particles, integrates
 * their velocity with simple block collision, and spawns newly requested
 * particles (throttled per frame). Rendering draws every particle as a
 * camera-facing textured quad via a shared compiled display list.
 *
 * @author Immortius
 */
// TODO: Generalise for non-block particles
// TODO: Dispose display lists
@RegisterSystem(RegisterMode.CLIENT)
public class BlockParticleEmitterSystem extends BaseComponentSystem implements UpdateSubscriberSystem, RenderSystem {
    // Caps how many pending particles a single effect may spawn per update,
    // spreading large spawn bursts over several frames.
    private static final int PARTICLES_PER_UPDATE = 32;

    @In
    private EntityManager entityManager;
    @In
    private WorldProvider worldProvider;
    @In
    private WorldAtlas worldAtlas;
    @In
    private BlockManager blockManager;
    // TODO: lose dependency on worldRenderer?
    @In
    private WorldRenderer worldRenderer;
    @In
    private Config config;

    private Random random = new FastRandom();
    // Keeps particle-effect entities ordered by distance to the camera so that
    // only the nearest effects need to be rendered when the limit is active.
    private NearestSortingList sorter = new NearestSortingList();
    // OpenGL display list holding the unit particle quad; 0 until initialise()
    // compiles it.
    private int displayList;

    public void initialise() {
        if (displayList == 0) {
            // Compile the unit quad once; every particle reuses it via glCallList.
            displayList = glGenLists(1);
            glNewList(displayList, GL11.GL_COMPILE);
            drawParticle();
            glEndList();
        }
        sorter.initialise(worldRenderer.getActiveCamera());
    }

    @Override
    public void shutdown() {
        glDeleteLists(displayList, 1);
        sorter.stop();
    }

    /**
     * Ages, moves and spawns particles for every active effect, destroying
     * effect entities that have run to completion.
     *
     * @param delta time since the last update, in seconds
     */
    public void update(float delta) {
        for (EntityRef entity : entityManager.getEntitiesWith(BlockParticleEffectComponent.class, LocationComponent.class)) {
            BlockParticleEffectComponent particleEffect = entity.getComponent(BlockParticleEffectComponent.class);
            Iterator<Particle> iterator = particleEffect.particles.iterator();
            while (iterator.hasNext()) {
                BlockParticleEffectComponent.Particle p = iterator.next();
                p.lifeRemaining -= delta;
                if (p.lifeRemaining <= 0) {
                    iterator.remove();
                } else {
                    updateVelocity(entity, particleEffect, p, delta);
                    updatePosition(p, delta);
                }
            }
            // Spawn at most PARTICLES_PER_UPDATE of the requested particles this frame.
            for (int i = 0; particleEffect.spawnCount > 0 && i < PARTICLES_PER_UPDATE; ++i) {
                spawnParticle(particleEffect);
            }
            if (particleEffect.particles.isEmpty() && particleEffect.destroyEntityOnCompletion) {
                entity.destroy();
            } else {
                entity.saveComponent(particleEffect);
            }
        }
    }

    @ReceiveEvent(components = {BlockParticleEffectComponent.class, LocationComponent.class})
    public void onActivated(OnActivatedComponent event, EntityRef entity) {
        sorter.add(entity);
    }

    @ReceiveEvent(components = {BlockParticleEffectComponent.class, LocationComponent.class})
    public void onDeactivated(BeforeDeactivateComponent event, EntityRef entity) {
        sorter.remove(entity);
    }

    /**
     * Creates a single particle with randomised lifespan, velocity, size and
     * position (within the effect's spawn range), and decrements the effect's
     * pending spawn count.
     */
    private void spawnParticle(BlockParticleEffectComponent particleEffect) {
        Particle p = new Particle();
        p.lifeRemaining = random.nextFloat() * (particleEffect.maxLifespan - particleEffect.minLifespan) + particleEffect.minLifespan;
        p.velocity = random.nextVector3f();
        p.size = random.nextFloat() * (particleEffect.maxSize - particleEffect.minSize) + particleEffect.minSize;
        p.position.set(
                random.nextFloat(-particleEffect.spawnRange.x, particleEffect.spawnRange.x),
                random.nextFloat(-particleEffect.spawnRange.y, particleEffect.spawnRange.y),
                random.nextFloat(-particleEffect.spawnRange.z, particleEffect.spawnRange.z));
        p.color = particleEffect.color;

        if (particleEffect.blockType != null) {
            // Use the block's FRONT face texture from the terrain atlas.
            final float tileSize = worldAtlas.getRelativeTileSize();
            p.texSize.set(tileSize, tileSize);

            Block b = particleEffect.blockType.getArchetypeBlock();
            p.texOffset.set(b.getPrimaryAppearance().getTextureAtlasPos(BlockPart.FRONT));

            if (particleEffect.randBlockTexDisplacement) {
                // Size of the reduced texture window on each axis.
                // BUGFIX: the x component previously multiplied by
                // randBlockTexDisplacementScale.y, so a non-uniform scale gave
                // the wrong horizontal displacement range below.
                Vector2f particleTexSize = new Vector2f(
                        tileSize * particleEffect.randBlockTexDisplacementScale.x,
                        tileSize * particleEffect.randBlockTexDisplacementScale.y);

                p.texSize.x *= particleEffect.randBlockTexDisplacementScale.x;
                p.texSize.y *= particleEffect.randBlockTexDisplacementScale.y;

                // Randomly position the reduced window within the block's tile.
                p.texOffset.set(
                        p.texOffset.x + random.nextFloat() * (tileSize - particleTexSize.x),
                        p.texOffset.y + random.nextFloat() * (tileSize - particleTexSize.y));
            }
        }
        particleEffect.particles.add(p);
        particleEffect.spawnCount--;
    }

    /**
     * Accelerates the particle towards the effect's target velocity, and zeroes
     * vertical velocity when the particle would run into a solid block.
     */
    protected void updateVelocity(EntityRef entity, BlockParticleEffectComponent particleEffect, Particle particle, float delta) {
        Vector3f diff = new Vector3f(particleEffect.targetVelocity);
        diff.sub(particle.velocity);
        diff.x *= particleEffect.acceleration.x * delta;
        diff.y *= particleEffect.acceleration.y * delta;
        diff.z *= particleEffect.acceleration.z * delta;
        particle.velocity.add(diff);

        if (particleEffect.collideWithBlocks) {
            LocationComponent location = entity.getComponent(LocationComponent.class);
            Vector3f pos = location.getWorldPosition();
            pos.add(particle.position);
            // Probe slightly ahead of the particle in its vertical direction of
            // travel; a non-air block (id != 0) stops vertical movement.
            if (worldProvider.getBlock(new Vector3f(pos.x, pos.y + 2 * Math.signum(particle.velocity.y) * particle.size, pos.z)).getId() != 0x0) {
                particle.velocity.y = 0;
            }
        }
    }

    /** Integrates the particle's position by its velocity over {@code delta} seconds. */
    protected void updatePosition(Particle particle, float delta) {
        particle.position.x += particle.velocity.x * delta;
        particle.position.y += particle.velocity.y * delta;
        particle.position.z += particle.velocity.z * delta;
    }

    public void renderAlphaBlend() {
        if (config.getRendering().isRenderNearest()) {
            // Only render the effects closest to the camera, up to the limit.
            render(Arrays.asList(sorter.getNearest(config.getRendering().getParticleEffectLimit())));
        } else {
            render(entityManager.getEntitiesWith(BlockParticleEffectComponent.class, LocationComponent.class));
        }
    }

    /**
     * Renders the given particle-effect entities, binding either the terrain
     * atlas (for block particles) or the effect's own texture.
     */
    private void render(Iterable<EntityRef> particleEntities) {
        Assets.getMaterial("engine:prog.particle").enable();
        glDisable(GL11.GL_CULL_FACE);
        Vector3f cameraPosition = worldRenderer.getActiveCamera().getPosition();
        for (EntityRef entity : particleEntities) {
            LocationComponent location = entity.getComponent(LocationComponent.class);

            if (null == location) {
                continue;
            }

            Vector3f worldPos = location.getWorldPosition();
            if (!worldProvider.isBlockRelevant(worldPos)) {
                continue;
            }
            BlockParticleEffectComponent particleEffect = entity.getComponent(BlockParticleEffectComponent.class);

            if (particleEffect.texture == null) {
                Texture terrainTex = Assets.getTexture("engine:terrain");
                if (terrainTex == null) {
                    return;
                }
                GL13.glActiveTexture(GL13.GL_TEXTURE0);
                glBindTexture(GL11.GL_TEXTURE_2D, terrainTex.getId());
            } else {
                GL13.glActiveTexture(GL13.GL_TEXTURE0);
                glBindTexture(GL11.GL_TEXTURE_2D, particleEffect.texture.getId());
            }

            // Additive effects temporarily switch the blend function, restored below.
            if (particleEffect.blendMode == BlockParticleEffectComponent.ParticleBlendMode.ADD) {
                glBlendFunc(GL_ONE, GL_ONE);
            }

            if (particleEffect.blockType != null) {
                renderBlockParticles(worldPos, cameraPosition, particleEffect);
            } else {
                renderParticles(worldPos, cameraPosition, particleEffect);
            }

            if (particleEffect.blendMode == BlockParticleEffectComponent.ParticleBlendMode.ADD) {
                glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            }
        }
        glEnable(GL11.GL_CULL_FACE);
    }

    /** Renders the particles of a block-textured effect, tinted by the local biome. */
    private void renderBlockParticles(Vector3f worldPos, Vector3f cameraPosition, BlockParticleEffectComponent particleEffect) {
        Vector3i worldPos3i = new Vector3i(worldPos, 0.5f);
        Biome biome = worldProvider.getBiome(worldPos3i);

        glPushMatrix();
        glTranslated(worldPos.x - cameraPosition.x, worldPos.y - cameraPosition.y, worldPos.z - cameraPosition.z);

        for (Particle particle : particleEffect.particles) {
            glPushMatrix();
            glTranslatef(particle.position.x, particle.position.y, particle.position.z);
            applyOrientation();
            glScalef(particle.size, particle.size, particle.size);

            float light = worldRenderer.getRenderingLightValueAt(new Vector3f(worldPos.x + particle.position.x,
                    worldPos.y + particle.position.y, worldPos.z + particle.position.z));
            renderParticle(particle, particleEffect.blockType.getArchetypeBlock(), biome, light);
            glPopMatrix();
        }
        glPopMatrix();
    }

    /** Renders the particles of a non-block (custom texture) effect. */
    private void renderParticles(Vector3f worldPos, Vector3f cameraPosition, BlockParticleEffectComponent particleEffect) {
        glPushMatrix();
        glTranslated(worldPos.x - cameraPosition.x, worldPos.y - cameraPosition.y, worldPos.z - cameraPosition.z);

        for (Particle particle : particleEffect.particles) {
            glPushMatrix();
            glTranslatef(particle.position.x, particle.position.y, particle.position.z);
            applyOrientation();
            glScalef(particle.size, particle.size, particle.size);

            float light = worldRenderer.getRenderingLightValueAt(new Vector3f(worldPos.x + particle.position.x,
                    worldPos.y + particle.position.y, worldPos.z + particle.position.z));
            renderParticle(particle, light);
            glPopMatrix();
        }
        glPopMatrix();
    }

    /**
     * Billboards the current quad: clears the rotation/scale part of the
     * modelview matrix so the particle always faces the camera.
     */
    private void applyOrientation() {
        // Fetch the current modelview matrix
        final FloatBuffer model = BufferUtils.createFloatBuffer(16);
        GL11.glGetFloat(GL11.GL_MODELVIEW_MATRIX, model);

        // And undo all rotations and scaling by resetting the upper-left 3x3
        // block to the identity.
        for (int i = 0; i < 3; i++) {
            for (int j = 0; j < 3; j++) {
                if (i == j) {
                    model.put(i * 4 + j, 1.0f);
                } else {
                    model.put(i * 4 + j, 0.0f);
                }
            }
        }

        GL11.glLoadMatrix(model);
    }

    /** Draws one plain particle with the given light value. */
    protected void renderParticle(Particle particle, float light) {
        Material mat = Assets.getMaterial("engine:prog.particle");
        mat.setFloat4("colorOffset", particle.color.x, particle.color.y, particle.color.z, particle.color.w, true);
        mat.setFloat2("texOffset", particle.texOffset.x, particle.texOffset.y, true);
        mat.setFloat2("texScale", particle.texSize.x, particle.texSize.y, true);
        mat.setFloat("light", light, true);

        glCallList(displayList);
    }

    /** Draws one block particle, applying the block's biome colour offset. */
    protected void renderParticle(Particle particle, Block block, Biome biome, float light) {
        Material mat = Assets.getMaterial("engine:prog.particle");

        Vector4f colorMod = block.calcColorOffsetFor(BlockPart.FRONT, biome);
        mat.setFloat4("colorOffset", particle.color.x * colorMod.x, particle.color.y * colorMod.y, particle.color.z * colorMod.z, particle.color.w * colorMod.w, true);

        mat.setFloat2("texOffset", particle.texOffset.x, particle.texOffset.y, true);
        mat.setFloat2("texScale", particle.texSize.x, particle.texSize.y, true);
        mat.setFloat("light", light, true);

        glCallList(displayList);
    }

    /** Emits the unit quad used for every particle (compiled into the display list). */
    private void drawParticle() {
        glBegin(GL_QUADS);
        GL11.glTexCoord2f(0.0f, 0.0f);
        GL11.glVertex3f(-0.5f, 0.5f, 0.0f);
        GL11.glTexCoord2f(1.0f, 0.0f);
        GL11.glVertex3f(0.5f, 0.5f, 0.0f);
        GL11.glTexCoord2f(1.0f, 1.0f);
        GL11.glVertex3f(0.5f, -0.5f, 0.0f);
        GL11.glTexCoord2f(0.0f, 1.0f);
        GL11.glVertex3f(-0.5f, -0.5f, 0.0f);
        glEnd();
    }

    public void renderOpaque() {
    }

    public void renderOverlay() {
    }

    public void renderFirstPerson() {
    }

    @Override
    public void renderShadows() {
    }
}
| CC4401-TeraCity/TeraCity | engine/src/main/java/org/terasology/logic/particles/BlockParticleEmitterSystem.java | Java | apache-2.0 | 15,781 |
package org.ovirt.engine.core.utils.ipa;
import static org.ovirt.engine.core.utils.kerberos.InstallerConstants.ERROR_PREFIX;

import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;

import org.springframework.ldap.core.ContextMapper;
import org.springframework.ldap.core.DirContextAdapter;
public class ITDSUserContextMapper implements ContextMapper {

    /**
     * Extracts the user identifier from an LDAP search result.
     *
     * @param ctx the search result, expected to be a {@link DirContextAdapter}
     * @return the first value of the {@code uid} attribute, or {@code null} if
     *         the context, its attributes, or the {@code uid} attribute are
     *         missing or unreadable
     */
    @Override
    public Object mapFromContext(Object ctx) {
        if (ctx == null) {
            return null;
        }

        DirContextAdapter searchResult = (DirContextAdapter) ctx;
        Attributes attributes = searchResult.getAttributes();
        if (attributes == null) {
            return null;
        }

        // BUGFIX: entries without a uid attribute used to trigger a
        // NullPointerException on get(0), which the NamingException catch
        // below does not handle. Report the failure and return null instead.
        Attribute uidAttribute = attributes.get("uid");
        if (uidAttribute == null) {
            System.err.println(ERROR_PREFIX + "Failed getting user GUID");
            return null;
        }

        try {
            return uidAttribute.get(0);
        } catch (NamingException e) {
            System.err.println(ERROR_PREFIX + "Failed getting user GUID");
            return null;
        }
    }
}
| derekhiggins/ovirt-engine | backend/manager/modules/utils/src/main/java/org/ovirt/engine/core/utils/ipa/ITDSUserContextMapper.java | Java | apache-2.0 | 922 |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.normalizer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
/**
 * Interface for normalization plan: a single action (split, merge, or none)
 * that will be carried out against the cluster through {@link Admin}.
 */
@InterfaceAudience.Private
public interface NormalizationPlan {
  /** The kind of action a plan performs. */
  enum PlanType {
    /** The plan splits a region. */
    SPLIT,
    /** The plan merges regions. */
    MERGE,
    /** No normalization action is required. */
    NONE
  }

  /**
   * Executes normalization plan on cluster (does actual splitting/merging work).
   * @param admin instance of Admin
   */
  void execute(Admin admin);

  /**
   * @return the type of this plan
   */
  PlanType getType();
}
| gustavoanatoly/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/NormalizationPlan.java | Java | apache-2.0 | 1,367 |
package mock.services;
import io.vertx.codegen.annotations.ProxyGen;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;
@ProxyGen
public interface ParrotService {

    /**
     * Asynchronously processes {@code original} and delivers the result to
     * {@code handler}.
     * <p>
     * NOTE(review): given the name, this presumably echoes the input back
     * unchanged — confirm against the service implementation, which is not
     * visible here.
     *
     * @param original the string to send to the service
     * @param handler  receives the asynchronous string result
     */
    void echo(String original, Handler<AsyncResult<String>> handler);
}
| aesteve/nubes | src/test/java/mock/services/ParrotService.java | Java | apache-2.0 | 250 |
package com.hubspot.singularity.executor;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.BufferedWriter;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import com.github.jknack.handlebars.Template;
import com.google.common.base.Throwables;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.hubspot.singularity.executor.config.SingularityExecutorModule;
import com.hubspot.singularity.executor.models.DockerContext;
import com.hubspot.singularity.executor.models.EnvironmentContext;
import com.hubspot.singularity.executor.models.LogrotateCronTemplateContext;
import com.hubspot.singularity.executor.models.LogrotateTemplateContext;
import com.hubspot.singularity.executor.models.RunnerContext;
@Singleton
public class TemplateManager {
  // Pre-compiled Handlebars templates, bound by name in SingularityExecutorModule.
  private final Template runnerTemplate;
  private final Template environmentTemplate;
  private final Template logrotateTemplate;
  private final Template logrotateCronTemplate;
  private final Template dockerTemplate;

  @Inject
  public TemplateManager(@Named(SingularityExecutorModule.RUNNER_TEMPLATE) Template runnerTemplate,
                         @Named(SingularityExecutorModule.ENVIRONMENT_TEMPLATE) Template environmentTemplate,
                         @Named(SingularityExecutorModule.LOGROTATE_TEMPLATE) Template logrotateTemplate,
                         @Named(SingularityExecutorModule.LOGROTATE_CRON_TEMPLATE) Template logrotateCronTemplate,
                         @Named(SingularityExecutorModule.DOCKER_TEMPLATE) Template dockerTemplate
                         ) {
    this.runnerTemplate = runnerTemplate;
    this.environmentTemplate = environmentTemplate;
    this.logrotateTemplate = logrotateTemplate;
    this.logrotateCronTemplate = logrotateCronTemplate;
    this.dockerTemplate = dockerTemplate;
  }

  /** Renders the runner script template to {@code destination}. */
  public void writeRunnerScript(Path destination, RunnerContext runnerContext) {
    writeTemplate(destination, runnerTemplate, runnerContext);
  }

  /** Renders the environment script template to {@code destination}. */
  public void writeEnvironmentScript(Path destination, EnvironmentContext environmentContext) {
    writeTemplate(destination, environmentTemplate, environmentContext);
  }

  /** Renders the logrotate configuration template to {@code destination}. */
  public void writeLogrotateFile(Path destination, LogrotateTemplateContext logRotateContext) {
    writeTemplate(destination, logrotateTemplate, logRotateContext);
  }

  /**
   * Renders the logrotate cron entry to {@code destination} and forces the
   * file's permissions to 0644 (rw-r--r--).
   *
   * @return true only if every permission change succeeded
   */
  public boolean writeCronEntryForLogrotate(Path destination, LogrotateCronTemplateContext logrotateCronTemplateContext) {
    writeTemplate(destination, logrotateCronTemplate, logrotateCronTemplateContext);

    final File destinationFile = destination.toFile();

    // ensure file is 644: clear execute for everyone, grant read to everyone,
    // clear write for everyone, then re-grant write to the owner only.
    // (java.io.File offers no direct chmod, hence the four calls.)
    return destinationFile.setExecutable(false, false) &&
        destinationFile.setReadable(true, false) &&
        destinationFile.setWritable(false, false) &&
        destinationFile.setWritable(true);
  }

  /** Renders the docker wrapper script template to {@code destination}. */
  public void writeDockerScript(Path destination, DockerContext dockerContext) {
    writeTemplate(destination, dockerTemplate, dockerContext);
  }

  /**
   * Applies {@code template} with {@code context} and writes the result to
   * {@code path} as UTF-8; any failure is rethrown unchecked.
   */
  private void writeTemplate(Path path, Template template, Object context) {
    try (final BufferedWriter writer = Files.newBufferedWriter(path, UTF_8)) {
      template.apply(context, writer);
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }
}
| andrhamm/Singularity | SingularityExecutor/src/main/java/com/hubspot/singularity/executor/TemplateManager.java | Java | apache-2.0 | 3,350 |
/*
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
/*
*
*
*
*
*
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.redkale.asm;
/**
* A {@link MethodVisitor} that generates methods in bytecode form. Each visit
* method of this class appends the bytecode corresponding to the visited
* instruction to a byte vector, in the order these methods are called.
*
* @author Eric Bruneton
* @author Eugene Kuleshov
*/
class MethodWriter extends MethodVisitor {
/**
* Pseudo access flag used to denote constructors.
*/
static final int ACC_CONSTRUCTOR = 0x80000;
/**
* Frame has exactly the same locals as the previous stack map frame and
* number of stack items is zero.
*/
static final int SAME_FRAME = 0; // to 63 (0-3f)
/**
* Frame has exactly the same locals as the previous stack map frame and
* number of stack items is 1
*/
static final int SAME_LOCALS_1_STACK_ITEM_FRAME = 64; // to 127 (40-7f)
/**
* Reserved for future use
*/
static final int RESERVED = 128;
/**
* Frame has exactly the same locals as the previous stack map frame and
* number of stack items is 1. Offset is bigger then 63;
*/
static final int SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED = 247; // f7
/**
* Frame where current locals are the same as the locals in the previous
* frame, except that the k last locals are absent. The value of k is given
* by the formula 251-frame_type.
*/
static final int CHOP_FRAME = 248; // to 250 (f8-fA)
/**
* Frame has exactly the same locals as the previous stack map frame and
* number of stack items is zero. Offset is bigger then 63;
*/
static final int SAME_FRAME_EXTENDED = 251; // fb
/**
* Frame where current locals are the same as the locals in the previous
* frame, except that k additional locals are defined. The value of k is
* given by the formula frame_type-251.
*/
static final int APPEND_FRAME = 252; // to 254 // fc-fe
/**
* Full frame
*/
static final int FULL_FRAME = 255; // ff
/**
* Indicates that the stack map frames must be recomputed from scratch. In
* this case the maximum stack size and number of local variables is also
* recomputed from scratch.
*
* @see #compute
*/
static final int FRAMES = 0;
/**
* Indicates that the stack map frames of type F_INSERT must be computed.
* The other frames are not (re)computed. They should all be of type F_NEW
* and should be sufficient to compute the content of the F_INSERT frames,
* together with the bytecode instructions between a F_NEW and a F_INSERT
* frame - and without any knowledge of the type hierarchy (by definition of
* F_INSERT).
*
* @see #compute
*/
static final int INSERTED_FRAMES = 1;
/**
* Indicates that the maximum stack size and number of local variables must
* be automatically computed.
*
* @see #compute
*/
static final int MAXS = 2;
/**
* Indicates that nothing must be automatically computed.
*
* @see #compute
*/
static final int NOTHING = 3;
/**
* The class writer to which this method must be added.
*/
final ClassWriter cw;
/**
* Access flags of this method.
*/
private int access;
/**
* The index of the constant pool item that contains the name of this
* method.
*/
private final int name;
/**
* The index of the constant pool item that contains the descriptor of this
* method.
*/
private final int desc;
/**
* The descriptor of this method.
*/
private final String descriptor;
/**
* The signature of this method.
*/
String signature;
/**
* If not zero, indicates that the code of this method must be copied from
* the ClassReader associated to this writer in <code>cw.cr</code>. More
* precisely, this field gives the index of the first byte to copied from
* <code>cw.cr.b</code>.
*/
int classReaderOffset;
/**
* If not zero, indicates that the code of this method must be copied from
* the ClassReader associated to this writer in <code>cw.cr</code>. More
* precisely, this field gives the number of bytes to copied from
* <code>cw.cr.b</code>.
*/
int classReaderLength;
/**
* Number of exceptions that can be thrown by this method.
*/
int exceptionCount;
/**
* The exceptions that can be thrown by this method. More precisely, this
* array contains the indexes of the constant pool items that contain the
* internal names of these exception classes.
*/
int[] exceptions;
/**
* The annotation default attribute of this method. May be <tt>null</tt>.
*/
private ByteVector annd;
/**
* The runtime visible annotations of this method. May be <tt>null</tt>.
*/
private AnnotationWriter anns;
/**
* The runtime invisible annotations of this method. May be <tt>null</tt>.
*/
private AnnotationWriter ianns;
/**
* The runtime visible type annotations of this method. May be <tt>null</tt>
* .
*/
private AnnotationWriter tanns;
/**
* The runtime invisible type annotations of this method. May be
* <tt>null</tt>.
*/
private AnnotationWriter itanns;
/**
* The runtime visible parameter annotations of this method. May be
* <tt>null</tt>.
*/
private AnnotationWriter[] panns;
/**
* The runtime invisible parameter annotations of this method. May be
* <tt>null</tt>.
*/
private AnnotationWriter[] ipanns;
/**
* The number of synthetic parameters of this method.
*/
private int synthetics;
/**
* The non standard attributes of the method.
*/
private Attribute attrs;
/**
* The bytecode of this method.
*/
private ByteVector code = new ByteVector();
/**
* Maximum stack size of this method.
*/
private int maxStack;
/**
* Maximum number of local variables for this method.
*/
private int maxLocals;
/**
* Number of local variables in the current stack map frame.
*/
private int currentLocals;
/**
* Number of stack map frames in the StackMapTable attribute.
*/
int frameCount;
/**
* The StackMapTable attribute.
*/
private ByteVector stackMap;
/**
* The offset of the last frame that was written in the StackMapTable
* attribute.
*/
private int previousFrameOffset;
/**
* The last frame that was written in the StackMapTable attribute.
*
* @see #frame
*/
private int[] previousFrame;
/**
* The current stack map frame. The first element contains the offset of the
* instruction to which the frame corresponds, the second element is the
* number of locals and the third one is the number of stack elements. The
* local variables start at index 3 and are followed by the operand stack
* values. In summary frame[0] = offset, frame[1] = nLocal, frame[2] =
* nStack, frame[3] = nLocal. All types are encoded as integers, with the
* same format as the one used in {@link Label}, but limited to BASE types.
*/
private int[] frame;
/**
* Number of elements in the exception handler list.
*/
private int handlerCount;
/**
* The first element in the exception handler list.
*/
private Handler firstHandler;
/**
* The last element in the exception handler list.
*/
private Handler lastHandler;
/**
* Number of entries in the MethodParameters attribute.
*/
private int methodParametersCount;
/**
* The MethodParameters attribute.
*/
private ByteVector methodParameters;
/**
* Number of entries in the LocalVariableTable attribute.
*/
private int localVarCount;
/**
* The LocalVariableTable attribute.
*/
private ByteVector localVar;
/**
* Number of entries in the LocalVariableTypeTable attribute.
*/
private int localVarTypeCount;
/**
* The LocalVariableTypeTable attribute.
*/
private ByteVector localVarType;
/**
* Number of entries in the LineNumberTable attribute.
*/
private int lineNumberCount;
/**
* The LineNumberTable attribute.
*/
private ByteVector lineNumber;
/**
* The start offset of the last visited instruction.
*/
private int lastCodeOffset;
/**
* The runtime visible type annotations of the code. May be <tt>null</tt>.
*/
private AnnotationWriter ctanns;
/**
* The runtime invisible type annotations of the code. May be <tt>null</tt>.
*/
private AnnotationWriter ictanns;
/**
* The non standard attributes of the method's code.
*/
private Attribute cattrs;
/**
* The number of subroutines in this method.
*/
private int subroutines;
// ------------------------------------------------------------------------
/*
* Fields for the control flow graph analysis algorithm (used to compute the
* maximum stack size). A control flow graph contains one node per "basic
* block", and one edge per "jump" from one basic block to another. Each
* node (i.e., each basic block) is represented by the Label object that
* corresponds to the first instruction of this basic block. Each node also
* stores the list of its successors in the graph, as a linked list of Edge
* objects.
*/
/**
* Indicates what must be automatically computed.
*
* @see #FRAMES
* @see #INSERTED_FRAMES
* @see #MAXS
* @see #NOTHING
*/
private final int compute;
/**
* A list of labels. This list is the list of basic blocks in the method,
* i.e. a list of Label objects linked to each other by their
* {@link Label#successor} field, in the order they are visited by
* {@link MethodVisitor#visitLabel}, and starting with the first basic
* block.
*/
private Label labels;
/**
* The previous basic block.
*/
private Label previousBlock;
/**
* The current basic block.
*/
private Label currentBlock;
/**
* The (relative) stack size after the last visited instruction. This size
* is relative to the beginning of the current basic block, i.e., the true
* stack size after the last visited instruction is equal to the
* {@link Label#inputStackTop beginStackSize} of the current basic block
* plus <tt>stackSize</tt>.
*/
private int stackSize;
/**
* The (relative) maximum stack size after the last visited instruction.
* This size is relative to the beginning of the current basic block, i.e.,
* the true maximum stack size after the last visited instruction is equal
* to the {@link Label#inputStackTop beginStackSize} of the current basic
* block plus <tt>stackSize</tt>.
*/
private int maxStackSize;
// ------------------------------------------------------------------------
// Constructor
// ------------------------------------------------------------------------
/**
* Constructs a new {@link MethodWriter}.
*
* @param cw
* the class writer in which the method must be added.
* @param access
* the method's access flags (see {@link Opcodes}).
* @param name
* the method's name.
* @param desc
* the method's descriptor (see {@link Type}).
* @param signature
* the method's signature. May be <tt>null</tt>.
* @param exceptions
* the internal names of the method's exceptions. May be
* <tt>null</tt>.
* @param compute
* Indicates what must be automatically computed (see #compute).
*/
    MethodWriter(final ClassWriter cw, final int access, final String name,
            final String desc, final String signature,
            final String[] exceptions, final int compute) {
        super(Opcodes.ASM6);
        // Link this writer into the class writer's singly-linked method list.
        if (cw.firstMethod == null) {
            cw.firstMethod = this;
        } else {
            cw.lastMethod.mv = this;
        }
        cw.lastMethod = this;
        this.cw = cw;
        this.access = access;
        // Tag constructors with the internal pseudo flag (see ACC_CONSTRUCTOR).
        if ("<init>".equals(name)) {
            this.access |= ACC_CONSTRUCTOR;
        }
        // Intern the method name and descriptor in the constant pool.
        this.name = cw.newUTF8(name);
        this.desc = cw.newUTF8(desc);
        this.descriptor = desc;
        this.signature = signature;
        if (exceptions != null && exceptions.length > 0) {
            exceptionCount = exceptions.length;
            this.exceptions = new int[exceptionCount];
            for (int i = 0; i < exceptionCount; ++i) {
                // Store constant pool indexes of the exception class names.
                this.exceptions[i] = cw.newClass(exceptions[i]);
            }
        }
        this.compute = compute;
        if (compute != NOTHING) {
            // updates maxLocals: the argument slots (minus the implicit 'this'
            // slot for static methods) form the initial local variable count.
            int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
            if ((access & Opcodes.ACC_STATIC) != 0) {
                --size;
            }
            maxLocals = size;
            currentLocals = size;
            // creates and visits the label for the first basic block
            labels = new Label();
            labels.status |= Label.PUSHED;
            visitLabel(labels);
        }
    }
// ------------------------------------------------------------------------
// Implementation of the MethodVisitor abstract class
// ------------------------------------------------------------------------
@Override
public void visitParameter(String name, int access) {
if (methodParameters == null) {
methodParameters = new ByteVector();
}
++methodParametersCount;
methodParameters.putShort((name == null) ? 0 : cw.newUTF8(name))
.putShort(access);
}
    /**
     * Starts the visit of this method's default annotation value, stored in
     * the AnnotationDefault attribute.
     */
    @Override
    public AnnotationVisitor visitAnnotationDefault() {
        annd = new ByteVector();
        // a default value has no enclosing 'num_element_value_pairs' count,
        // hence the null parent and 0 offset
        return new AnnotationWriter(cw, false, annd, null, 0);
    }
@Override
public AnnotationVisitor visitAnnotation(final String desc,
final boolean visible) {
ByteVector bv = new ByteVector();
// write type, and reserve space for values count
bv.putShort(cw.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
if (visible) {
aw.next = anns;
anns = aw;
} else {
aw.next = ianns;
ianns = aw;
}
return aw;
}
@Override
public AnnotationVisitor visitTypeAnnotation(final int typeRef,
final TypePath typePath, final String desc, final boolean visible) {
ByteVector bv = new ByteVector();
// write target_type and target_info
AnnotationWriter.putTarget(typeRef, typePath, bv);
// write type, and reserve space for values count
bv.putShort(cw.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
bv.length - 2);
if (visible) {
aw.next = tanns;
tanns = aw;
} else {
aw.next = itanns;
itanns = aw;
}
return aw;
}
@Override
public AnnotationVisitor visitParameterAnnotation(final int parameter,
final String desc, final boolean visible) {
ByteVector bv = new ByteVector();
if ("Ljava/lang/Synthetic;".equals(desc)) {
// workaround for a bug in javac with synthetic parameters
// see ClassReader.readParameterAnnotations
synthetics = Math.max(synthetics, parameter + 1);
return new AnnotationWriter(cw, false, bv, null, 0);
}
// write type, and reserve space for values count
bv.putShort(cw.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
if (visible) {
if (panns == null) {
panns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
}
aw.next = panns[parameter];
panns[parameter] = aw;
} else {
if (ipanns == null) {
ipanns = new AnnotationWriter[Type.getArgumentTypes(descriptor).length];
}
aw.next = ipanns[parameter];
ipanns[parameter] = aw;
}
return aw;
}
    /**
     * Queues a non-standard attribute, either on the Code attribute list or on
     * the method attribute list depending on the attribute kind.
     */
    @Override
    public void visitAttribute(final Attribute attr) {
        if (attr.isCodeAttribute()) {
            attr.next = cattrs;
            cattrs = attr;
        } else {
            attr.next = attrs;
            attrs = attr;
        }
    }
    /**
     * Nothing to do here: bytecode emission starts lazily with the first
     * instruction visit.
     */
    @Override
    public void visitCode() {
    }
    /**
     * Visits a stack map frame given by the caller (typically ClassReader).
     * Ignored when frames are recomputed from scratch (FRAMES); merged into
     * the current frame when frames are only inserted (INSERTED_FRAMES);
     * otherwise written in compressed form to {@link #stackMap}.
     */
    @Override
    public void visitFrame(final int type, final int nLocal,
            final Object[] local, final int nStack, final Object[] stack) {
        if (compute == FRAMES) {
            // frames are recomputed in visitMaxs; the visited one is ignored
            return;
        }
        if (compute == INSERTED_FRAMES) {
            if (currentBlock.frame == null) {
                // This should happen only once, for the implicit first frame
                // (which is explicitly visited in ClassReader if the
                // EXPAND_ASM_INSNS option is used).
                currentBlock.frame = new CurrentFrame();
                currentBlock.frame.owner = currentBlock;
                currentBlock.frame.initInputFrame(cw, access,
                        Type.getArgumentTypes(descriptor), nLocal);
                visitImplicitFirstFrame();
            } else {
                if (type == Opcodes.F_NEW) {
                    currentBlock.frame.set(cw, nLocal, local, nStack, stack);
                } else {
                    // In this case type is equal to F_INSERT by hypothesis, and
                    // currentBlock.frame contains the stack map frame at the
                    // current instruction, computed from the last F_NEW frame
                    // and the bytecode instructions in between (via calls to
                    // CurrentFrame#execute).
                }
                visitFrame(currentBlock.frame);
            }
        } else if (type == Opcodes.F_NEW) {
            // expanded frame: convert each local/stack element to the internal
            // Frame encoding and store a full frame
            if (previousFrame == null) {
                visitImplicitFirstFrame();
            }
            currentLocals = nLocal;
            int frameIndex = startFrame(code.length, nLocal, nStack);
            for (int i = 0; i < nLocal; ++i) {
                if (local[i] instanceof String) {
                    String desc = Type.getObjectType((String) local[i]).getDescriptor();
                    frame[frameIndex++] = Frame.type(cw, desc);
                } else if (local[i] instanceof Integer) {
                    frame[frameIndex++] = Frame.BASE | ((Integer) local[i]).intValue();
                } else {
                    // a Label denotes an UNINITIALIZED type created by a NEW
                    // instruction at that label's position
                    frame[frameIndex++] = Frame.UNINITIALIZED
                            | cw.addUninitializedType("",
                                    ((Label) local[i]).position);
                }
            }
            for (int i = 0; i < nStack; ++i) {
                if (stack[i] instanceof String) {
                    String desc = Type.getObjectType((String) stack[i]).getDescriptor();
                    frame[frameIndex++] = Frame.type(cw, desc);
                } else if (stack[i] instanceof Integer) {
                    frame[frameIndex++] = Frame.BASE | ((Integer) stack[i]).intValue();
                } else {
                    frame[frameIndex++] = Frame.UNINITIALIZED
                            | cw.addUninitializedType("",
                                    ((Label) stack[i]).position);
                }
            }
            endFrame();
        } else {
            // compressed frame: write directly in the StackMapTable format
            int delta;
            if (stackMap == null) {
                stackMap = new ByteVector();
                delta = code.length;
            } else {
                delta = code.length - previousFrameOffset - 1;
                if (delta < 0) {
                    if (type == Opcodes.F_SAME) {
                        return;
                    } else {
                        throw new IllegalStateException();
                    }
                }
            }
            switch (type) {
            case Opcodes.F_FULL:
                currentLocals = nLocal;
                stackMap.putByte(FULL_FRAME).putShort(delta).putShort(nLocal);
                for (int i = 0; i < nLocal; ++i) {
                    writeFrameType(local[i]);
                }
                stackMap.putShort(nStack);
                for (int i = 0; i < nStack; ++i) {
                    writeFrameType(stack[i]);
                }
                break;
            case Opcodes.F_APPEND:
                currentLocals += nLocal;
                stackMap.putByte(SAME_FRAME_EXTENDED + nLocal).putShort(delta);
                for (int i = 0; i < nLocal; ++i) {
                    writeFrameType(local[i]);
                }
                break;
            case Opcodes.F_CHOP:
                currentLocals -= nLocal;
                stackMap.putByte(SAME_FRAME_EXTENDED - nLocal).putShort(delta);
                break;
            case Opcodes.F_SAME:
                if (delta < 64) {
                    stackMap.putByte(delta);
                } else {
                    stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
                }
                break;
            case Opcodes.F_SAME1:
                if (delta < 64) {
                    stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
                } else {
                    stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
                            .putShort(delta);
                }
                writeFrameType(stack[0]);
                break;
            }
            previousFrameOffset = code.length;
            ++frameCount;
        }
        maxStack = Math.max(maxStack, nStack);
        maxLocals = Math.max(maxLocals, currentLocals);
    }
    /**
     * Visits a zero-operand instruction, appends it to the bytecode and
     * updates the frame / stack size bookkeeping for the current basic block.
     */
    @Override
    public void visitInsn(final int opcode) {
        lastCodeOffset = code.length;
        // adds the instruction to the bytecode of the method
        code.putByte(opcode);
        // update currentBlock
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, null, null);
            } else {
                // updates current and max stack sizes using the per-opcode
                // stack size delta table
                int size = stackSize + Frame.SIZE[opcode];
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
            // if opcode == ATHROW or xRETURN, ends current block (no successor)
            if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
                    || opcode == Opcodes.ATHROW) {
                noSuccessor();
            }
        }
    }
    /**
     * Visits an instruction with a single int operand (BIPUSH, SIPUSH or
     * NEWARRAY) and appends it to the bytecode.
     */
    @Override
    public void visitIntInsn(final int opcode, final int operand) {
        lastCodeOffset = code.length;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, operand, null, null);
            } else if (opcode != Opcodes.NEWARRAY) {
                // updates current and max stack sizes only for BIPUSH or
                // SIPUSH, which push one value (stack size variation = 0 for
                // NEWARRAY, which pops the length and pushes the array)
                int size = stackSize + 1;
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        if (opcode == Opcodes.SIPUSH) {
            code.put12(opcode, operand);
        } else { // BIPUSH or NEWARRAY
            code.put11(opcode, operand);
        }
    }
    /**
     * Visits a local variable instruction (xLOAD, xSTORE or RET), appending it
     * in short form (e.g. ILOAD_0), standard form, or wide form depending on
     * the variable index.
     */
    @Override
    public void visitVarInsn(final int opcode, final int var) {
        lastCodeOffset = code.length;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, var, null, null);
            } else {
                // updates current and max stack sizes
                if (opcode == Opcodes.RET) {
                    // no stack change, but end of current block (no successor)
                    currentBlock.status |= Label.RET;
                    // save 'stackSize' here for future use
                    // (see {@link #findSubroutineSuccessors})
                    currentBlock.inputStackTop = stackSize;
                    noSuccessor();
                } else { // xLOAD or xSTORE
                    int size = stackSize + Frame.SIZE[opcode];
                    if (size > maxStackSize) {
                        maxStackSize = size;
                    }
                    stackSize = size;
                }
            }
        }
        if (compute != NOTHING) {
            // updates max locals; long and double values occupy two slots
            int n;
            if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
                    || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) {
                n = var + 2;
            } else {
                n = var + 1;
            }
            if (n > maxLocals) {
                maxLocals = n;
            }
        }
        // adds the instruction to the bytecode of the method
        if (var < 4 && opcode != Opcodes.RET) {
            int opt;
            if (opcode < Opcodes.ISTORE) {
                /* ILOAD_0 */
                opt = 26 + ((opcode - Opcodes.ILOAD) << 2) + var;
            } else {
                /* ISTORE_0 */
                opt = 59 + ((opcode - Opcodes.ISTORE) << 2) + var;
            }
            code.putByte(opt);
        } else if (var >= 256) {
            code.putByte(196 /* WIDE */).put12(opcode, var);
        } else {
            code.put11(opcode, var);
        }
        // when frames are computed and the method has exception handlers, a
        // store may change the frame seen by a handler, so a new basic block
        // is started after each store
        if (opcode >= Opcodes.ISTORE && compute == FRAMES && handlerCount > 0) {
            visitLabel(new Label());
        }
    }
    /**
     * Visits a type instruction (NEW, ANEWARRAY, CHECKCAST or INSTANCEOF) and
     * appends it to the bytecode with a constant pool class reference.
     */
    @Override
    public void visitTypeInsn(final int opcode, final String type) {
        lastCodeOffset = code.length;
        Item i = cw.newStringishItem(ClassWriter.CLASS, type);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                // code.length is passed so NEW can record its bytecode offset
                currentBlock.frame.execute(opcode, code.length, cw, i);
            } else if (opcode == Opcodes.NEW) {
                // updates current and max stack sizes only if opcode == NEW
                // (no stack change for ANEWARRAY, CHECKCAST, INSTANCEOF)
                int size = stackSize + 1;
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(opcode, i.index);
    }
    /**
     * Visits a field access instruction (GETSTATIC, PUTSTATIC, GETFIELD or
     * PUTFIELD) and appends it to the bytecode.
     */
    @Override
    public void visitFieldInsn(final int opcode, final String owner,
            final String name, final String desc) {
        lastCodeOffset = code.length;
        Item i = cw.newFieldItem(owner, name, desc);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, cw, i);
            } else {
                int size;
                // computes the stack size variation; 'D' (double) and 'J'
                // (long) field types occupy two stack slots
                char c = desc.charAt(0);
                switch (opcode) {
                case Opcodes.GETSTATIC:
                    size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
                    break;
                case Opcodes.PUTSTATIC:
                    size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
                    break;
                case Opcodes.GETFIELD:
                    size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
                    break;
                // case Constants.PUTFIELD:
                default:
                    size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
                    break;
                }
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(opcode, i.index);
    }
    /**
     * Visits a method invocation instruction and appends it to the bytecode
     * (INVOKEINTERFACE additionally carries an argument-slot count byte and a
     * mandatory zero byte).
     */
    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc, final boolean itf) {
        lastCodeOffset = code.length;
        Item i = cw.newMethodItem(owner, name, desc, itf);
        int argSize = i.intVal;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, cw, i);
            } else {
                /*
                 * computes the stack size variation. In order not to recompute
                 * several times this variation for the same Item, we use the
                 * intVal field of this item to store this variation, once it
                 * has been computed. More precisely this intVal field stores
                 * the sizes of the arguments and of the return value
                 * corresponding to desc.
                 */
                if (argSize == 0) {
                    // the above sizes have not been computed yet,
                    // so we compute them...
                    argSize = Type.getArgumentsAndReturnSizes(desc);
                    // ... and we save them in order
                    // not to recompute them in the future
                    i.intVal = argSize;
                }
                int size;
                // argSize >> 2 = argument slots popped, argSize & 0x03 =
                // return value slots pushed; INVOKESTATIC pops no receiver
                if (opcode == Opcodes.INVOKESTATIC) {
                    size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
                } else {
                    size = stackSize - (argSize >> 2) + (argSize & 0x03);
                }
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        if (opcode == Opcodes.INVOKEINTERFACE) {
            if (argSize == 0) {
                argSize = Type.getArgumentsAndReturnSizes(desc);
                i.intVal = argSize;
            }
            code.put12(Opcodes.INVOKEINTERFACE, i.index).put11(argSize >> 2, 0);
        } else {
            code.put12(opcode, i.index);
        }
    }
    /**
     * Visits an INVOKEDYNAMIC instruction and appends it to the bytecode,
     * followed by the two mandatory zero bytes.
     */
    @Override
    public void visitInvokeDynamicInsn(final String name, final String desc,
            final Handle bsm, final Object... bsmArgs) {
        lastCodeOffset = code.length;
        Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
        int argSize = i.intVal;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(Opcodes.INVOKEDYNAMIC, 0, cw, i);
            } else {
                /*
                 * computes the stack size variation. In order not to recompute
                 * several times this variation for the same Item, we use the
                 * intVal field of this item to store this variation, once it
                 * has been computed. More precisely this intVal field stores
                 * the sizes of the arguments and of the return value
                 * corresponding to desc.
                 */
                if (argSize == 0) {
                    // the above sizes have not been computed yet,
                    // so we compute them...
                    argSize = Type.getArgumentsAndReturnSizes(desc);
                    // ... and we save them in order
                    // not to recompute them in the future
                    i.intVal = argSize;
                }
                // like INVOKESTATIC: no receiver is popped, hence the '+ 1'
                int size = stackSize - (argSize >> 2) + (argSize & 0x03) + 1;
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        code.put12(Opcodes.INVOKEDYNAMIC, i.index);
        code.putShort(0);
    }
    /**
     * Visits a jump instruction. Backward jumps whose offset does not fit in a
     * signed short are automatically rewritten using GOTO_W / JSR_W, or an
     * inverted IFxxx followed by an ASM pseudo GOTO_W instruction.
     */
    @Override
    public void visitJumpInsn(int opcode, final Label label) {
        // opcodes >= 200 are the wide forms (GOTO_W, JSR_W); normalize to the
        // narrow opcode (GOTO_W - 33 == GOTO) and remember the wide request
        boolean isWide = opcode >= 200; // GOTO_W
        opcode = isWide ? opcode - 33 : opcode;
        lastCodeOffset = code.length;
        Label nextInsn = null;
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES) {
                currentBlock.frame.execute(opcode, 0, null, null);
                // 'label' is the target of a jump instruction
                label.getFirst().status |= Label.TARGET;
                // adds 'label' as a successor of this basic block
                addSuccessor(Edge.NORMAL, label);
                if (opcode != Opcodes.GOTO) {
                    // creates a Label for the next basic block
                    nextInsn = new Label();
                }
            } else if (compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(opcode, 0, null, null);
            } else {
                if (opcode == Opcodes.JSR) {
                    if ((label.status & Label.SUBROUTINE) == 0) {
                        label.status |= Label.SUBROUTINE;
                        ++subroutines;
                    }
                    currentBlock.status |= Label.JSR;
                    addSuccessor(stackSize + 1, label);
                    // creates a Label for the next basic block
                    nextInsn = new Label();
                    /*
                     * note that, by construction in this method, a JSR block
                     * has at least two successors in the control flow graph:
                     * the first one leads the next instruction after the JSR,
                     * while the second one leads to the JSR target.
                     */
                } else {
                    // updates current stack size (max stack size unchanged
                    // because stack size variation always negative in this
                    // case)
                    stackSize += Frame.SIZE[opcode];
                    addSuccessor(stackSize, label);
                }
            }
        }
        // adds the instruction to the bytecode of the method
        if ((label.status & Label.RESOLVED) != 0
                && label.position - code.length < Short.MIN_VALUE) {
            /*
             * case of a backward jump with an offset < -32768. In this case we
             * automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx
             * <l> with IFNOTxxx <L> GOTO_W <l> L:..., where IFNOTxxx is the
             * "opposite" opcode of IFxxx (i.e., IFNE for IFEQ) and where <L>
             * designates the instruction just after the GOTO_W.
             */
            if (opcode == Opcodes.GOTO) {
                code.putByte(200); // GOTO_W
            } else if (opcode == Opcodes.JSR) {
                code.putByte(201); // JSR_W
            } else {
                // if the IF instruction is transformed into IFNOT GOTO_W the
                // next instruction becomes the target of the IFNOT instruction
                if (nextInsn != null) {
                    nextInsn.status |= Label.TARGET;
                }
                code.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
                        : opcode ^ 1);
                code.putShort(8); // jump offset
                // ASM pseudo GOTO_W insn, see ClassReader. We don't use a real
                // GOTO_W because we might need to insert a frame just after (as
                // the target of the IFNOTxxx jump instruction).
                code.putByte(220);
                cw.hasAsmInsns = true;
            }
            label.put(this, code, code.length - 1, true);
        } else if (isWide) {
            /*
             * case of a GOTO_W or JSR_W specified by the user (normally
             * ClassReader when used to resize instructions). In this case we
             * keep the original instruction.
             */
            code.putByte(opcode + 33);
            label.put(this, code, code.length - 1, true);
        } else {
            /*
             * case of a backward jump with an offset >= -32768, or of a forward
             * jump with, of course, an unknown offset. In these cases we store
             * the offset in 2 bytes (which will be increased in
             * resizeInstructions, if needed).
             */
            code.putByte(opcode);
            label.put(this, code, code.length - 1, false);
        }
        if (currentBlock != null) {
            if (nextInsn != null) {
                // if the jump instruction is not a GOTO, the next instruction
                // is also a successor of this instruction. Calling visitLabel
                // adds the label of this next instruction as a successor of the
                // current block, and starts a new basic block
                visitLabel(nextInsn);
            }
            if (opcode == Opcodes.GOTO) {
                noSuccessor();
            }
        }
    }
    /**
     * Visits a label: resolves forward references to it and, depending on
     * {@link #compute}, ends the current basic block and starts a new one.
     */
    @Override
    public void visitLabel(final Label label) {
        // resolves previous forward references to label, if any
        cw.hasAsmInsns |= label.resolve(this, code.length, code.data);
        // updates currentBlock
        if ((label.status & Label.DEBUG) != 0) {
            // debug-only labels (line numbers, local variable ranges) do not
            // delimit basic blocks
            return;
        }
        if (compute == FRAMES) {
            if (currentBlock != null) {
                if (label.position == currentBlock.position) {
                    // successive labels, do not start a new basic block
                    currentBlock.status |= (label.status & Label.TARGET);
                    label.frame = currentBlock.frame;
                    return;
                }
                // ends current block (with one new successor)
                addSuccessor(Edge.NORMAL, label);
            }
            // begins a new current block
            currentBlock = label;
            if (label.frame == null) {
                label.frame = new Frame();
                label.frame.owner = label;
            }
            // updates the basic block list
            if (previousBlock != null) {
                if (label.position == previousBlock.position) {
                    previousBlock.status |= (label.status & Label.TARGET);
                    label.frame = previousBlock.frame;
                    currentBlock = previousBlock;
                    return;
                }
                previousBlock.successor = label;
            }
            previousBlock = label;
        } else if (compute == INSERTED_FRAMES) {
            if (currentBlock == null) {
                // This case should happen only once, for the visitLabel call in
                // the constructor. Indeed, if compute is equal to
                // INSERTED_FRAMES currentBlock can not be set back to null (see
                // #noSuccessor).
                currentBlock = label;
            } else {
                // Updates the frame owner so that a correct frame offset is
                // computed in visitFrame(Frame).
                currentBlock.frame.owner = label;
            }
        } else if (compute == MAXS) {
            if (currentBlock != null) {
                // ends current block (with one new successor)
                currentBlock.outputStackMax = maxStackSize;
                addSuccessor(stackSize, label);
            }
            // begins a new current block
            currentBlock = label;
            // resets the relative current and max stack sizes
            stackSize = 0;
            maxStackSize = 0;
            // updates the basic block list
            if (previousBlock != null) {
                previousBlock.successor = label;
            }
            previousBlock = label;
        }
    }
    /**
     * Visits an LDC instruction, choosing LDC, LDC_W or LDC2_W depending on
     * the constant's type and pool index.
     */
    @Override
    public void visitLdcInsn(final Object cst) {
        lastCodeOffset = code.length;
        Item i = cw.newConstItem(cst);
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES || compute == INSERTED_FRAMES) {
                currentBlock.frame.execute(Opcodes.LDC, 0, cw, i);
            } else {
                int size;
                // computes the stack size variation: long and double constants
                // push two slots, everything else one
                if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
                    size = stackSize + 2;
                } else {
                    size = stackSize + 1;
                }
                // updates current and max stack sizes
                if (size > maxStackSize) {
                    maxStackSize = size;
                }
                stackSize = size;
            }
        }
        // adds the instruction to the bytecode of the method
        int index = i.index;
        if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
            code.put12(20 /* LDC2_W */, index);
        } else if (index >= 256) {
            code.put12(19 /* LDC_W */, index);
        } else {
            code.put11(Opcodes.LDC, index);
        }
    }
@Override
public void visitIincInsn(final int var, final int increment) {
lastCodeOffset = code.length;
if (currentBlock != null) {
if (compute == FRAMES || compute == INSERTED_FRAMES) {
currentBlock.frame.execute(Opcodes.IINC, var, null, null);
}
}
if (compute != NOTHING) {
// updates max locals
int n = var + 1;
if (n > maxLocals) {
maxLocals = n;
}
}
// adds the instruction to the bytecode of the method
if ((var > 255) || (increment > 127) || (increment < -128)) {
code.putByte(196 /* WIDE */).put12(Opcodes.IINC, var)
.putShort(increment);
} else {
code.putByte(Opcodes.IINC).put11(var, increment);
}
}
@Override
public void visitTableSwitchInsn(final int min, final int max,
final Label dflt, final Label... labels) {
lastCodeOffset = code.length;
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.TABLESWITCH);
code.putByteArray(null, 0, (4 - code.length % 4) % 4);
dflt.put(this, code, source, true);
code.putInt(min).putInt(max);
for (int i = 0; i < labels.length; ++i) {
labels[i].put(this, code, source, true);
}
// updates currentBlock
visitSwitchInsn(dflt, labels);
}
@Override
public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
final Label[] labels) {
lastCodeOffset = code.length;
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.LOOKUPSWITCH);
code.putByteArray(null, 0, (4 - code.length % 4) % 4);
dflt.put(this, code, source, true);
code.putInt(labels.length);
for (int i = 0; i < labels.length; ++i) {
code.putInt(keys[i]);
labels[i].put(this, code, source, true);
}
// updates currentBlock
visitSwitchInsn(dflt, labels);
}
    /**
     * Common control-flow bookkeeping for TABLESWITCH and LOOKUPSWITCH: adds
     * the default and case labels as successors of the current block, then
     * ends the block (a switch has no fall-through successor).
     */
    private void visitSwitchInsn(final Label dflt, final Label[] labels) {
        // Label currentBlock = this.currentBlock;
        if (currentBlock != null) {
            if (compute == FRAMES) {
                currentBlock.frame.execute(Opcodes.LOOKUPSWITCH, 0, null, null);
                // adds current block successors
                addSuccessor(Edge.NORMAL, dflt);
                dflt.getFirst().status |= Label.TARGET;
                for (int i = 0; i < labels.length; ++i) {
                    addSuccessor(Edge.NORMAL, labels[i]);
                    labels[i].getFirst().status |= Label.TARGET;
                }
            } else {
                // updates current stack size (max stack size unchanged):
                // the switch pops its key
                --stackSize;
                // adds current block successors
                addSuccessor(stackSize, dflt);
                for (int i = 0; i < labels.length; ++i) {
                    addSuccessor(stackSize, labels[i]);
                }
            }
            // ends current block
            noSuccessor();
        }
    }
@Override
public void visitMultiANewArrayInsn(final String desc, final int dims) {
lastCodeOffset = code.length;
Item i = cw.newStringishItem(ClassWriter.CLASS, desc);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
if (compute == FRAMES || compute == INSERTED_FRAMES) {
currentBlock.frame.execute(Opcodes.MULTIANEWARRAY, dims, cw, i);
} else {
// updates current stack size (max stack size unchanged because
// stack size variation always negative or null)
stackSize += 1 - dims;
}
}
// adds the instruction to the bytecode of the method
code.put12(Opcodes.MULTIANEWARRAY, i.index).putByte(dims);
}
@Override
public AnnotationVisitor visitInsnAnnotation(int typeRef,
TypePath typePath, String desc, boolean visible) {
ByteVector bv = new ByteVector();
// write target_type and target_info
typeRef = (typeRef & 0xFF0000FF) | (lastCodeOffset << 8);
AnnotationWriter.putTarget(typeRef, typePath, bv);
// write type, and reserve space for values count
bv.putShort(cw.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
bv.length - 2);
if (visible) {
aw.next = ctanns;
ctanns = aw;
} else {
aw.next = ictanns;
ictanns = aw;
}
return aw;
}
@Override
public void visitTryCatchBlock(final Label start, final Label end,
final Label handler, final String type) {
++handlerCount;
Handler h = new Handler();
h.start = start;
h.end = end;
h.handler = handler;
h.desc = type;
h.type = type != null ? cw.newClass(type) : 0;
if (lastHandler == null) {
firstHandler = h;
} else {
lastHandler.next = h;
}
lastHandler = h;
}
@Override
public AnnotationVisitor visitTryCatchAnnotation(int typeRef,
TypePath typePath, String desc, boolean visible) {
ByteVector bv = new ByteVector();
// write target_type and target_info
AnnotationWriter.putTarget(typeRef, typePath, bv);
// write type, and reserve space for values count
bv.putShort(cw.newUTF8(desc)).putShort(0);
AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
bv.length - 2);
if (visible) {
aw.next = ctanns;
ctanns = aw;
} else {
aw.next = ictanns;
ictanns = aw;
}
return aw;
}
    /**
     * Records a local variable's name, descriptor, optional signature and
     * live range in the LocalVariableTable (and LocalVariableTypeTable when a
     * generic signature is present), and enlarges maxLocals if needed.
     */
    @Override
    public void visitLocalVariable(final String name, final String desc,
            final String signature, final Label start, final Label end,
            final int index) {
        if (signature != null) {
            if (localVarType == null) {
                localVarType = new ByteVector();
            }
            ++localVarTypeCount;
            localVarType.putShort(start.position)
                    .putShort(end.position - start.position)
                    .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(signature))
                    .putShort(index);
        }
        if (localVar == null) {
            localVar = new ByteVector();
        }
        ++localVarCount;
        localVar.putShort(start.position)
                .putShort(end.position - start.position)
                .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(desc))
                .putShort(index);
        if (compute != NOTHING) {
            // updates max locals; 'J' (long) and 'D' (double) occupy two slots
            char c = desc.charAt(0);
            int n = index + (c == 'J' || c == 'D' ? 2 : 1);
            if (n > maxLocals) {
                maxLocals = n;
            }
        }
    }
    /**
     * Starts the visit of a type annotation on a local variable, writing the
     * localvar_target table (one start/length/index triple per live range)
     * followed by the type path and annotation type.
     */
    @Override
    public AnnotationVisitor visitLocalVariableAnnotation(int typeRef,
            TypePath typePath, Label[] start, Label[] end, int[] index,
            String desc, boolean visible) {
        ByteVector bv = new ByteVector();
        // write target_type and target_info
        bv.putByte(typeRef >>> 24).putShort(start.length);
        for (int i = 0; i < start.length; ++i) {
            bv.putShort(start[i].position)
                    .putShort(end[i].position - start[i].position)
                    .putShort(index[i]);
        }
        if (typePath == null) {
            // empty type path: a single zero path_length byte
            bv.putByte(0);
        } else {
            int length = typePath.b[typePath.offset] * 2 + 1;
            bv.putByteArray(typePath.b, typePath.offset, length);
        }
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
                bv.length - 2);
        if (visible) {
            aw.next = ctanns;
            ctanns = aw;
        } else {
            aw.next = ictanns;
            ictanns = aw;
        }
        return aw;
    }
@Override
public void visitLineNumber(final int line, final Label start) {
if (lineNumber == null) {
lineNumber = new ByteVector();
}
++lineNumberCount;
lineNumber.putShort(start.position);
lineNumber.putShort(line);
}
    /**
     * Ends the visit of the method's code. Depending on {@link #compute},
     * either computes the stack map frames and max stack size from scratch
     * via a fix-point dataflow analysis (FRAMES), computes only the max stack
     * size via a control flow analysis (MAXS), or simply records the caller's
     * maxStack / maxLocals values.
     */
    @Override
    public void visitMaxs(final int maxStack, final int maxLocals) {
        if (compute == FRAMES) {
            // completes the control flow graph with exception handler blocks
            Handler handler = firstHandler;
            while (handler != null) {
                Label l = handler.start.getFirst();
                Label h = handler.handler.getFirst();
                Label e = handler.end.getFirst();
                // computes the kind of the edges to 'h'
                String t = handler.desc == null ? "java/lang/Throwable"
                        : handler.desc;
                int kind = Frame.OBJECT | cw.addType(t);
                // h is an exception handler
                h.status |= Label.TARGET;
                // adds 'h' as a successor of labels between 'start' and 'end'
                while (l != e) {
                    // creates an edge to 'h'
                    Edge b = new Edge();
                    b.info = kind;
                    b.successor = h;
                    // adds it to the successors of 'l'
                    b.next = l.successors;
                    l.successors = b;
                    // goes to the next label
                    l = l.successor;
                }
                handler = handler.next;
            }
            // creates and visits the first (implicit) frame
            Frame f = labels.frame;
            f.initInputFrame(cw, access, Type.getArgumentTypes(descriptor),
                    this.maxLocals);
            visitFrame(f);
            /*
             * fix point algorithm: mark the first basic block as 'changed'
             * (i.e. put it in the 'changed' list) and, while there are changed
             * basic blocks, choose one, mark it as unchanged, and update its
             * successors (which can be changed in the process).
             */
            int max = 0;
            Label changed = labels;
            while (changed != null) {
                // removes a basic block from the list of changed basic blocks
                Label l = changed;
                changed = changed.next;
                l.next = null;
                f = l.frame;
                // a reachable jump target must be stored in the stack map
                if ((l.status & Label.TARGET) != 0) {
                    l.status |= Label.STORE;
                }
                // all visited labels are reachable, by definition
                l.status |= Label.REACHABLE;
                // updates the (absolute) maximum stack size
                int blockMax = f.inputStack.length + l.outputStackMax;
                if (blockMax > max) {
                    max = blockMax;
                }
                // updates the successors of the current basic block
                Edge e = l.successors;
                while (e != null) {
                    Label n = e.successor.getFirst();
                    boolean change = f.merge(cw, n.frame, e.info);
                    if (change && n.next == null) {
                        // if n has changed and is not already in the 'changed'
                        // list, adds it to this list
                        n.next = changed;
                        changed = n;
                    }
                    e = e.next;
                }
            }
            // visits all the frames that must be stored in the stack map
            Label l = labels;
            while (l != null) {
                f = l.frame;
                if ((l.status & Label.STORE) != 0) {
                    visitFrame(f);
                }
                if ((l.status & Label.REACHABLE) == 0) {
                    // finds start and end of dead basic block
                    Label k = l.successor;
                    int start = l.position;
                    int end = (k == null ? code.length : k.position) - 1;
                    // if non empty basic block
                    if (end >= start) {
                        max = Math.max(max, 1);
                        // replaces instructions with NOP ... NOP ATHROW
                        for (int i = start; i < end; ++i) {
                            code.data[i] = Opcodes.NOP;
                        }
                        code.data[end] = (byte) Opcodes.ATHROW;
                        // emits a frame for this unreachable block
                        int frameIndex = startFrame(start, 0, 1);
                        frame[frameIndex] = Frame.OBJECT
                                | cw.addType("java/lang/Throwable");
                        endFrame();
                        // removes the start-end range from the exception
                        // handlers
                        firstHandler = Handler.remove(firstHandler, l, k);
                    }
                }
                l = l.successor;
            }
            // recounts the handlers left after dead code removal
            handler = firstHandler;
            handlerCount = 0;
            while (handler != null) {
                handlerCount += 1;
                handler = handler.next;
            }
            this.maxStack = max;
        } else if (compute == MAXS) {
            // completes the control flow graph with exception handler blocks
            Handler handler = firstHandler;
            while (handler != null) {
                Label l = handler.start;
                Label h = handler.handler;
                Label e = handler.end;
                // adds 'h' as a successor of labels between 'start' and 'end'
                while (l != e) {
                    // creates an edge to 'h'
                    Edge b = new Edge();
                    b.info = Edge.EXCEPTION;
                    b.successor = h;
                    // adds it to the successors of 'l'
                    if ((l.status & Label.JSR) == 0) {
                        b.next = l.successors;
                        l.successors = b;
                    } else {
                        // if l is a JSR block, adds b after the first two edges
                        // to preserve the hypothesis about JSR block successors
                        // order (see {@link #visitJumpInsn})
                        b.next = l.successors.next.next;
                        l.successors.next.next = b;
                    }
                    // goes to the next label
                    l = l.successor;
                }
                handler = handler.next;
            }
            if (subroutines > 0) {
                // completes the control flow graph with the RET successors
                /*
                 * first step: finds the subroutines. This step determines, for
                 * each basic block, to which subroutine(s) it belongs.
                 */
                // finds the basic blocks that belong to the "main" subroutine
                int id = 0;
                labels.visitSubroutine(null, 1, subroutines);
                // finds the basic blocks that belong to the real subroutines
                Label l = labels;
                while (l != null) {
                    if ((l.status & Label.JSR) != 0) {
                        // the subroutine is defined by l's TARGET, not by l
                        Label subroutine = l.successors.next.successor;
                        // if this subroutine has not been visited yet...
                        if ((subroutine.status & Label.VISITED) == 0) {
                            // ...assigns it a new id and finds its basic blocks
                            id += 1;
                            subroutine.visitSubroutine(null, (id / 32L) << 32
                                    | (1L << (id % 32)), subroutines);
                        }
                    }
                    l = l.successor;
                }
                // second step: finds the successors of RET blocks
                l = labels;
                while (l != null) {
                    if ((l.status & Label.JSR) != 0) {
                        Label L = labels;
                        while (L != null) {
                            L.status &= ~Label.VISITED2;
                            L = L.successor;
                        }
                        // the subroutine is defined by l's TARGET, not by l
                        Label subroutine = l.successors.next.successor;
                        subroutine.visitSubroutine(l, 0, subroutines);
                    }
                    l = l.successor;
                }
            }
            /*
             * control flow analysis algorithm: while the block stack is not
             * empty, pop a block from this stack, update the max stack size,
             * compute the true (non relative) begin stack size of the
             * successors of this block, and push these successors onto the
             * stack (unless they have already been pushed onto the stack).
             * Note: by hypothesis, the {@link Label#inputStackTop} of the
             * blocks in the block stack are the true (non relative) beginning
             * stack sizes of these blocks.
             */
            int max = 0;
            Label stack = labels;
            while (stack != null) {
                // pops a block from the stack
                Label l = stack;
                stack = stack.next;
                // computes the true (non relative) max stack size of this block
                int start = l.inputStackTop;
                int blockMax = start + l.outputStackMax;
                // updates the global max stack size
                if (blockMax > max) {
                    max = blockMax;
                }
                // analyzes the successors of the block
                Edge b = l.successors;
                if ((l.status & Label.JSR) != 0) {
                    // ignores the first edge of JSR blocks (virtual successor)
                    b = b.next;
                }
                while (b != null) {
                    l = b.successor;
                    // if this successor has not already been pushed...
                    if ((l.status & Label.PUSHED) == 0) {
                        // computes its true beginning stack size...
                        l.inputStackTop = b.info == Edge.EXCEPTION ? 1 : start
                                + b.info;
                        // ...and pushes it onto the stack
                        l.status |= Label.PUSHED;
                        l.next = stack;
                        stack = l;
                    }
                    b = b.next;
                }
            }
            this.maxStack = Math.max(maxStack, max);
        } else {
            // compute == NOTHING: trust the values provided by the caller
            this.maxStack = maxStack;
            this.maxLocals = maxLocals;
        }
    }
@Override
public void visitEnd() {
    // Nothing to do here: the method's bytecode and attributes are
    // accumulated incrementally by the other visit methods and are only
    // serialized later, in getSize() and put().
}
// ------------------------------------------------------------------------
// Utility methods: control flow analysis algorithm
// ------------------------------------------------------------------------
/**
 * Appends a control flow edge from {@link #currentBlock currentBlock} to
 * the given successor block.
 *
 * @param info
 *            information about the control flow edge to be added.
 * @param successor
 *            the successor block to be added to the current block.
 */
private void addSuccessor(final int info, final Label successor) {
    // Build the new edge object...
    final Edge edge = new Edge();
    edge.info = info;
    edge.successor = successor;
    // ...and prepend it to the successor list of the current block.
    edge.next = currentBlock.successors;
    currentBlock.successors = edge;
}
/**
 * Ends the current basic block. This method must be used in the case where
 * the current basic block does not have any successor.
 */
private void noSuccessor() {
    if (compute == FRAMES) {
        // In FRAMES mode, start a fresh label owning its own frame at the
        // current code offset, and chain it after the previous block.
        final Label label = new Label();
        label.frame = new Frame();
        label.frame.owner = label;
        label.resolve(this, code.length, code.data);
        previousBlock.successor = label;
        previousBlock = label;
    } else {
        // Otherwise just record the maximum output stack size reached in
        // the block being closed.
        currentBlock.outputStackMax = maxStackSize;
    }
    if (compute != INSERTED_FRAMES) {
        // The next instruction (if any) starts a new, detached block.
        currentBlock = null;
    }
}
// ------------------------------------------------------------------------
// Utility methods: stack map frames
// ------------------------------------------------------------------------
/**
 * Visits a frame that has been computed from scratch, converting it from
 * the internal {@link Frame} representation into the {@link #frame} array
 * and then emitting it via {@link #endFrame()}.
 *
 * @param f
 *            the frame that must be visited.
 */
private void visitFrame(final Frame f) {
    int i, t;
    int nTop = 0;
    int nLocal = 0;
    int nStack = 0;
    int[] locals = f.inputLocals;
    int[] stacks = f.inputStack;
    // computes the number of locals (ignores TOP types that are just after
    // a LONG or a DOUBLE, and all trailing TOP types)
    for (i = 0; i < locals.length; ++i) {
        t = locals[i];
        if (t == Frame.TOP) {
            // defer counting TOP slots: they are only included if a
            // non-TOP local follows them (trailing TOPs are dropped)
            ++nTop;
        } else {
            nLocal += nTop + 1;
            nTop = 0;
        }
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            // skip the second (implicit TOP) slot of a two-word type
            ++i;
        }
    }
    // computes the stack size (ignores TOP types that are just after
    // a LONG or a DOUBLE)
    for (i = 0; i < stacks.length; ++i) {
        t = stacks[i];
        ++nStack;
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            ++i;
        }
    }
    // visits the frame and its content
    int frameIndex = startFrame(f.owner.position, nLocal, nStack);
    for (i = 0; nLocal > 0; ++i, --nLocal) {
        t = locals[i];
        frame[frameIndex++] = t;
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            ++i;
        }
    }
    for (i = 0; i < stacks.length; ++i) {
        t = stacks[i];
        frame[frameIndex++] = t;
        if (t == Frame.LONG || t == Frame.DOUBLE) {
            ++i;
        }
    }
    endFrame();
}
/**
 * Visit the implicit first frame of this method, derived from the method's
 * access flags and descriptor: the receiver (if any) followed by one local
 * per declared argument type.
 */
private void visitImplicitFirstFrame() {
    // There can be at most descriptor.length() + 1 locals
    int frameIndex = startFrame(0, descriptor.length() + 1, 0);
    if ((access & Opcodes.ACC_STATIC) == 0) {
        if ((access & ACC_CONSTRUCTOR) == 0) {
            // instance method: local 0 is 'this'
            frame[frameIndex++] = Frame.OBJECT | cw.addType(cw.thisName);
        } else {
            // constructor: 'this' is uninitialized until super() is called
            frame[frameIndex++] = Frame.UNINITIALIZED_THIS;
        }
    }
    // parse the argument types of the descriptor, starting at index 1
    // (i.e. just after the opening '(')
    int i = 1;
    loop: while (true) {
        // j remembers where the current type descriptor starts
        int j = i;
        switch (descriptor.charAt(i++)) {
        case 'Z':
        case 'C':
        case 'B':
        case 'S':
        case 'I':
            // boolean, char, byte, short and int all verify as INTEGER
            frame[frameIndex++] = Frame.INTEGER;
            break;
        case 'F':
            frame[frameIndex++] = Frame.FLOAT;
            break;
        case 'J':
            frame[frameIndex++] = Frame.LONG;
            break;
        case 'D':
            frame[frameIndex++] = Frame.DOUBLE;
            break;
        case '[':
            // array type: skip the remaining '[' dimensions...
            while (descriptor.charAt(i) == '[') {
                ++i;
            }
            // ...and, for object element types, the 'Lclassname;' part
            if (descriptor.charAt(i) == 'L') {
                ++i;
                while (descriptor.charAt(i) != ';') {
                    ++i;
                }
            }
            frame[frameIndex++] = Frame.type(cw, descriptor.substring(j, ++i));
            break;
        case 'L':
            // object type: consume up to the terminating ';'
            while (descriptor.charAt(i) != ';') {
                ++i;
            }
            frame[frameIndex++] = Frame.OBJECT
                    | cw.addType(descriptor.substring(j + 1, i++));
            break;
        default:
            // ')' (or any other char) ends the argument list
            break loop;
        }
    }
    // fix up the actual number of locals (startFrame used an upper bound)
    frame[1] = frameIndex - 3;
    endFrame();
}
/**
 * Starts the visit of a stack map frame, allocating (or reusing) the
 * {@link #frame} buffer that will hold it.
 *
 * @param offset
 *            the offset of the instruction to which the frame corresponds.
 * @param nLocal
 *            the number of local variables in the frame.
 * @param nStack
 *            the number of stack elements in the frame.
 * @return the index of the next element to be written in this frame.
 */
private int startFrame(final int offset, final int nLocal, final int nStack) {
    // three header slots (offset, nLocal, nStack) plus one slot per type
    final int requiredLength = 3 + nLocal + nStack;
    if (frame == null || frame.length < requiredLength) {
        frame = new int[requiredLength];
    }
    frame[0] = offset;
    frame[1] = nLocal;
    frame[2] = nStack;
    return 3;
}
/**
 * Checks if the visit of the current frame {@link #frame} is finished, and
 * if yes, write it in the StackMapTable attribute. The implicit first
 * frame (the one visited while {@link #previousFrame} is still null) is
 * recorded but never written.
 */
private void endFrame() {
    if (previousFrame != null) { // do not write the first frame
        if (stackMap == null) {
            // lazily create the attribute's backing buffer
            stackMap = new ByteVector();
        }
        writeFrame();
        ++frameCount;
    }
    // the frame just visited becomes the reference for delta encoding of
    // the next one; this must happen after writeFrame() above, which
    // still reads previousFrame
    previousFrame = frame;
    frame = null;
}
/**
 * Compress and writes the current frame {@link #frame} in the StackMapTable
 * attribute, choosing the most compact frame type (SAME, SAME_LOCALS_1,
 * CHOP, APPEND or FULL) based on the difference with {@link #previousFrame}.
 */
private void writeFrame() {
    int clocalsSize = frame[1];
    int cstackSize = frame[2];
    if ((cw.version & 0xFFFF) < Opcodes.V1_6) {
        // pre-Java-6 class files use the uncompressed "StackMap"
        // attribute: every frame is written in full
        stackMap.putShort(frame[0]).putShort(clocalsSize);
        writeFrameTypes(3, 3 + clocalsSize);
        stackMap.putShort(cstackSize);
        writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
        return;
    }
    int localsSize = previousFrame[1];
    int type = FULL_FRAME;
    int k = 0;
    int delta;
    if (frameCount == 0) {
        // the first written frame uses an absolute offset...
        delta = frame[0];
    } else {
        // ...subsequent frames encode the offset relative to the
        // previous frame, minus one
        delta = frame[0] - previousFrame[0] - 1;
    }
    if (cstackSize == 0) {
        // k = number of locals added (positive) or removed (negative)
        k = clocalsSize - localsSize;
        switch (k) {
        case -3:
        case -2:
        case -1:
            // one to three locals removed: CHOP frame candidate
            type = CHOP_FRAME;
            localsSize = clocalsSize;
            break;
        case 0:
            // same locals, empty stack: SAME frame (extended form when
            // the delta does not fit in the tag byte)
            type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
            break;
        case 1:
        case 2:
        case 3:
            // one to three locals added: APPEND frame candidate
            type = APPEND_FRAME;
            break;
        }
    } else if (clocalsSize == localsSize && cstackSize == 1) {
        // same locals and exactly one stack item
        type = delta < 63 ? SAME_LOCALS_1_STACK_ITEM_FRAME
                : SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
    }
    if (type != FULL_FRAME) {
        // verify if locals are the same; any mismatch forces a full frame
        int l = 3;
        for (int j = 0; j < localsSize; j++) {
            if (frame[l] != previousFrame[l]) {
                type = FULL_FRAME;
                break;
            }
            l++;
        }
    }
    switch (type) {
    case SAME_FRAME:
        stackMap.putByte(delta);
        break;
    case SAME_LOCALS_1_STACK_ITEM_FRAME:
        stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
        writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
        break;
    case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
        stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED).putShort(
                delta);
        writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
        break;
    case SAME_FRAME_EXTENDED:
        stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
        break;
    case CHOP_FRAME:
        // CHOP tags are encoded as SAME_FRAME_EXTENDED + k, k in [-3,-1]
        stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
        break;
    case APPEND_FRAME:
        // APPEND tags are encoded as SAME_FRAME_EXTENDED + k, k in [1,3];
        // only the appended locals are written
        stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
        writeFrameTypes(3 + localsSize, 3 + clocalsSize);
        break;
    // case FULL_FRAME:
    default:
        stackMap.putByte(FULL_FRAME).putShort(delta).putShort(clocalsSize);
        writeFrameTypes(3, 3 + clocalsSize);
        stackMap.putShort(cstackSize);
        writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
    }
}
/**
 * Writes some types of the current frame {@link #frame} into the
 * StackMapTableAttribute. This method converts types from the format used
 * in {@link Label} to the format used in StackMapTable attributes. In
 * particular, it converts type table indexes to constant pool indexes.
 *
 * @param start
 *            index of the first type in {@link #frame} to write.
 * @param end
 *            index of last type in {@link #frame} to write (exclusive).
 */
private void writeFrameTypes(final int start, final int end) {
    for (int i = start; i < end; ++i) {
        int t = frame[i];
        // d holds the array dimension bits of the internal type encoding
        int d = t & Frame.DIM;
        if (d == 0) {
            // non-array type
            int v = t & Frame.BASE_VALUE;
            switch (t & Frame.BASE_KIND) {
            case Frame.OBJECT:
                // ITEM_Object (tag 7) + constant pool class index
                stackMap.putByte(7).putShort(
                        cw.newClass(cw.typeTable[v].strVal1));
                break;
            case Frame.UNINITIALIZED:
                // ITEM_Uninitialized (tag 8) + offset of the NEW insn
                stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
                break;
            default:
                // primitive verification type: the tag byte itself
                stackMap.putByte(v);
            }
        } else {
            // array type: rebuild its descriptor string from the
            // dimension count and element type
            StringBuilder sb = new StringBuilder();
            d >>= 28;
            while (d-- > 0) {
                sb.append('[');
            }
            if ((t & Frame.BASE_KIND) == Frame.OBJECT) {
                sb.append('L');
                sb.append(cw.typeTable[t & Frame.BASE_VALUE].strVal1);
                sb.append(';');
            } else {
                // map the internal primitive code to its descriptor char
                switch (t & 0xF) {
                case 1:
                    sb.append('I');
                    break;
                case 2:
                    sb.append('F');
                    break;
                case 3:
                    sb.append('D');
                    break;
                case 9:
                    sb.append('Z');
                    break;
                case 10:
                    sb.append('B');
                    break;
                case 11:
                    sb.append('C');
                    break;
                case 12:
                    sb.append('S');
                    break;
                default:
                    sb.append('J');
                }
            }
            // arrays are always written as ITEM_Object
            stackMap.putByte(7).putShort(cw.newClass(sb.toString()));
        }
    }
}
/**
 * Writes a single verification type, given in "visitFrame API" form, into
 * the StackMapTable attribute.
 *
 * @param type
 *            the type to write: a String (internal class name), an
 *            Integer (primitive verification type code) or a Label (the
 *            position of a NEW instruction for an uninitialized type).
 */
private void writeFrameType(final Object type) {
    if (type instanceof Integer) {
        // primitive verification type: written as a single tag byte
        stackMap.putByte(((Integer) type).intValue());
    } else if (type instanceof String) {
        // ITEM_Object (tag 7) followed by a constant pool class index
        stackMap.putByte(7).putShort(cw.newClass((String) type));
    } else {
        // ITEM_Uninitialized (tag 8) followed by the NEW's bytecode offset
        stackMap.putByte(8).putShort(((Label) type).position);
    }
}
// ------------------------------------------------------------------------
// Utility methods: dump bytecode array
// ------------------------------------------------------------------------
/**
 * Returns the size of the bytecode of this method, i.e. the exact number
 * of bytes that {@link #put(ByteVector)} will write, and registers in the
 * constant pool every UTF8 string that put will need.
 *
 * @return the size of the bytecode of this method.
 */
final int getSize() {
    if (classReaderOffset != 0) {
        // the method was copied verbatim from an existing class file:
        // 6 bytes of method_info header plus the copied bytes
        return 6 + classReaderLength;
    }
    // 8 = access + name + desc + attribute count (2 bytes each),
    // as written at the start of put()
    int size = 8;
    if (code.length > 0) {
        if (code.length > 65535) {
            // the code_length field of the Code attribute is a u4, but
            // branch offsets limit method code to 65535 bytes
            throw new RuntimeException("Method code too large!");
        }
        cw.newUTF8("Code");
        size += 18 + code.length + 8 * handlerCount;
        if (localVar != null) {
            cw.newUTF8("LocalVariableTable");
            size += 8 + localVar.length;
        }
        if (localVarType != null) {
            cw.newUTF8("LocalVariableTypeTable");
            size += 8 + localVarType.length;
        }
        if (lineNumber != null) {
            cw.newUTF8("LineNumberTable");
            size += 8 + lineNumber.length;
        }
        if (stackMap != null) {
            // Java 6+ uses the compressed StackMapTable attribute
            boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
            cw.newUTF8(zip ? "StackMapTable" : "StackMap");
            size += 8 + stackMap.length;
        }
        if (ctanns != null) {
            cw.newUTF8("RuntimeVisibleTypeAnnotations");
            size += 8 + ctanns.getSize();
        }
        if (ictanns != null) {
            cw.newUTF8("RuntimeInvisibleTypeAnnotations");
            size += 8 + ictanns.getSize();
        }
        if (cattrs != null) {
            size += cattrs.getSize(cw, code.data, code.length, maxStack,
                    maxLocals);
        }
    }
    if (exceptionCount > 0) {
        cw.newUTF8("Exceptions");
        size += 8 + 2 * exceptionCount;
    }
    if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
        // pre-Java-5 class files (or an explicit pseudo access flag)
        // express syntheticness via a "Synthetic" attribute
        if ((cw.version & 0xFFFF) < Opcodes.V1_5
                || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
            cw.newUTF8("Synthetic");
            size += 6;
        }
    }
    if ((access & Opcodes.ACC_DEPRECATED) != 0) {
        cw.newUTF8("Deprecated");
        size += 6;
    }
    if (signature != null) {
        cw.newUTF8("Signature");
        cw.newUTF8(signature);
        size += 8;
    }
    if (methodParameters != null) {
        cw.newUTF8("MethodParameters");
        size += 7 + methodParameters.length;
    }
    if (annd != null) {
        cw.newUTF8("AnnotationDefault");
        size += 6 + annd.length;
    }
    if (anns != null) {
        cw.newUTF8("RuntimeVisibleAnnotations");
        size += 8 + anns.getSize();
    }
    if (ianns != null) {
        cw.newUTF8("RuntimeInvisibleAnnotations");
        size += 8 + ianns.getSize();
    }
    if (tanns != null) {
        cw.newUTF8("RuntimeVisibleTypeAnnotations");
        size += 8 + tanns.getSize();
    }
    if (itanns != null) {
        cw.newUTF8("RuntimeInvisibleTypeAnnotations");
        size += 8 + itanns.getSize();
    }
    if (panns != null) {
        cw.newUTF8("RuntimeVisibleParameterAnnotations");
        // synthetic parameters are not included in the attribute
        size += 7 + 2 * (panns.length - synthetics);
        for (int i = panns.length - 1; i >= synthetics; --i) {
            size += panns[i] == null ? 0 : panns[i].getSize();
        }
    }
    if (ipanns != null) {
        cw.newUTF8("RuntimeInvisibleParameterAnnotations");
        size += 7 + 2 * (ipanns.length - synthetics);
        for (int i = ipanns.length - 1; i >= synthetics; --i) {
            size += ipanns[i] == null ? 0 : ipanns[i].getSize();
        }
    }
    if (attrs != null) {
        size += attrs.getSize(cw, null, 0, -1, -1);
    }
    return size;
}
/**
 * Puts the bytecode of this method in the given byte vector. The layout
 * written here must match, byte for byte, the size computed in
 * {@link #getSize()}.
 *
 * @param out
 *            the byte vector into which the bytecode of this method must be
 *            copied.
 */
final void put(final ByteVector out) {
    final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
    // mask out the internal pseudo access flags before writing, keeping
    // ACC_SYNTHETIC only when it must be expressed as a real flag
    int mask = ACC_CONSTRUCTOR | Opcodes.ACC_DEPRECATED
            | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
            | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
    out.putShort(access & ~mask).putShort(name).putShort(desc);
    if (classReaderOffset != 0) {
        // method copied verbatim from the class being read
        out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
        return;
    }
    // first pass: count the method_info attributes to be written
    int attributeCount = 0;
    if (code.length > 0) {
        ++attributeCount;
    }
    if (exceptionCount > 0) {
        ++attributeCount;
    }
    if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
        if ((cw.version & 0xFFFF) < Opcodes.V1_5
                || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
            ++attributeCount;
        }
    }
    if ((access & Opcodes.ACC_DEPRECATED) != 0) {
        ++attributeCount;
    }
    if (signature != null) {
        ++attributeCount;
    }
    if (methodParameters != null) {
        ++attributeCount;
    }
    if (annd != null) {
        ++attributeCount;
    }
    if (anns != null) {
        ++attributeCount;
    }
    if (ianns != null) {
        ++attributeCount;
    }
    if (tanns != null) {
        ++attributeCount;
    }
    if (itanns != null) {
        ++attributeCount;
    }
    if (panns != null) {
        ++attributeCount;
    }
    if (ipanns != null) {
        ++attributeCount;
    }
    if (attrs != null) {
        attributeCount += attrs.getCount();
    }
    out.putShort(attributeCount);
    if (code.length > 0) {
        // the "Code" attribute: compute its length (which includes the
        // length of its own nested attributes), then write its content
        int size = 12 + code.length + 8 * handlerCount;
        if (localVar != null) {
            size += 8 + localVar.length;
        }
        if (localVarType != null) {
            size += 8 + localVarType.length;
        }
        if (lineNumber != null) {
            size += 8 + lineNumber.length;
        }
        if (stackMap != null) {
            size += 8 + stackMap.length;
        }
        if (ctanns != null) {
            size += 8 + ctanns.getSize();
        }
        if (ictanns != null) {
            size += 8 + ictanns.getSize();
        }
        if (cattrs != null) {
            size += cattrs.getSize(cw, code.data, code.length, maxStack,
                    maxLocals);
        }
        out.putShort(cw.newUTF8("Code")).putInt(size);
        out.putShort(maxStack).putShort(maxLocals);
        out.putInt(code.length).putByteArray(code.data, 0, code.length);
        // exception handler table
        out.putShort(handlerCount);
        if (handlerCount > 0) {
            Handler h = firstHandler;
            while (h != null) {
                out.putShort(h.start.position).putShort(h.end.position)
                        .putShort(h.handler.position).putShort(h.type);
                h = h.next;
            }
        }
        // nested attributes of the Code attribute
        attributeCount = 0;
        if (localVar != null) {
            ++attributeCount;
        }
        if (localVarType != null) {
            ++attributeCount;
        }
        if (lineNumber != null) {
            ++attributeCount;
        }
        if (stackMap != null) {
            ++attributeCount;
        }
        if (ctanns != null) {
            ++attributeCount;
        }
        if (ictanns != null) {
            ++attributeCount;
        }
        if (cattrs != null) {
            attributeCount += cattrs.getCount();
        }
        out.putShort(attributeCount);
        if (localVar != null) {
            out.putShort(cw.newUTF8("LocalVariableTable"));
            out.putInt(localVar.length + 2).putShort(localVarCount);
            out.putByteArray(localVar.data, 0, localVar.length);
        }
        if (localVarType != null) {
            out.putShort(cw.newUTF8("LocalVariableTypeTable"));
            out.putInt(localVarType.length + 2).putShort(localVarTypeCount);
            out.putByteArray(localVarType.data, 0, localVarType.length);
        }
        if (lineNumber != null) {
            out.putShort(cw.newUTF8("LineNumberTable"));
            out.putInt(lineNumber.length + 2).putShort(lineNumberCount);
            out.putByteArray(lineNumber.data, 0, lineNumber.length);
        }
        if (stackMap != null) {
            boolean zip = (cw.version & 0xFFFF) >= Opcodes.V1_6;
            out.putShort(cw.newUTF8(zip ? "StackMapTable" : "StackMap"));
            out.putInt(stackMap.length + 2).putShort(frameCount);
            out.putByteArray(stackMap.data, 0, stackMap.length);
        }
        if (ctanns != null) {
            out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
            ctanns.put(out);
        }
        if (ictanns != null) {
            out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
            ictanns.put(out);
        }
        if (cattrs != null) {
            cattrs.put(cw, code.data, code.length, maxLocals, maxStack, out);
        }
    }
    if (exceptionCount > 0) {
        out.putShort(cw.newUTF8("Exceptions")).putInt(
                2 * exceptionCount + 2);
        out.putShort(exceptionCount);
        for (int i = 0; i < exceptionCount; ++i) {
            out.putShort(exceptions[i]);
        }
    }
    if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
        if ((cw.version & 0xFFFF) < Opcodes.V1_5
                || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
            out.putShort(cw.newUTF8("Synthetic")).putInt(0);
        }
    }
    if ((access & Opcodes.ACC_DEPRECATED) != 0) {
        out.putShort(cw.newUTF8("Deprecated")).putInt(0);
    }
    if (signature != null) {
        out.putShort(cw.newUTF8("Signature")).putInt(2)
                .putShort(cw.newUTF8(signature));
    }
    if (methodParameters != null) {
        out.putShort(cw.newUTF8("MethodParameters"));
        out.putInt(methodParameters.length + 1).putByte(
                methodParametersCount);
        out.putByteArray(methodParameters.data, 0, methodParameters.length);
    }
    if (annd != null) {
        out.putShort(cw.newUTF8("AnnotationDefault"));
        out.putInt(annd.length);
        out.putByteArray(annd.data, 0, annd.length);
    }
    if (anns != null) {
        out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
        anns.put(out);
    }
    if (ianns != null) {
        out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
        ianns.put(out);
    }
    if (tanns != null) {
        out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
        tanns.put(out);
    }
    if (itanns != null) {
        out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
        itanns.put(out);
    }
    if (panns != null) {
        out.putShort(cw.newUTF8("RuntimeVisibleParameterAnnotations"));
        AnnotationWriter.put(panns, synthetics, out);
    }
    if (ipanns != null) {
        out.putShort(cw.newUTF8("RuntimeInvisibleParameterAnnotations"));
        AnnotationWriter.put(ipanns, synthetics, out);
    }
    if (attrs != null) {
        attrs.put(cw, null, 0, -1, -1, out);
    }
}
}
| redkale/redkale | src/main/java/org/redkale/asm/MethodWriter.java | Java | apache-2.0 | 88,118 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def make_name(name: str) -> str:
    # Sample parameter handler for delete_specialist_pool_sample: the
    # resource name is passed through unchanged.
    return name
| googleapis/python-aiplatform | .sample_configs/param_handlers/delete_specialist_pool_sample.py | Python | apache-2.0 | 714 |
import subprocess
import pytest
from utils import *
@all_available_simulators()
def test_filter(tmp_path, simulator):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit', '-s', simulator], cwd=tmp_path)
assert 'FAILED' in log.read_text()
@all_available_simulators()
def test_filter_wildcards(tmp_path, simulator):
    """'*' must be usable in place of a full testcase or test name."""
    (tmp_path / 'some_failing_unit_test.sv').write_text('''
module some_failing_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_failing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_test)
`FAIL_IF(1)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
    (tmp_path / 'some_passing_unit_test.sv').write_text('''
module some_passing_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')

    log_file = tmp_path / 'run.log'

    print('Filtering only the passing testcase should block the fail')
    subprocess.check_call(
        ['runSVUnit', '-s', simulator, '--filter', 'some_passing_ut.*'],
        cwd=tmp_path)
    run_log = log_file.read_text()
    assert 'FAILED' not in run_log
    assert 'some_test' in run_log

    print('Filtering only for the test should cause both tests to run, hence trigger the fail')
    subprocess.check_call(
        ['runSVUnit', '-s', simulator, '--filter', '*.some_test'],
        cwd=tmp_path)
    assert 'FAILED' in log_file.read_text()
@all_available_simulators()
def test_filter_without_dot(tmp_path, simulator):
    """A filter expression lacking the '.' separator must abort with a fatal."""
    (tmp_path / 'dummy_unit_test.sv').write_text('''
module dummy_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVUNIT_TESTS_END
endmodule
''')

    subprocess.check_call(
        ['runSVUnit', '-s', simulator, '--filter', 'some_string'],
        cwd=tmp_path)
    assert 'fatal' in (tmp_path / 'run.log').read_text().lower()
@all_available_simulators()
def test_filter_with_extra_dot(tmp_path, simulator):
    """A filter expression with more than one '.' must abort with a fatal."""
    (tmp_path / 'dummy_unit_test.sv').write_text('''
module dummy_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVUNIT_TESTS_END
endmodule
''')

    subprocess.check_call(
        ['runSVUnit', '-s', simulator, '--filter', 'a.b.c'],
        cwd=tmp_path)
    assert 'fatal' in (tmp_path / 'run.log').read_text().lower()
@all_available_simulators()
def test_filter_with_partial_widlcard(tmp_path, simulator):
    """'*' combined with other characters in a name must abort with a fatal.

    NOTE: "widlcard" is a typo for "wildcard", kept so the test id stays
    stable for anyone selecting it by name.
    """
    (tmp_path / 'dummy_unit_test.sv').write_text('''
module dummy_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVUNIT_TESTS_END
endmodule
''')

    log = tmp_path / 'run.log'

    # Each partial wildcard position (testcase prefix, test suffix, testcase
    # suffix) must be rejected the same way, so loop instead of triplicating
    # the call/assert pair.
    for bad_filter in ('foo*.bar', 'foo.bar*', '*foo.bar'):
        subprocess.check_call(
            ['runSVUnit', '-s', simulator, '--filter', bad_filter],
            cwd=tmp_path)
        assert 'fatal' in log.read_text().lower()
@all_available_simulators()
def test_multiple_filter_expressions(tmp_path, simulator):
    """Several ':'-separated filter expressions must all take effect."""
    (tmp_path / 'some_unit_test.sv').write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVTEST(some_other_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVTEST(yet_another_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')

    log_file = tmp_path / 'run.log'

    print('Filtering only the passing testcases should block the fail')
    subprocess.check_call(
        [
            'runSVUnit',
            '-s', simulator,
            '--filter', '*.some_passing_test:*.some_other_passing_test:*.yet_another_passing_test',
        ],
        cwd=tmp_path)
    run_log = log_file.read_text()
    assert 'FAILED' not in run_log
    assert 'some_passing_test' in run_log
    assert 'some_other_passing_test' in run_log
    assert 'yet_another_passing_test' in run_log
@all_available_simulators()
def test_negative_filter(tmp_path, simulator):
    """A leading '-' must exclude every matching test from the run."""
    (tmp_path / 'some_unit_test.sv').write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_other_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')

    log_file = tmp_path / 'run.log'

    print('Filtering out the failing tests should block the fail')
    subprocess.check_call(
        ['runSVUnit',
         '-s', simulator,
         '--filter', '-some_ut.some_failing_test:some_ut.some_other_failing_test',
         ],
        cwd=tmp_path)
    run_log = log_file.read_text()
    assert 'FAILED' not in run_log
    assert 'some_passing_test' in run_log
@all_available_simulators()
def test_positive_and_negative_filter(tmp_path, simulator):
    """A positive filter and a '-'-separated negative filter must combine."""
    (tmp_path / 'some_unit_test.sv').write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
    (tmp_path / 'some_other_unit_test.sv').write_text('''
module some_other_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_other_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_other_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')

    log_file = tmp_path / 'run.log'

    print('Filtering only tests from the first unit test'
          + ' and then filtering out the failing test should block the fail')
    subprocess.check_call(
        ['runSVUnit',
         '-s', simulator,
         '--filter', 'some_ut.*-some_ut.some_failing_test',
         ],
        cwd=tmp_path)
    run_log = log_file.read_text()
    assert 'FAILED' not in run_log
    assert 'some_passing_test' in run_log
| svunit/svunit | test/test_run_script.py | Python | apache-2.0 | 9,783 |
# import asyncio
#
# async def compute(x, y):
# print("Compute %s + %s ..." % (x, y))
# await asyncio.sleep(1.0)
# return x + y
#
# async def print_sum(x, y):
# for i in range(10):
# result = await compute(x, y)
# print("%s + %s = %s" % (x, y, result))
#
# loop = asyncio.get_event_loop()
# loop.run_until_complete(print_sum(1,2))
# asyncio.ensure_future(print_sum(1, 2))
# asyncio.ensure_future(print_sum(3, 4))
# asyncio.ensure_future(print_sum(5, 6))
# loop.run_forever()
import asyncio
async def display_date(who, num):
    """Print ``num + 1`` numbered progress lines for ``who``, pausing one
    second after each print (same trace as the original while/return loop:
    it ran for i = 0..num inclusive, sleeping after every print)."""
    for i in range(num + 1):
        print('{}: Before loop {}'.format(who, i))
        await asyncio.sleep(1)
# Run both coroutines concurrently and exit once they are done. The
# original scheduled them with ensure_future() and then called
# run_forever(), which left the process hanging indefinitely after both
# coroutines had already finished.
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(
    display_date('AAA', 4),
    display_date('BBB', 6),
))
loop.close()
| fs714/concurrency-example | asynchronous/py36/asyncio/async_test.py | Python | apache-2.0 | 868 |
/*
* Copyright 2002-2019 Drew Noakes and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.metadata.exif.makernotes;
import com.drew.lang.annotations.NotNull;
import com.drew.metadata.Directory;
import java.util.HashMap;
/**
 * Describes tags specific to certain Leica cameras.
 * <p>
 * Tag reference from: http://gvsoft.homedns.org/exif/makernote-leica-type1.html
 *
 * @author Drew Noakes https://drewnoakes.com
 */
@SuppressWarnings("WeakerAccess")
public class LeicaMakernoteDirectory extends Directory
{
    public static final int TAG_QUALITY = 0x0300;
    public static final int TAG_USER_PROFILE = 0x0302;
    public static final int TAG_SERIAL_NUMBER = 0x0303;
    public static final int TAG_WHITE_BALANCE = 0x0304;
    public static final int TAG_LENS_TYPE = 0x0310;
    public static final int TAG_EXTERNAL_SENSOR_BRIGHTNESS_VALUE = 0x0311;
    public static final int TAG_MEASURED_LV = 0x0312;
    public static final int TAG_APPROXIMATE_F_NUMBER = 0x0313;
    public static final int TAG_CAMERA_TEMPERATURE = 0x0320;
    public static final int TAG_COLOR_TEMPERATURE = 0x0321;
    public static final int TAG_WB_RED_LEVEL = 0x0322;
    public static final int TAG_WB_GREEN_LEVEL = 0x0323;
    public static final int TAG_WB_BLUE_LEVEL = 0x0324;
    public static final int TAG_CCD_VERSION = 0x0330;
    public static final int TAG_CCD_BOARD_VERSION = 0x0331;
    public static final int TAG_CONTROLLER_BOARD_VERSION = 0x0332;
    public static final int TAG_M16_C_VERSION = 0x0333;
    public static final int TAG_IMAGE_ID_NUMBER = 0x0340;

    /** Maps each known tag identifier to its human-readable name. */
    @NotNull
    private static final HashMap<Integer, String> _tagNameMap = new HashMap<Integer, String>();

    static
    {
        // Table-driven initialisation: one {tag, name} pair per entry.
        final Object[][] tagNames = {
                { TAG_QUALITY, "Quality" },
                { TAG_USER_PROFILE, "User Profile" },
                { TAG_SERIAL_NUMBER, "Serial Number" },
                { TAG_WHITE_BALANCE, "White Balance" },
                { TAG_LENS_TYPE, "Lens Type" },
                { TAG_EXTERNAL_SENSOR_BRIGHTNESS_VALUE, "External Sensor Brightness Value" },
                { TAG_MEASURED_LV, "Measured LV" },
                { TAG_APPROXIMATE_F_NUMBER, "Approximate F Number" },
                { TAG_CAMERA_TEMPERATURE, "Camera Temperature" },
                { TAG_COLOR_TEMPERATURE, "Color Temperature" },
                { TAG_WB_RED_LEVEL, "WB Red Level" },
                { TAG_WB_GREEN_LEVEL, "WB Green Level" },
                { TAG_WB_BLUE_LEVEL, "WB Blue Level" },
                { TAG_CCD_VERSION, "CCD Version" },
                { TAG_CCD_BOARD_VERSION, "CCD Board Version" },
                { TAG_CONTROLLER_BOARD_VERSION, "Controller Board Version" },
                { TAG_M16_C_VERSION, "M16 C Version" },
                { TAG_IMAGE_ID_NUMBER, "Image ID Number" }
        };
        for (final Object[] pair : tagNames) {
            _tagNameMap.put((Integer) pair[0], (String) pair[1]);
        }
    }

    public LeicaMakernoteDirectory()
    {
        this.setDescriptor(new LeicaMakernoteDescriptor(this));
    }

    @Override
    @NotNull
    public String getName()
    {
        return "Leica Makernote";
    }

    @Override
    @NotNull
    protected HashMap<Integer, String> getTagNameMap()
    {
        return _tagNameMap;
    }
}
| drewnoakes/metadata-extractor | Source/com/drew/metadata/exif/makernotes/LeicaMakernoteDirectory.java | Java | apache-2.0 | 3,940 |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/**
*
*/
package com.amazonaws.services.cloudfront; | flofreud/aws-sdk-java | aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/package-info.java | Java | apache-2.0 | 643 |
/*Copyright (C) 2017 Roland Hauser, <sourcepond@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package ch.sourcepond.io.fileobserver.impl.dispatch;
import ch.sourcepond.io.fileobserver.api.DispatchKey;
import java.nio.file.Path;
import java.util.Objects;
import static java.lang.String.format;
/**
 * Default immutable value-object implementation of {@link DispatchKey}. It
 * pairs the key of a watched directory with a path relative to that
 * directory, and implements {@code equals}/{@code hashCode} so instances can
 * serve as map keys.
 */
final class DefaultDispatchKey implements DispatchKey {
    private final Object directoryKey;
    private final Path relativePath;

    public DefaultDispatchKey(final Object pDirectoryKey, final Path pRelativePath) {
        this.directoryKey = pDirectoryKey;
        this.relativePath = pRelativePath;
    }

    @Override
    public Object getDirectoryKey() {
        return directoryKey;
    }

    @Override
    public Path getRelativePath() {
        return relativePath;
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        // Class is final, so an instanceof check (which also rejects null) is
        // equivalent to the usual getClass() comparison.
        if (!(o instanceof DefaultDispatchKey)) {
            return false;
        }
        final DefaultDispatchKey key = (DefaultDispatchKey) o;
        return Objects.equals(key.directoryKey, directoryKey)
                && Objects.equals(key.relativePath, relativePath);
    }

    @Override
    public int hashCode() {
        return Objects.hash(directoryKey, relativePath);
    }

    @Override
    public String toString() {
        return format("[%s:%s]", directoryKey, relativePath);
    }
}
| SourcePond/fileobserver-api | fileobserver-impl/src/main/java/ch/sourcepond/io/fileobserver/impl/dispatch/DefaultDispatchKey.java | Java | apache-2.0 | 1,911 |
package org.ayo.robot.canvas.shape;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.RectF;
import org.ayo.robot.BaseView;
public class RectView extends BaseView {
public RectView(Context context) {
super(context);
init();
}
private void init(){
}
int centerX, centerY;
int rw, rh;
boolean isInited = false;
@Override
protected void drawShape(Canvas canvas, int w, int h, Paint paint) {
// Rect rect = new Rect(100, 100, 200, 200);
// RectF rectF = new RectF(100, 100, 200, 200);
if(!isInited){
centerX = w/2;
centerY = h/2;
rw = w - 200;
rh = h - 200;
isInited = true;
}
drawRect(canvas, centerX, centerY, rw, rh, paint);
/*
Rect的四个顶点是int
RectF的四个顶点float
RectF和rx(x-radius),ry(y-radius)构成了圆角Rect
rx The x-radius of the oval used to round the corners
ry The y-radius of the oval used to round the corners
Rect和RectF包含的方法:
inset
union
是否包含点或矩形
*/
}
public static void drawRect(Canvas canvas, int centerX, int centerY, int w, int h, Paint p){
int left = centerX - w/2;
int top = centerY - h/2;
int right = left + w;
int bottom = top + h;
canvas.drawRect(new RectF(left, top, right, bottom), p);
}
public void moveCenter(int dx, int dy){
centerX += dx;
centerY += dy;
invalidate();
}
public void changeSize(int dw, int dh){
rw += dw;
rh += dh;
invalidate();
}
@Override
public String getTitle() {
return "canvas.drawRect(rectF, paint)";
}
@Override
public String getMethod() {
return "画矩形";
}
@Override
public String getComment() {
return "画个矩形\n" +
"Rect处理int\n" +
"RectF处理float\n" +
"二者都有inset,union,contains点或矩形的方法";
}
} | cowthan/UI-Robot | app/src/main/java/org/ayo/robot/canvas/shape/RectView.java | Java | apache-2.0 | 2,195 |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.file.options;
import alluxio.underfs.UfsStatus;
import com.google.common.base.Objects;
import javax.annotation.concurrent.NotThreadSafe;
/**
 * Method options for loading metadata.
 */
@NotThreadSafe
public final class LoadMetadataOptions {
  private boolean mCreateAncestors;
  private boolean mLoadDirectChildren;
  private UfsStatus mUfsStatus;

  /**
   * @return the default {@link LoadMetadataOptions}
   */
  public static LoadMetadataOptions defaults() {
    return new LoadMetadataOptions();
  }

  private LoadMetadataOptions() {
    // Defaults: do not create ancestors, do not load children, UFS status
    // unknown.
    mCreateAncestors = false;
    mLoadDirectChildren = false;
    mUfsStatus = null;
  }

  /**
   * @return null if unknown, else the status of the UFS path for which
   *         metadata is being loaded
   */
  public UfsStatus getUfsStatus() {
    return mUfsStatus;
  }

  /**
   * @return the recursive flag value; it specifies whether parent directories
   *         should be created if they do not already exist
   */
  public boolean isCreateAncestors() {
    return mCreateAncestors;
  }

  /**
   * @return the load direct children flag; it specifies whether the direct
   *         children should be loaded
   */
  public boolean isLoadDirectChildren() {
    return mLoadDirectChildren;
  }

  /**
   * Sets the recursive flag.
   *
   * @param createAncestors whether parent directories should be created if
   *        they do not already exist
   * @return the updated options object
   */
  public LoadMetadataOptions setCreateAncestors(boolean createAncestors) {
    mCreateAncestors = createAncestors;
    return this;
  }

  /**
   * Sets the load direct children flag.
   *
   * @param loadDirectChildren whether the direct children should be loaded
   * @return the updated options object
   */
  public LoadMetadataOptions setLoadDirectChildren(boolean loadDirectChildren) {
    mLoadDirectChildren = loadDirectChildren;
    return this;
  }

  /**
   * Sets the UFS status of the path.
   *
   * @param status UFS status of the path
   * @return the updated options object
   */
  public LoadMetadataOptions setUfsStatus(UfsStatus status) {
    mUfsStatus = status;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof LoadMetadataOptions)) {
      return false;
    }
    LoadMetadataOptions other = (LoadMetadataOptions) o;
    // Primitive flags compare with ==; only the UFS status needs a null-safe
    // comparison.
    return mCreateAncestors == other.mCreateAncestors
        && mLoadDirectChildren == other.mLoadDirectChildren
        && Objects.equal(mUfsStatus, other.mUfsStatus);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(mCreateAncestors, mLoadDirectChildren, mUfsStatus);
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("createAncestors", mCreateAncestors)
        .add("loadDirectChildren", mLoadDirectChildren)
        .add("ufsStatus", mUfsStatus)
        .toString();
  }
}
| ShailShah/alluxio | core/server/master/src/main/java/alluxio/master/file/options/LoadMetadataOptions.java | Java | apache-2.0 | 3,539 |
//===--- CGCXXRTTI.cpp - Emit LLVM Code for C++ RTTI descriptors ----------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of RTTI descriptors.
//
//===----------------------------------------------------------------------===//
#include "CodeGenModule.h"
#include "CGCXXABI.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/Type.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "CGObjCRuntime.h"
using namespace clang;
using namespace CodeGen;
namespace {
/// RTTIBuilder - Builds the Itanium C++ ABI RTTI descriptor (type_info
/// object) for a single type. Fields accumulates the members of the struct
/// being emitted; BuildTypeInfo is the entry point.
class RTTIBuilder {
  CodeGenModule &CGM;  // Per-module state.
  llvm::LLVMContext &VMContext;  // Context in which constants are created.

  /// Fields - The fields of the RTTI descriptor currently being built.
  SmallVector<llvm::Constant *, 16> Fields;

  /// GetAddrOfTypeName - Returns the mangled type name of the given type.
  llvm::GlobalVariable *
  GetAddrOfTypeName(QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage);

  /// GetAddrOfExternalRTTIDescriptor - Returns the constant for the RTTI
  /// descriptor of the given type.
  llvm::Constant *GetAddrOfExternalRTTIDescriptor(QualType Ty);

  /// BuildVTablePointer - Build the vtable pointer for the given type.
  void BuildVTablePointer(const Type *Ty);

  /// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
  /// inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
  void BuildSIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
  /// classes with bases that do not satisfy the abi::__si_class_type_info
  /// constraints, according to the Itanium C++ ABI, 2.9.5p5c.
  void BuildVMIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct, used
  /// for pointer types.
  void BuildPointerTypeInfo(QualType PointeeTy);

  /// BuildObjCObjectTypeInfo - Build the appropriate kind of
  /// type_info for an object type.
  void BuildObjCObjectTypeInfo(const ObjCObjectType *Ty);

  /// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
  /// struct, used for member pointer types.
  void BuildPointerToMemberTypeInfo(const MemberPointerType *Ty);

public:
  RTTIBuilder(CodeGenModule &CGM) : CGM(CGM),
    VMContext(CGM.getModule().getContext()) { }

  // Pointer type info flags.
  enum {
    /// PTI_Const - Type has const qualifier.
    PTI_Const = 0x1,

    /// PTI_Volatile - Type has volatile qualifier.
    PTI_Volatile = 0x2,

    /// PTI_Restrict - Type has restrict qualifier.
    PTI_Restrict = 0x4,

    /// PTI_Incomplete - Type is incomplete.
    PTI_Incomplete = 0x8,

    /// PTI_ContainingClassIncomplete - Containing class is incomplete.
    /// (in pointer to member).
    PTI_ContainingClassIncomplete = 0x10
  };

  // VMI type info flags.
  enum {
    /// VMI_NonDiamondRepeat - Class has non-diamond repeated inheritance.
    VMI_NonDiamondRepeat = 0x1,

    /// VMI_DiamondShaped - Class is diamond shaped.
    VMI_DiamondShaped = 0x2
  };

  // Base class type info flags.
  enum {
    /// BCTI_Virtual - Base class is virtual.
    BCTI_Virtual = 0x1,

    /// BCTI_Public - Base class is public.
    BCTI_Public = 0x2
  };

  /// BuildTypeInfo - Build the RTTI type info struct for the given type.
  ///
  /// \param Force - true to force the creation of this RTTI value
  llvm::Constant *BuildTypeInfo(QualType Ty, bool Force = false);
};
}
/// GetAddrOfTypeName - Emit (or replace) the global variable holding the
/// mangled name string for \p Ty with the given linkage, and return it.
llvm::GlobalVariable *
RTTIBuilder::GetAddrOfTypeName(QualType Ty,
                               llvm::GlobalVariable::LinkageTypes Linkage) {
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  // We know that the mangled name of the type starts at index 4 of the
  // mangled name of the typename, so we can just index into it in order to
  // get the mangled name of the type.
  llvm::Constant *Init = llvm::ConstantDataArray::getString(VMContext,
                                                            Name.substr(4));

  llvm::GlobalVariable *GV =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, Init->getType(), Linkage);
  GV->setInitializer(Init);

  return GV;
}
/// GetAddrOfExternalRTTIDescriptor - Returns the constant for the RTTI
/// descriptor of the given type, creating an external declaration in this
/// module if one does not already exist.
llvm::Constant *RTTIBuilder::GetAddrOfExternalRTTIDescriptor(QualType Ty) {
  // Mangle the RTTI name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  // Look for an existing global.
  llvm::GlobalVariable *GV = CGM.getModule().getNamedGlobal(Name);

  if (!GV) {
    // Create a new global variable (a declaration only; the definition lives
    // in another translation unit or the runtime library).
    GV = new llvm::GlobalVariable(CGM.getModule(), CGM.Int8PtrTy,
                                  /*Constant=*/true,
                                  llvm::GlobalValue::ExternalLinkage, 0, Name);
  }

  return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}
/// TypeInfoIsInStandardLibrary - Given a builtin type, returns whether the type
/// info for that type is defined in the standard library.
static bool TypeInfoIsInStandardLibrary(const BuiltinType *Ty) {
  // Itanium C++ ABI 2.9.2:
  //   Basic type information (e.g. for "int", "bool", etc.) will be kept in
  //   the run-time support library. Specifically, the run-time support
  //   library should contain type_info objects for the types X, X* and
  //   X const*, for every X in: void, std::nullptr_t, bool, wchar_t, char,
  //   unsigned char, signed char, short, unsigned short, int, unsigned int,
  //   long, unsigned long, long long, unsigned long long, float, double,
  //   long double, char16_t, char32_t, and the IEEE 754r decimal and
  //   half-precision floating point types.
  switch (Ty->getKind()) {
    case BuiltinType::Void:
    case BuiltinType::NullPtr:
    case BuiltinType::Bool:
    case BuiltinType::WChar_S:
    case BuiltinType::WChar_U:
    case BuiltinType::Char_U:
    case BuiltinType::Char_S:
    case BuiltinType::UChar:
    case BuiltinType::SChar:
    case BuiltinType::Short:
    case BuiltinType::UShort:
    case BuiltinType::Int:
    case BuiltinType::UInt:
    case BuiltinType::Long:
    case BuiltinType::ULong:
    case BuiltinType::LongLong:
    case BuiltinType::ULongLong:
    case BuiltinType::Half:
    case BuiltinType::Float:
    case BuiltinType::Double:
    case BuiltinType::LongDouble:
    case BuiltinType::Char16:
    case BuiltinType::Char32:
    case BuiltinType::Int128:
    case BuiltinType::UInt128:
      return true;

    // Placeholder types never reach IR generation, so asking for their RTTI
    // indicates a bug upstream.
    case BuiltinType::Dependent:
#define BUILTIN_TYPE(Id, SingletonId)
#define PLACEHOLDER_TYPE(Id, SingletonId) \
    case BuiltinType::Id:
#include "clang/AST/BuiltinTypes.def"
      llvm_unreachable("asking for RRTI for a placeholder type!");

    case BuiltinType::ObjCId:
    case BuiltinType::ObjCClass:
    case BuiltinType::ObjCSel:
      llvm_unreachable("FIXME: Objective-C types are unsupported!");
  }

  llvm_unreachable("Invalid BuiltinType Kind!");
}
static bool TypeInfoIsInStandardLibrary(const PointerType *PointerTy) {
QualType PointeeTy = PointerTy->getPointeeType();
const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(PointeeTy);
if (!BuiltinTy)
return false;
// Check the qualifiers.
Qualifiers Quals = PointeeTy.getQualifiers();
Quals.removeConst();
if (!Quals.empty())
return false;
return TypeInfoIsInStandardLibrary(BuiltinTy);
}
/// IsStandardLibraryRTTIDescriptor - Returns whether the C++ runtime library
/// already provides the type_info object for the given type (builtin types
/// and certain pointers to builtin types).
static bool IsStandardLibraryRTTIDescriptor(QualType Ty) {
  // Type info for builtin types is defined in the standard library.
  if (const BuiltinType *BT = dyn_cast<BuiltinType>(Ty))
    return TypeInfoIsInStandardLibrary(BT);

  // So is the type info for some pointer-to-builtin types.
  if (const PointerType *PT = dyn_cast<PointerType>(Ty))
    return TypeInfoIsInStandardLibrary(PT);

  return false;
}
/// ShouldUseExternalRTTIDescriptor - Returns whether the type information for
/// the given type exists somewhere else, and that we should not emit the type
/// information in this translation unit.  Assumes that it is not a
/// standard-library type.
static bool ShouldUseExternalRTTIDescriptor(CodeGenModule &CGM, QualType Ty) {
  ASTContext &Context = CGM.getContext();

  // If RTTI is disabled, don't consider key functions.
  if (!Context.getLangOpts().RTTI) return false;

  if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
    if (!RD->hasDefinition())
      return false;

    if (!RD->isDynamicClass())
      return false;

    // A dynamic class's RTTI is emitted alongside its vtable; use the
    // external copy exactly when this TU does not emit the vtable.
    return !CGM.getVTables().ShouldEmitVTableInThisTU(RD);
  }

  return false;
}
/// IsIncompleteClassType - Returns whether the given record type is incomplete.
static bool IsIncompleteClassType(const RecordType *RecordTy) {
return !RecordTy->getDecl()->isCompleteDefinition();
}
/// ContainsIncompleteClassType - Returns whether the given type contains an
/// incomplete class type. This is true if
///
///   * The given type is an incomplete class type.
///   * The given type is a pointer type whose pointee type contains an
///     incomplete class type.
///   * The given type is a member pointer type whose class is an incomplete
///     class type.
///   * The given type is a member pointer type whose pointee type contains an
///     incomplete class type.
static bool ContainsIncompleteClassType(QualType Ty) {
  if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
    if (IsIncompleteClassType(RecordTy))
      return true;
  }

  // Recurse through the pointee of a pointer type.
  if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
    return ContainsIncompleteClassType(PointerTy->getPointeeType());

  if (const MemberPointerType *MemberPointerTy =
      dyn_cast<MemberPointerType>(Ty)) {
    // Check if the class type is incomplete.
    const RecordType *ClassType = cast<RecordType>(MemberPointerTy->getClass());
    if (IsIncompleteClassType(ClassType))
      return true;

    return ContainsIncompleteClassType(MemberPointerTy->getPointeeType());
  }

  return false;
}
/// getTypeInfoLinkage - Return the linkage that the type info and type info
/// name constants should have for the given type.
static llvm::GlobalVariable::LinkageTypes
getTypeInfoLinkage(CodeGenModule &CGM, QualType Ty) {
  // Itanium C++ ABI 2.9.5p7:
  //   In addition, it and all of the intermediate abi::__pointer_type_info
  //   structs in the chain down to the abi::__class_type_info for the
  //   incomplete class type must be prevented from resolving to the
  //   corresponding type_info structs for the complete class type, possibly
  //   by making them local static objects. Finally, a dummy class RTTI is
  //   generated for the incomplete type that will not resolve to the final
  //   complete class RTTI (because the latter need not exist), possibly by
  //   making it a local static object.
  if (ContainsIncompleteClassType(Ty))
    return llvm::GlobalValue::InternalLinkage;

  switch (Ty->getLinkage()) {
  case NoLinkage:
  case InternalLinkage:
  case UniqueExternalLinkage:
    return llvm::GlobalValue::InternalLinkage;

  case ExternalLinkage:
    if (!CGM.getLangOpts().RTTI) {
      // RTTI is not enabled, which means that this type info struct is going
      // to be used for exception handling. Give it linkonce_odr linkage.
      return llvm::GlobalValue::LinkOnceODRLinkage;
    }

    if (const RecordType *Record = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(Record->getDecl());
      if (RD->hasAttr<WeakAttr>())
        return llvm::GlobalValue::WeakODRLinkage;
      // A dynamic class's RTTI shares the linkage of its vtable.
      if (RD->isDynamicClass())
        return CGM.getVTableLinkage(RD);
    }

    return llvm::GlobalValue::LinkOnceODRLinkage;
  }

  llvm_unreachable("Invalid linkage!");
}
// CanUseSingleInheritance - Return whether the given record decl has a "single,
// public, non-virtual base at offset zero (i.e. the derived class is dynamic
// iff the base is)", according to Itanium C++ ABI, 2.9.5p6b.
static bool CanUseSingleInheritance(const CXXRecordDecl *RD) {
  // Check the number of bases.
  if (RD->getNumBases() != 1)
    return false;

  // Get the base.
  CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin();

  // Check that the base is not virtual.
  if (Base->isVirtual())
    return false;

  // Check that the base is public.
  if (Base->getAccessSpecifier() != AS_public)
    return false;

  // Check that the class is dynamic iff the base is.
  const CXXRecordDecl *BaseDecl =
    cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
  if (!BaseDecl->isEmpty() &&
      BaseDecl->isDynamicClass() != RD->isDynamicClass())
    return false;

  return true;
}
/// BuildVTablePointer - Emit the first field of the type_info object: a
/// pointer into the vtable of the __cxxabiv1 RTTI class that matches the
/// kind of \p Ty, offset to the vtable's address point (slot 2).
void RTTIBuilder::BuildVTablePointer(const Type *Ty) {
  // abi::__class_type_info.
  static const char * const ClassTypeInfo =
    "_ZTVN10__cxxabiv117__class_type_infoE";
  // abi::__si_class_type_info.
  static const char * const SIClassTypeInfo =
    "_ZTVN10__cxxabiv120__si_class_type_infoE";
  // abi::__vmi_class_type_info.
  static const char * const VMIClassTypeInfo =
    "_ZTVN10__cxxabiv121__vmi_class_type_infoE";

  const char *VTableName = 0;

  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.def"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::Builtin:
  // GCC treats vector and complex types as fundamental types.
  case Type::Vector:
  case Type::ExtVector:
  case Type::Complex:
  case Type::Atomic:
  // FIXME: GCC treats block pointers as fundamental types?!
  case Type::BlockPointer:
    // abi::__fundamental_type_info.
    VTableName = "_ZTVN10__cxxabiv123__fundamental_type_infoE";
    break;

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // abi::__array_type_info.
    VTableName = "_ZTVN10__cxxabiv117__array_type_infoE";
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // abi::__function_type_info.
    VTableName = "_ZTVN10__cxxabiv120__function_type_infoE";
    break;

  case Type::Enum:
    // abi::__enum_type_info.
    VTableName = "_ZTVN10__cxxabiv116__enum_type_infoE";
    break;

  case Type::Record: {
    // Pick the RTTI class by base-class structure: no bases (or no
    // definition) -> __class_type_info; one eligible base ->
    // __si_class_type_info; anything else -> __vmi_class_type_info.
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());

    if (!RD->hasDefinition() || !RD->getNumBases()) {
      VTableName = ClassTypeInfo;
    } else if (CanUseSingleInheritance(RD)) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = VMIClassTypeInfo;
    }

    break;
  }

  case Type::ObjCObject:
    // Ignore protocol qualifiers.
    Ty = cast<ObjCObjectType>(Ty)->getBaseType().getTypePtr();

    // Handle id and Class.
    if (isa<BuiltinType>(Ty)) {
      VTableName = ClassTypeInfo;
      break;
    }

    assert(isa<ObjCInterfaceType>(Ty));
    // Fall through.

  case Type::ObjCInterface:
    if (cast<ObjCInterfaceType>(Ty)->getDecl()->getSuperClass()) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = ClassTypeInfo;
    }
    break;

  case Type::ObjCObjectPointer:
  case Type::Pointer:
    // abi::__pointer_type_info.
    VTableName = "_ZTVN10__cxxabiv119__pointer_type_infoE";
    break;

  case Type::MemberPointer:
    // abi::__pointer_to_member_type_info.
    VTableName = "_ZTVN10__cxxabiv129__pointer_to_member_type_infoE";
    break;
  }

  llvm::Constant *VTable =
    CGM.getModule().getOrInsertGlobal(VTableName, CGM.Int8PtrTy);

  llvm::Type *PtrDiffTy =
    CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

  // The vtable address point is 2.
  llvm::Constant *Two = llvm::ConstantInt::get(PtrDiffTy, 2);
  VTable = llvm::ConstantExpr::getInBoundsGetElementPtr(VTable, Two);
  VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);

  Fields.push_back(VTable);
}
// maybeUpdateRTTILinkage - Will update the linkage of the RTTI data structures
// from available_externally to the correct linkage if necessary. An example of
// this is:
//
//   struct A {
//     virtual void f();
//   };
//
//   const std::type_info &g() {
//     return typeid(A);
//   }
//
//   void A::f() { }
//
// When we're generating the typeid(A) expression, we do not yet know that
// A's key function is defined in this translation unit, so we will give the
// typeinfo and typename structures available_externally linkage. When A::f
// forces the vtable to be generated, we need to change the linkage of the
// typeinfo and typename structs, otherwise we'll end up with undefined
// externals when linking.
static void
maybeUpdateRTTILinkage(CodeGenModule &CGM, llvm::GlobalVariable *GV,
                       QualType Ty) {
  // We're only interested in globals with available_externally linkage.
  if (!GV->hasAvailableExternallyLinkage())
    return;

  // Get the real linkage for the type.
  llvm::GlobalVariable::LinkageTypes Linkage = getTypeInfoLinkage(CGM, Ty);

  // If variable is supposed to have available_externally linkage, we don't
  // need to do anything.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    return;

  // Update the typeinfo linkage.
  GV->setLinkage(Linkage);

  // Get the typename global (emitted together with the typeinfo, so it must
  // exist and carry the same provisional linkage).
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::GlobalVariable *TypeNameGV = CGM.getModule().getNamedGlobal(Name);

  assert(TypeNameGV->hasAvailableExternallyLinkage() &&
         "Type name has different linkage from type info!");

  // And update its linkage.
  TypeNameGV->setLinkage(Linkage);
}
/// BuildTypeInfo - Build the RTTI type info struct for the given type.
///
/// Reuses an already-emitted descriptor when possible, defers to external or
/// standard-library descriptors unless \p Force is set, and otherwise emits
/// the vtable pointer, name, and per-kind extra fields, then finalizes the
/// global with the appropriate linkage and visibility.
llvm::Constant *RTTIBuilder::BuildTypeInfo(QualType Ty, bool Force) {
  // We want to operate on the canonical type.
  Ty = CGM.getContext().getCanonicalType(Ty);

  // Check if we've already emitted an RTTI descriptor for this type.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::GlobalVariable *OldGV = CGM.getModule().getNamedGlobal(Name);
  if (OldGV && !OldGV->isDeclaration()) {
    maybeUpdateRTTILinkage(CGM, OldGV, Ty);

    return llvm::ConstantExpr::getBitCast(OldGV, CGM.Int8PtrTy);
  }

  // Check if there is already an external RTTI descriptor for this type.
  bool IsStdLib = IsStandardLibraryRTTIDescriptor(Ty);
  if (!Force && (IsStdLib || ShouldUseExternalRTTIDescriptor(CGM, Ty)))
    return GetAddrOfExternalRTTIDescriptor(Ty);

  // Emit the standard library with external linkage.
  llvm::GlobalVariable::LinkageTypes Linkage;
  if (IsStdLib)
    Linkage = llvm::GlobalValue::ExternalLinkage;
  else
    Linkage = getTypeInfoLinkage(CGM, Ty);

  // Add the vtable pointer.
  BuildVTablePointer(cast<Type>(Ty));

  // And the name.
  llvm::GlobalVariable *TypeName = GetAddrOfTypeName(Ty, Linkage);
  Fields.push_back(llvm::ConstantExpr::getBitCast(TypeName, CGM.Int8PtrTy));

  // Emit the extra fields required by the Itanium ABI for each type kind.
  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.def"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  // GCC treats vector types as fundamental types.
  case Type::Builtin:
  case Type::Vector:
  case Type::ExtVector:
  case Type::Complex:
  case Type::BlockPointer:
    // Itanium C++ ABI 2.9.5p4:
    // abi::__fundamental_type_info adds no data members to std::type_info.
    break;

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // Itanium C++ ABI 2.9.5p5:
    // abi::__array_type_info adds no data members to std::type_info.
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // Itanium C++ ABI 2.9.5p5:
    // abi::__function_type_info adds no data members to std::type_info.
    break;

  case Type::Enum:
    // Itanium C++ ABI 2.9.5p5:
    // abi::__enum_type_info adds no data members to std::type_info.
    break;

  case Type::Record: {
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());
    if (!RD->hasDefinition() || !RD->getNumBases()) {
      // We don't need to emit any fields.
      break;
    }

    if (CanUseSingleInheritance(RD))
      BuildSIClassTypeInfo(RD);
    else
      BuildVMIClassTypeInfo(RD);

    break;
  }

  case Type::ObjCObject:
  case Type::ObjCInterface:
    BuildObjCObjectTypeInfo(cast<ObjCObjectType>(Ty));
    break;

  case Type::ObjCObjectPointer:
    BuildPointerTypeInfo(cast<ObjCObjectPointerType>(Ty)->getPointeeType());
    break;

  case Type::Pointer:
    BuildPointerTypeInfo(cast<PointerType>(Ty)->getPointeeType());
    break;

  case Type::MemberPointer:
    BuildPointerToMemberTypeInfo(cast<MemberPointerType>(Ty));
    break;

  case Type::Atomic:
    // No fields, at least for the moment.
    break;
  }

  llvm::Constant *Init = llvm::ConstantStruct::getAnon(Fields);

  llvm::GlobalVariable *GV =
    new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
                             /*Constant=*/true, Linkage, Init, Name);

  // If there's already an old global variable, replace it with the new one.
  if (OldGV) {
    GV->takeName(OldGV);
    llvm::Constant *NewPtr =
      llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
    OldGV->replaceAllUsesWith(NewPtr);
    OldGV->eraseFromParent();
  }

  // GCC only relies on the uniqueness of the type names, not the
  // type_infos themselves, so we can emit these as hidden symbols.
  // But don't do this if we're worried about strict visibility
  // compatibility.
  if (const RecordType *RT = dyn_cast<RecordType>(Ty)) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());

    CGM.setTypeVisibility(GV, RD, CodeGenModule::TVK_ForRTTI);
    CGM.setTypeVisibility(TypeName, RD, CodeGenModule::TVK_ForRTTIName);
  } else {
    Visibility TypeInfoVisibility = DefaultVisibility;
    if (CGM.getCodeGenOpts().HiddenWeakVTables &&
        Linkage == llvm::GlobalValue::LinkOnceODRLinkage)
      TypeInfoVisibility = HiddenVisibility;

    // The type name should have the same visibility as the type itself.
    Visibility ExplicitVisibility = Ty->getVisibility();
    TypeName->setVisibility(CodeGenModule::
                            GetLLVMVisibility(ExplicitVisibility));

    TypeInfoVisibility = minVisibility(TypeInfoVisibility, Ty->getVisibility());
    GV->setVisibility(CodeGenModule::GetLLVMVisibility(TypeInfoVisibility));
  }

  GV->setUnnamedAddr(true);

  return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}
/// ComputeQualifierFlags - Translate a Qualifiers set into the corresponding
/// abi::__pointer_type_info flag bits (const/volatile/restrict).
static unsigned ComputeQualifierFlags(Qualifiers Quals) {
  unsigned Flags = 0;

  Flags |= Quals.hasConst() ? RTTIBuilder::PTI_Const : 0;
  Flags |= Quals.hasVolatile() ? RTTIBuilder::PTI_Volatile : 0;
  Flags |= Quals.hasRestrict() ? RTTIBuilder::PTI_Restrict : 0;

  return Flags;
}
/// BuildObjCObjectTypeInfo - Build the appropriate kind of type_info
/// for the given Objective-C object type.
void RTTIBuilder::BuildObjCObjectTypeInfo(const ObjCObjectType *OT) {
  // Drop qualifiers.
  const Type *T = OT->getBaseType().getTypePtr();
  assert(isa<BuiltinType>(T) || isa<ObjCInterfaceType>(T));

  // The builtin types are abi::__class_type_infos and don't require
  // extra fields.
  if (isa<BuiltinType>(T)) return;

  ObjCInterfaceDecl *Class = cast<ObjCInterfaceType>(T)->getDecl();
  ObjCInterfaceDecl *Super = Class->getSuperClass();

  // Root classes are also __class_type_info.
  if (!Super) return;

  QualType SuperTy = CGM.getContext().getObjCInterfaceType(Super);

  // Everything else is single inheritance: append the superclass's type_info
  // as the sole extra field (abi::__si_class_type_info layout).
  llvm::Constant *BaseTypeInfo = RTTIBuilder(CGM).BuildTypeInfo(SuperTy);
  Fields.push_back(BaseTypeInfo);
}
/// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
/// inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
void RTTIBuilder::BuildSIClassTypeInfo(const CXXRecordDecl *RD) {
  // Itanium C++ ABI 2.9.5p6b:
  //   It adds to abi::__class_type_info a single member pointing to the
  //   type_info structure for the base type,
  llvm::Constant *BaseTypeInfo =
    RTTIBuilder(CGM).BuildTypeInfo(RD->bases_begin()->getType());
  Fields.push_back(BaseTypeInfo);
}
namespace {
  /// SeenBases - Contains virtual and non-virtual bases seen when traversing
  /// a class hierarchy; used to detect repeated and diamond-shaped
  /// inheritance when computing __vmi_class_type_info flags.
  struct SeenBases {
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> NonVirtualBases;
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> VirtualBases;
  };
}
/// ComputeVMIClassTypeInfoFlags - Compute the value of the flags member in
/// abi::__vmi_class_type_info for the sub-hierarchy rooted at \p Base,
/// recording every base encountered in \p Bases.
///
static unsigned ComputeVMIClassTypeInfoFlags(const CXXBaseSpecifier *Base,
                                             SeenBases &Bases) {

  unsigned Flags = 0;

  const CXXRecordDecl *BaseDecl =
    cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

  if (Base->isVirtual()) {
    // Mark the virtual base as seen.
    if (!Bases.VirtualBases.insert(BaseDecl)) {
      // If this virtual base has been seen before, then the class is diamond
      // shaped.
      Flags |= RTTIBuilder::VMI_DiamondShaped;
    } else {
      // Seen both virtually and non-virtually: a non-diamond repeat.
      if (Bases.NonVirtualBases.count(BaseDecl))
        Flags |= RTTIBuilder::VMI_NonDiamondRepeat;
    }
  } else {
    // Mark the non-virtual base as seen.
    if (!Bases.NonVirtualBases.insert(BaseDecl)) {
      // If this non-virtual base has been seen before, then the class has non-
      // diamond shaped repeated inheritance.
      Flags |= RTTIBuilder::VMI_NonDiamondRepeat;
    } else {
      if (Bases.VirtualBases.count(BaseDecl))
        Flags |= RTTIBuilder::VMI_NonDiamondRepeat;
    }
  }

  // Walk all bases.
  for (CXXRecordDecl::base_class_const_iterator I = BaseDecl->bases_begin(),
       E = BaseDecl->bases_end(); I != E; ++I)
    Flags |= ComputeVMIClassTypeInfoFlags(I, Bases);

  return Flags;
}
/// ComputeVMIClassTypeInfoFlags - Compute the __flags word of
/// abi::__vmi_class_type_info for \p RD by walking its entire base hierarchy.
static unsigned ComputeVMIClassTypeInfoFlags(const CXXRecordDecl *RD) {
  unsigned Flags = 0;

  SeenBases Bases;

  // Walk all bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I)
    Flags |= ComputeVMIClassTypeInfoFlags(I, Bases);

  return Flags;
}
/// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
/// classes with bases that do not satisfy the abi::__si_class_type_info
/// constraints, according to the Itanium C++ ABI, 2.9.5p5c.
void RTTIBuilder::BuildVMIClassTypeInfo(const CXXRecordDecl *RD) {
  llvm::Type *UnsignedIntLTy =
    CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);

  // Itanium C++ ABI 2.9.5p6c:
  //   __flags is a word with flags describing details about the class
  //   structure, which may be referenced by using the __flags_masks
  //   enumeration. These flags refer to both direct and indirect bases.
  unsigned Flags = ComputeVMIClassTypeInfoFlags(RD);
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));

  // Itanium C++ ABI 2.9.5p6c:
  //   __base_count is a word with the number of direct proper base class
  //   descriptions that follow.
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, RD->getNumBases()));

  if (!RD->getNumBases())
    return;

  llvm::Type *LongLTy =
    CGM.getTypes().ConvertType(CGM.getContext().LongTy);

  // Now add the base class descriptions.

  // Itanium C++ ABI 2.9.5p6c:
  //   __base_info[] is an array of base class descriptions -- one for every
  //   direct proper base. Each description is of the type:
  //
  //   struct abi::__base_class_type_info {
  //   public:
  //     const __class_type_info *__base_type;
  //     long __offset_flags;
  //
  //     enum __offset_flags_masks {
  //       __virtual_mask = 0x1,
  //       __public_mask = 0x2,
  //       __offset_shift = 8
  //     };
  //   };
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier *Base = I;

    // The __base_type member points to the RTTI for the base type.
    Fields.push_back(RTTIBuilder(CGM).BuildTypeInfo(Base->getType()));

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    int64_t OffsetFlags = 0;

    // All but the lower 8 bits of __offset_flags are a signed offset.
    // For a non-virtual base, this is the offset in the object of the base
    // subobject. For a virtual base, this is the offset in the virtual table
    // of the virtual base offset for the virtual base referenced (negative).
    CharUnits Offset;
    if (Base->isVirtual())
      Offset =
        CGM.getVTableContext().getVirtualBaseOffsetOffset(RD, BaseDecl);
    else {
      const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
      Offset = Layout.getBaseClassOffset(BaseDecl);
    }

    OffsetFlags = uint64_t(Offset.getQuantity()) << 8;

    // The low-order byte of __offset_flags contains flags, as given by the
    // masks from the enumeration __offset_flags_masks.
    if (Base->isVirtual())
      OffsetFlags |= BCTI_Virtual;
    if (Base->getAccessSpecifier() == AS_public)
      OffsetFlags |= BCTI_Public;

    Fields.push_back(llvm::ConstantInt::get(LongLTy, OffsetFlags));
  }
}
/// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct,
/// used for pointer types.
void RTTIBuilder::BuildPointerTypeInfo(QualType PointeeTy) {
  // Strip the qualifiers (looking through array types) off the pointee.
  Qualifiers Quals;
  QualType BarePointeeTy =
    CGM.getContext().getUnqualifiedArrayType(PointeeTy, Quals);

  // Itanium C++ ABI 2.9.5p7:
  //   __flags is a flag word describing the cv-qualification and other
  //   attributes of the type pointed to
  unsigned Flags = ComputeQualifierFlags(Quals);

  // Itanium C++ ABI 2.9.5p7:
  //   When the abi::__pbase_type_info is for a direct or indirect pointer to
  //   an incomplete class type, the incomplete target type flag is set.
  if (ContainsIncompleteClassType(BarePointeeTy))
    Flags |= PTI_Incomplete;

  llvm::Type *UnsignedIntLTy =
    CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));

  // Itanium C++ ABI 2.9.5p7:
  //   __pointee is a pointer to the std::type_info derivation for the
  //   unqualified type being pointed to.
  Fields.push_back(RTTIBuilder(CGM).BuildTypeInfo(BarePointeeTy));
}
/// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
/// struct, used for member pointer types.
void RTTIBuilder::BuildPointerToMemberTypeInfo(const MemberPointerType *Ty) {
QualType PointeeTy = Ty->getPointeeType();
Qualifiers Quals;
QualType UnqualifiedPointeeTy =
CGM.getContext().getUnqualifiedArrayType(PointeeTy, Quals);
// Itanium C++ ABI 2.9.5p7:
// __flags is a flag word describing the cv-qualification and other
// attributes of the type pointed to.
unsigned Flags = ComputeQualifierFlags(Quals);
const RecordType *ClassType = cast<RecordType>(Ty->getClass());
// Itanium C++ ABI 2.9.5p7:
// When the abi::__pbase_type_info is for a direct or indirect pointer to an
// incomplete class type, the incomplete target type flag is set.
if (ContainsIncompleteClassType(UnqualifiedPointeeTy))
Flags |= PTI_Incomplete;
if (IsIncompleteClassType(ClassType))
Flags |= PTI_ContainingClassIncomplete;
llvm::Type *UnsignedIntLTy =
CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
// Itanium C++ ABI 2.9.5p7:
// __pointee is a pointer to the std::type_info derivation for the
// unqualified type being pointed to.
llvm::Constant *PointeeTypeInfo =
RTTIBuilder(CGM).BuildTypeInfo(UnqualifiedPointeeTy);
Fields.push_back(PointeeTypeInfo);
// Itanium C++ ABI 2.9.5p9:
// __context is a pointer to an abi::__class_type_info corresponding to the
// class type containing the member pointed to
// (e.g., the "A" in "int A::*").
Fields.push_back(RTTIBuilder(CGM).BuildTypeInfo(QualType(ClassType, 0)));
}
llvm::Constant *CodeGenModule::GetAddrOfRTTIDescriptor(QualType Ty,
                                                       bool ForEH) {
  // Return a bogus pointer if RTTI is disabled, unless it's for EH.
  // FIXME: should we even be calling this method if RTTI is disabled
  // and it's not for EH?
  if (!getContext().getLangOpts().RTTI && !ForEH)
    return llvm::Constant::getNullValue(Int8PtrTy);

  // The GNU-family Objective-C runtimes provide their own EH type metadata
  // for Objective-C object pointers.
  if (ForEH && Ty->isObjCObjectPointerType() &&
      LangOpts.ObjCRuntime.isGNUFamily())
    return ObjCRuntime->GetEHType(Ty);

  return RTTIBuilder(*this).BuildTypeInfo(Ty);
}
/// EmitFundamentalRTTIDescriptor - Emit the RTTI descriptor for the given
/// fundamental type, together with the descriptors for a pointer to it and
/// a pointer to its const-qualified variant.
void CodeGenModule::EmitFundamentalRTTIDescriptor(QualType Type) {
  RTTIBuilder(*this).BuildTypeInfo(Type, true);
  RTTIBuilder(*this).BuildTypeInfo(Context.getPointerType(Type), true);
  RTTIBuilder(*this).BuildTypeInfo(Context.getPointerType(Type.withConst()),
                                   true);
}
/// EmitFundamentalRTTIDescriptors - Emit RTTI descriptors for every
/// fundamental type listed below.
void CodeGenModule::EmitFundamentalRTTIDescriptors() {
  QualType FundamentalTypes[] = { Context.VoidTy, Context.NullPtrTy,
                                  Context.BoolTy, Context.WCharTy,
                                  Context.CharTy, Context.UnsignedCharTy,
                                  Context.SignedCharTy, Context.ShortTy,
                                  Context.UnsignedShortTy, Context.IntTy,
                                  Context.UnsignedIntTy, Context.LongTy,
                                  Context.UnsignedLongTy, Context.LongLongTy,
                                  Context.UnsignedLongLongTy, Context.FloatTy,
                                  Context.DoubleTy, Context.LongDoubleTy,
                                  Context.Char16Ty, Context.Char32Ty };

  const unsigned NumTypes =
    sizeof(FundamentalTypes) / sizeof(FundamentalTypes[0]);
  for (unsigned Idx = 0; Idx != NumTypes; ++Idx)
    EmitFundamentalRTTIDescriptor(FundamentalTypes[Idx]);
}
| jeltz/rust-debian-package | src/llvm/tools/clang/lib/CodeGen/CGRTTI.cpp | C++ | apache-2.0 | 35,304 |
/*
* Copyright 2015 Dimitry Ivanov (mail@dimitryivanov.ru)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.noties.storm.sd;
import ru.noties.storm.FieldType;
/**
 * Base serializer for objects that are persisted as database integers:
 * always reports {@code FieldType.INT} as the serialized column type and
 * delegates the actual conversion to the two abstract methods.
 *
 * <p>Created by Dimitry Ivanov (mail@dimitryivanov.ru) on 16.02.2015.
 *
 * @param <T> the object type converted to and from an {@code int}
 */
public abstract class IntSerializer<T> extends AbsSerializer<T> {

    /** The persisted representation is always an integer column. */
    @Override
    public final FieldType getSerializedFieldType() {
        return FieldType.INT;
    }

    /**
     * Restores an object from its stored integer form.
     *
     * @param value the persisted integer representation
     * @return the deserialized object
     */
    public abstract T deserialize(int value);

    /**
     * Converts an object into the integer that will be persisted.
     *
     * @param value the object to serialize
     * @return the integer representation to store
     */
    public abstract int serialize(T value);
}
| noties/Storm | library/src/main/java/ru/noties/storm/sd/IntSerializer.java | Java | apache-2.0 | 1,033 |
/*
* Copyright 2018 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.server.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * An alias for {@code @Produces("application/octet-stream")}.
 *
 * <p>May be placed on a type or a method (see {@code @Target} below); the
 * effect comes from the {@link Produces} meta-annotation carrying the
 * {@code application/octet-stream} media type.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.METHOD })
@Produces("application/octet-stream")
public @interface ProducesOctetStream {
}
| jmostella/armeria | core/src/main/java/com/linecorp/armeria/server/annotation/ProducesOctetStream.java | Java | apache-2.0 | 1,082 |
Genoverse.Genomes.mus_musculus = {
"1": {
"size": 195471971,
"bands": [
{
"id": "A1",
"start": 2973781,
"end": 8840440,
"type": "gpos100"
},
{
"id": "A2",
"start": 8840441,
"end": 12278389,
"type": "gneg"
},
{
"id": "A3",
"start": 12278390,
"end": 20136559,
"type": "gpos33"
},
{
"id": "A4",
"start": 20136560,
"end": 22101101,
"type": "gneg"
},
{
"id": "A5",
"start": 22101102,
"end": 30941542,
"type": "gpos100"
},
{
"id": "B",
"start": 30941543,
"end": 43219933,
"type": "gneg"
},
{
"id": "C1.1",
"start": 43219934,
"end": 54516051,
"type": "gpos66"
},
{
"id": "C1.2",
"start": 54516052,
"end": 55989458,
"type": "gneg"
},
{
"id": "C1.3",
"start": 55989459,
"end": 59427408,
"type": "gpos66"
},
{
"id": "C2",
"start": 59427409,
"end": 65321034,
"type": "gneg"
},
{
"id": "C3",
"start": 65321035,
"end": 74652611,
"type": "gpos33"
},
{
"id": "C4",
"start": 74652612,
"end": 80055103,
"type": "gneg"
},
{
"id": "C5",
"start": 80055104,
"end": 87422136,
"type": "gpos33"
},
{
"id": "cenp",
"start": 991261,
"end": 1982520,
"type": "acen"
},
{
"id": "cenq",
"start": 1982521,
"end": 2973780,
"type": "acen"
},
{
"id": "D",
"start": 87422137,
"end": 99700527,
"type": "gneg"
},
{
"id": "E1.1",
"start": 99700528,
"end": 102647341,
"type": "gpos33"
},
{
"id": "E1.2",
"start": 102647342,
"end": 103629611,
"type": "gneg"
},
{
"id": "E2.1",
"start": 103629612,
"end": 112470053,
"type": "gpos100"
},
{
"id": "E2.2",
"start": 112470054,
"end": 113943460,
"type": "gneg"
},
{
"id": "E2.3",
"start": 113943461,
"end": 125730714,
"type": "gpos100"
},
{
"id": "E3",
"start": 125730715,
"end": 128677528,
"type": "gneg"
},
{
"id": "E4",
"start": 128677529,
"end": 139482511,
"type": "gpos66"
},
{
"id": "F",
"start": 139482512,
"end": 147340680,
"type": "gneg"
},
{
"id": "G1",
"start": 147340681,
"end": 151760902,
"type": "gpos100"
},
{
"id": "G2",
"start": 151760903,
"end": 152743172,
"type": "gneg"
},
{
"id": "G3",
"start": 152743173,
"end": 157163393,
"type": "gpos100"
},
{
"id": "H1",
"start": 157163394,
"end": 160110206,
"type": "gneg"
},
{
"id": "H2.1",
"start": 160110207,
"end": 164039291,
"type": "gpos33"
},
{
"id": "H2.2",
"start": 164039292,
"end": 165512698,
"type": "gneg"
},
{
"id": "H2.3",
"start": 165512699,
"end": 169932918,
"type": "gpos33"
},
{
"id": "H3",
"start": 169932919,
"end": 175826546,
"type": "gneg"
},
{
"id": "H4",
"start": 175826547,
"end": 181720173,
"type": "gpos33"
},
{
"id": "H5",
"start": 181720174,
"end": 188104936,
"type": "gneg"
},
{
"id": "H6",
"start": 188104937,
"end": 195471971,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 991260,
"type": "tip"
}
]
},
"2": {
"size": 182113224,
"bands": [
{
"id": "A1",
"start": 3006028,
"end": 14080919,
"type": "gpos100"
},
{
"id": "A2",
"start": 14080920,
"end": 16427738,
"type": "gneg"
},
{
"id": "A3",
"start": 16427739,
"end": 29100566,
"type": "gpos33"
},
{
"id": "B",
"start": 29100567,
"end": 48344489,
"type": "gneg"
},
{
"id": "C1.1",
"start": 48344490,
"end": 60547952,
"type": "gpos100"
},
{
"id": "C1.2",
"start": 60547953,
"end": 61017316,
"type": "gneg"
},
{
"id": "C1.3",
"start": 61017317,
"end": 68527140,
"type": "gpos100"
},
{
"id": "C2",
"start": 68527141,
"end": 71812688,
"type": "gneg"
},
{
"id": "C3",
"start": 71812689,
"end": 81199967,
"type": "gpos66"
},
{
"id": "cenp",
"start": 1002010,
"end": 2004018,
"type": "acen"
},
{
"id": "cenq",
"start": 2004019,
"end": 3006027,
"type": "acen"
},
{
"id": "D",
"start": 81199968,
"end": 88709791,
"type": "gneg"
},
{
"id": "E1",
"start": 88709792,
"end": 101382619,
"type": "gpos100"
},
{
"id": "E2",
"start": 101382620,
"end": 105137530,
"type": "gneg"
},
{
"id": "E3",
"start": 105137531,
"end": 113116719,
"type": "gpos33"
},
{
"id": "E4",
"start": 113116720,
"end": 115932902,
"type": "gneg"
},
{
"id": "E5",
"start": 115932903,
"end": 123912089,
"type": "gpos66"
},
{
"id": "F1",
"start": 123912090,
"end": 131891278,
"type": "gneg"
},
{
"id": "F2",
"start": 131891279,
"end": 134707461,
"type": "gpos33"
},
{
"id": "F3",
"start": 134707462,
"end": 141278557,
"type": "gneg"
},
{
"id": "G1",
"start": 141278558,
"end": 146910925,
"type": "gpos100"
},
{
"id": "G2",
"start": 146910926,
"end": 147849652,
"type": "gneg"
},
{
"id": "G3",
"start": 147849653,
"end": 152543293,
"type": "gpos100"
},
{
"id": "H1",
"start": 152543294,
"end": 159114388,
"type": "gneg"
},
{
"id": "H2",
"start": 159114389,
"end": 163338664,
"type": "gpos33"
},
{
"id": "H3",
"start": 163338665,
"end": 173664671,
"type": "gneg"
},
{
"id": "H4",
"start": 173664672,
"end": 182113224,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1002009,
"type": "tip"
}
]
},
"3": {
"size": 160039680,
"bands": [
{
"id": "A1",
"start": 3008269,
"end": 18541181,
"type": "gpos100"
},
{
"id": "A2",
"start": 18541182,
"end": 20492885,
"type": "gneg"
},
{
"id": "A3",
"start": 20492886,
"end": 35618586,
"type": "gpos66"
},
{
"id": "B",
"start": 35618587,
"end": 46840881,
"type": "gneg"
},
{
"id": "C",
"start": 46840882,
"end": 56599398,
"type": "gpos100"
},
{
"id": "cenp",
"start": 1002757,
"end": 2005512,
"type": "acen"
},
{
"id": "cenq",
"start": 2005513,
"end": 3008268,
"type": "acen"
},
{
"id": "D",
"start": 56599399,
"end": 60990731,
"type": "gneg"
},
{
"id": "E1",
"start": 60990732,
"end": 69773396,
"type": "gpos33"
},
{
"id": "E2",
"start": 69773397,
"end": 72700951,
"type": "gneg"
},
{
"id": "E3",
"start": 72700952,
"end": 83923246,
"type": "gpos100"
},
{
"id": "F1",
"start": 83923247,
"end": 93193837,
"type": "gneg"
},
{
"id": "F2.1",
"start": 93193838,
"end": 97585169,
"type": "gpos33"
},
{
"id": "F2.2",
"start": 97585170,
"end": 106367835,
"type": "gneg"
},
{
"id": "F2.3",
"start": 106367836,
"end": 108319539,
"type": "gpos33"
},
{
"id": "F3",
"start": 108319540,
"end": 115150501,
"type": "gneg"
},
{
"id": "G1",
"start": 115150502,
"end": 126860721,
"type": "gpos100"
},
{
"id": "G2",
"start": 126860722,
"end": 128812424,
"type": "gneg"
},
{
"id": "G3",
"start": 128812425,
"end": 138570942,
"type": "gpos66"
},
{
"id": "H1",
"start": 138570943,
"end": 143938126,
"type": "gneg"
},
{
"id": "H2",
"start": 143938127,
"end": 148329459,
"type": "gpos33"
},
{
"id": "H3",
"start": 148329460,
"end": 154184569,
"type": "gneg"
},
{
"id": "H4",
"start": 154184570,
"end": 160039680,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1002756,
"type": "tip"
}
]
},
"4": {
"size": 156508116,
"bands": [
{
"id": "A1",
"start": 3016925,
"end": 14882673,
"type": "gpos100"
},
{
"id": "A2",
"start": 14882674,
"end": 17763190,
"type": "gneg"
},
{
"id": "A3",
"start": 17763191,
"end": 28325088,
"type": "gpos100"
},
{
"id": "A4",
"start": 28325089,
"end": 30245433,
"type": "gneg"
},
{
"id": "A5",
"start": 30245434,
"end": 43687847,
"type": "gpos66"
},
{
"id": "B1",
"start": 43687848,
"end": 51849313,
"type": "gneg"
},
{
"id": "B2",
"start": 51849314,
"end": 55209917,
"type": "gpos33"
},
{
"id": "B3",
"start": 55209918,
"end": 63371383,
"type": "gneg"
},
{
"id": "C1",
"start": 63371384,
"end": 69612504,
"type": "gpos33"
},
{
"id": "C2",
"start": 69612505,
"end": 72012935,
"type": "gneg"
},
{
"id": "C3",
"start": 72012936,
"end": 84015092,
"type": "gpos100"
},
{
"id": "C4",
"start": 84015093,
"end": 89776127,
"type": "gneg"
},
{
"id": "C5",
"start": 89776128,
"end": 97457507,
"type": "gpos66"
},
{
"id": "C6",
"start": 97457508,
"end": 105618973,
"type": "gneg"
},
{
"id": "C7",
"start": 105618974,
"end": 110899922,
"type": "gpos66"
},
{
"id": "cenp",
"start": 1005642,
"end": 2011283,
"type": "acen"
},
{
"id": "cenq",
"start": 2011284,
"end": 3016924,
"type": "acen"
},
{
"id": "D1",
"start": 110899923,
"end": 117621129,
"type": "gneg"
},
{
"id": "D2.1",
"start": 117621130,
"end": 120501647,
"type": "gpos33"
},
{
"id": "D2.2",
"start": 120501648,
"end": 131063544,
"type": "gneg"
},
{
"id": "D2.3",
"start": 131063545,
"end": 133944061,
"type": "gpos33"
},
{
"id": "D3",
"start": 133944062,
"end": 141625441,
"type": "gneg"
},
{
"id": "E1",
"start": 141625442,
"end": 147866562,
"type": "gpos100"
},
{
"id": "E2",
"start": 147866563,
"end": 156508116,
"type": "gneg"
},
{
"id": "tip",
"start": 1,
"end": 1005641,
"type": "tip"
}
]
},
"5": {
"size": 151834684,
"bands": [
{
"id": "A1",
"start": 2986183,
"end": 14895174,
"type": "gpos100"
},
{
"id": "A2",
"start": 14895175,
"end": 16336642,
"type": "gneg"
},
{
"id": "A3",
"start": 16336643,
"end": 25465943,
"type": "gpos66"
},
{
"id": "B1",
"start": 25465944,
"end": 33634265,
"type": "gneg"
},
{
"id": "B2",
"start": 33634266,
"end": 35556222,
"type": "gpos33"
},
{
"id": "B3",
"start": 35556223,
"end": 50451397,
"type": "gneg"
},
{
"id": "C1",
"start": 50451398,
"end": 58619719,
"type": "gpos33"
},
{
"id": "C2",
"start": 58619720,
"end": 61022166,
"type": "gneg"
},
{
"id": "C3.1",
"start": 61022167,
"end": 71592935,
"type": "gpos100"
},
{
"id": "C3.2",
"start": 71592936,
"end": 73514894,
"type": "gneg"
},
{
"id": "C3.3",
"start": 73514895,
"end": 77839299,
"type": "gpos66"
},
{
"id": "cenp",
"start": 995395,
"end": 1990788,
"type": "acen"
},
{
"id": "cenq",
"start": 1990789,
"end": 2986182,
"type": "acen"
},
{
"id": "D",
"start": 77839300,
"end": 81683215,
"type": "gneg"
},
{
"id": "E1",
"start": 81683216,
"end": 91293005,
"type": "gpos100"
},
{
"id": "E2",
"start": 91293006,
"end": 93695452,
"type": "gneg"
},
{
"id": "E3",
"start": 93695453,
"end": 99461326,
"type": "gpos33"
},
{
"id": "E4",
"start": 99461327,
"end": 101863775,
"type": "gneg"
},
{
"id": "E5",
"start": 101863776,
"end": 107629649,
"type": "gpos33"
},
{
"id": "F",
"start": 107629650,
"end": 124927270,
"type": "gneg"
},
{
"id": "G1.1",
"start": 124927271,
"end": 126849229,
"type": "gpos33"
},
{
"id": "G1.2",
"start": 126849230,
"end": 127810207,
"type": "gneg"
},
{
"id": "G1.3",
"start": 127810208,
"end": 130693144,
"type": "gpos33"
},
{
"id": "G2",
"start": 130693145,
"end": 146068809,
"type": "gneg"
},
{
"id": "G3",
"start": 146068810,
"end": 151834684,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 995394,
"type": "tip"
}
]
},
"6": {
"size": 149736546,
"bands": [
{
"id": "A1",
"start": 3004405,
"end": 16637393,
"type": "gpos100"
},
{
"id": "A2",
"start": 16637394,
"end": 21530744,
"type": "gneg"
},
{
"id": "A3.1",
"start": 21530745,
"end": 27402766,
"type": "gpos100"
},
{
"id": "A3.2",
"start": 27402767,
"end": 28381436,
"type": "gneg"
},
{
"id": "A3.3",
"start": 28381437,
"end": 34253457,
"type": "gpos100"
},
{
"id": "B1",
"start": 34253458,
"end": 41593484,
"type": "gneg"
},
{
"id": "B2.1",
"start": 41593485,
"end": 44529494,
"type": "gpos66"
},
{
"id": "B2.2",
"start": 44529495,
"end": 45997500,
"type": "gneg"
},
{
"id": "B2.3",
"start": 45997501,
"end": 50890851,
"type": "gpos66"
},
{
"id": "B3",
"start": 50890852,
"end": 62634894,
"type": "gneg"
},
{
"id": "C1",
"start": 62634895,
"end": 74378937,
"type": "gpos100"
},
{
"id": "C2",
"start": 74378938,
"end": 76825612,
"type": "gneg"
},
{
"id": "C3",
"start": 76825613,
"end": 86122980,
"type": "gpos66"
},
{
"id": "cenp",
"start": 1001469,
"end": 2002936,
"type": "acen"
},
{
"id": "cenq",
"start": 2002937,
"end": 3004404,
"type": "acen"
},
{
"id": "D1",
"start": 86122981,
"end": 94441677,
"type": "gneg"
},
{
"id": "D2",
"start": 94441678,
"end": 95909682,
"type": "gpos33"
},
{
"id": "D3",
"start": 95909683,
"end": 103249709,
"type": "gneg"
},
{
"id": "E1",
"start": 103249710,
"end": 108632395,
"type": "gpos100"
},
{
"id": "E2",
"start": 108632396,
"end": 109611066,
"type": "gneg"
},
{
"id": "E3",
"start": 109611067,
"end": 116951092,
"type": "gpos100"
},
{
"id": "F1",
"start": 116951093,
"end": 122823113,
"type": "gneg"
},
{
"id": "F2",
"start": 122823114,
"end": 125269789,
"type": "gpos33"
},
{
"id": "F3",
"start": 125269790,
"end": 132120481,
"type": "gneg"
},
{
"id": "G1",
"start": 132120482,
"end": 139460507,
"type": "gpos66"
},
{
"id": "G2",
"start": 139460508,
"end": 142885854,
"type": "gneg"
},
{
"id": "G3",
"start": 142885855,
"end": 149736546,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1001468,
"type": "tip"
}
]
},
"7": {
"size": 145441459,
"bands": [
{
"id": "A1",
"start": 2860683,
"end": 15202939,
"type": "gpos100"
},
{
"id": "A2",
"start": 15202940,
"end": 18243527,
"type": "gneg"
},
{
"id": "A3",
"start": 18243528,
"end": 28378820,
"type": "gpos33"
},
{
"id": "B1",
"start": 28378821,
"end": 34459996,
"type": "gneg"
},
{
"id": "B2",
"start": 34459997,
"end": 37500585,
"type": "gpos33"
},
{
"id": "B3",
"start": 37500585,
"end": 47635877,
"type": "gneg"
},
{
"id": "B4",
"start": 47635878,
"end": 54223818,
"type": "gpos33"
},
{
"id": "B5",
"start": 54223819,
"end": 60811759,
"type": "gneg"
},
{
"id": "C",
"start": 60811760,
"end": 71453817,
"type": "gpos100"
},
{
"id": "cenp",
"start": 953561,
"end": 1907121,
"type": "acen"
},
{
"id": "cenq",
"start": 1907122,
"end": 2860682,
"type": "acen"
},
{
"id": "D1",
"start": 71453818,
"end": 77028228,
"type": "gneg"
},
{
"id": "D2",
"start": 77028229,
"end": 80575581,
"type": "gpos66"
},
{
"id": "D3",
"start": 80575582,
"end": 90204109,
"type": "gneg"
},
{
"id": "E1",
"start": 90204110,
"end": 99832638,
"type": "gpos100"
},
{
"id": "E2",
"start": 99832639,
"end": 102366461,
"type": "gneg"
},
{
"id": "E3",
"start": 102366462,
"end": 111488225,
"type": "gpos33"
},
{
"id": "F1",
"start": 111488226,
"end": 118582930,
"type": "gneg"
},
{
"id": "F2",
"start": 118582931,
"end": 123143812,
"type": "gpos33"
},
{
"id": "F3",
"start": 123143813,
"end": 137333224,
"type": "gneg"
},
{
"id": "F4",
"start": 137333225,
"end": 140880576,
"type": "gpos33"
},
{
"id": "F5",
"start": 140880577,
"end": 145441459,
"type": "gneg"
},
{
"id": "tip",
"start": 1,
"end": 953560,
"type": "tip"
}
]
},
"8": {
"size": 129401213,
"bands": [
{
"id": "A1.1",
"start": 2946767,
"end": 15940728,
"type": "gpos100"
},
{
"id": "A1.2",
"start": 15940729,
"end": 16878419,
"type": "gneg"
},
{
"id": "A1.3",
"start": 16878420,
"end": 20160333,
"type": "gpos33"
},
{
"id": "A2",
"start": 20160334,
"end": 29537233,
"type": "gneg"
},
{
"id": "A3",
"start": 29537234,
"end": 33756838,
"type": "gpos33"
},
{
"id": "A4",
"start": 33756839,
"end": 44071427,
"type": "gneg"
},
{
"id": "B1.1",
"start": 44071428,
"end": 48291032,
"type": "gpos66"
},
{
"id": "B1.2",
"start": 48291033,
"end": 50166412,
"type": "gneg"
},
{
"id": "B1.3",
"start": 50166413,
"end": 55792551,
"type": "gpos66"
},
{
"id": "B2",
"start": 55792552,
"end": 59543311,
"type": "gneg"
},
{
"id": "B3.1",
"start": 59543312,
"end": 67044831,
"type": "gpos100"
},
{
"id": "B3.2",
"start": 67044832,
"end": 67982520,
"type": "gneg"
},
{
"id": "B3.3",
"start": 67982521,
"end": 74546350,
"type": "gpos100"
},
{
"id": "C1",
"start": 74546351,
"end": 80172490,
"type": "gneg"
},
{
"id": "C2",
"start": 80172491,
"end": 84860939,
"type": "gpos33"
},
{
"id": "C3",
"start": 84860940,
"end": 90018235,
"type": "gneg"
},
{
"id": "C4",
"start": 90018236,
"end": 91424769,
"type": "gpos33"
},
{
"id": "C5",
"start": 91424770,
"end": 95644374,
"type": "gneg"
},
{
"id": "cenp",
"start": 982256,
"end": 1964510,
"type": "acen"
},
{
"id": "cenq",
"start": 1964511,
"end": 2946766,
"type": "acen"
},
{
"id": "D1",
"start": 95644375,
"end": 103145894,
"type": "gpos100"
},
{
"id": "D2",
"start": 103145895,
"end": 104083583,
"type": "gneg"
},
{
"id": "D3",
"start": 104083584,
"end": 110647414,
"type": "gpos33"
},
{
"id": "E1",
"start": 110647414,
"end": 123775073,
"type": "gneg"
},
{
"id": "E2",
"start": 123775074,
"end": 129401213,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 982255,
"type": "tip"
}
]
},
"9": {
"size": 124595110,
"bands": [
{
"id": "A1",
"start": 3012548,
"end": 14412120,
"type": "gpos100"
},
{
"id": "A2",
"start": 14412121,
"end": 19526099,
"type": "gneg"
},
{
"id": "A3",
"start": 19526100,
"end": 24175170,
"type": "gpos33"
},
{
"id": "A4",
"start": 24175171,
"end": 38122383,
"type": "gneg"
},
{
"id": "A5.1",
"start": 38122384,
"end": 44166176,
"type": "gpos66"
},
{
"id": "A5.2",
"start": 44166177,
"end": 46490712,
"type": "gneg"
},
{
"id": "A5.3",
"start": 46490713,
"end": 54859040,
"type": "gpos66"
},
{
"id": "B",
"start": 54859041,
"end": 63227368,
"type": "gneg"
},
{
"id": "C",
"start": 63227369,
"end": 69736068,
"type": "gpos33"
},
{
"id": "cenp",
"start": 1004183,
"end": 2008364,
"type": "acen"
},
{
"id": "cenq",
"start": 2008365,
"end": 3012547,
"type": "acen"
},
{
"id": "D",
"start": 69736069,
"end": 77639490,
"type": "gneg"
},
{
"id": "E1",
"start": 77639491,
"end": 82753467,
"type": "gpos33"
},
{
"id": "E2",
"start": 82753468,
"end": 84613096,
"type": "gneg"
},
{
"id": "E3.1",
"start": 84613097,
"end": 91121796,
"type": "gpos100"
},
{
"id": "E3.2",
"start": 91121797,
"end": 91586703,
"type": "gneg"
},
{
"id": "E3.3",
"start": 91586704,
"end": 100884845,
"type": "gpos100"
},
{
"id": "E4",
"start": 100884846,
"end": 101814660,
"type": "gpos66"
},
{
"id": "F1",
"start": 101814661,
"end": 108323360,
"type": "gneg"
},
{
"id": "F2",
"start": 108323361,
"end": 111112803,
"type": "gpos33"
},
{
"id": "F3",
"start": 111112804,
"end": 119946038,
"type": "gneg"
},
{
"id": "F4",
"start": 119946039,
"end": 124595110,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1004182,
"type": "tip"
}
]
},
"10": {
"size": 130694993,
"bands": [
{
"id": "A1",
"start": 3016195,
"end": 12822904,
"type": "gpos100"
},
{
"id": "A2",
"start": 12822905,
"end": 17754791,
"type": "gneg"
},
{
"id": "A3",
"start": 17754792,
"end": 23673055,
"type": "gpos33"
},
{
"id": "A4",
"start": 23673056,
"end": 33536827,
"type": "gneg"
},
{
"id": "B1",
"start": 33536828,
"end": 41427846,
"type": "gpos100"
},
{
"id": "B2",
"start": 41427847,
"end": 48332487,
"type": "gneg"
},
{
"id": "B3",
"start": 48332488,
"end": 56223505,
"type": "gpos100"
},
{
"id": "B4",
"start": 56223506,
"end": 64114524,
"type": "gneg"
},
{
"id": "B5.1",
"start": 64114525,
"end": 68060033,
"type": "gpos100"
},
{
"id": "B5.2",
"start": 68060034,
"end": 68553222,
"type": "gneg"
},
{
"id": "B5.3",
"start": 68553223,
"end": 74964674,
"type": "gpos100"
},
{
"id": "C1",
"start": 74964675,
"end": 89267145,
"type": "gneg"
},
{
"id": "C2",
"start": 89267146,
"end": 96171787,
"type": "gpos33"
},
{
"id": "C3",
"start": 96171788,
"end": 99130918,
"type": "gneg"
},
{
"id": "cenp",
"start": 1005399,
"end": 2010796,
"type": "acen"
},
{
"id": "cenq",
"start": 2010797,
"end": 3016194,
"type": "acen"
},
{
"id": "D1",
"start": 99130919,
"end": 111953823,
"type": "gpos100"
},
{
"id": "D2",
"start": 111953824,
"end": 124776728,
"type": "gneg"
},
{
"id": "D3",
"start": 124776729,
"end": 130694993,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1005398,
"type": "tip"
}
]
},
"11": {
"size": 122082543,
"bands": [
{
"id": "A1",
"start": 3005877,
"end": 13046988,
"type": "gpos100"
},
{
"id": "A2",
"start": 13046989,
"end": 17240663,
"type": "gneg"
},
{
"id": "A3.1",
"start": 17240664,
"end": 21900302,
"type": "gpos100"
},
{
"id": "A3.2",
"start": 21900303,
"end": 25628014,
"type": "gneg"
},
{
"id": "A3.3",
"start": 25628015,
"end": 30287653,
"type": "gpos100"
},
{
"id": "A4",
"start": 30287654,
"end": 36345184,
"type": "gneg"
},
{
"id": "A5",
"start": 36345185,
"end": 43334642,
"type": "gpos100"
},
{
"id": "B1.1",
"start": 43334643,
"end": 47994281,
"type": "gneg"
},
{
"id": "B1.2",
"start": 47994282,
"end": 49858137,
"type": "gpos33"
},
{
"id": "B1.3",
"start": 49858138,
"end": 60109343,
"type": "gneg"
},
{
"id": "B2",
"start": 60109344,
"end": 62905126,
"type": "gpos33"
},
{
"id": "B3",
"start": 62905127,
"end": 70826512,
"type": "gneg"
},
{
"id": "B4",
"start": 70826513,
"end": 74088260,
"type": "gpos33"
},
{
"id": "B5",
"start": 74088261,
"end": 82009646,
"type": "gneg"
},
{
"id": "C",
"start": 82009647,
"end": 90396996,
"type": "gpos100"
},
{
"id": "cenp",
"start": 1001959,
"end": 2003917,
"type": "acen"
},
{
"id": "cenq",
"start": 2003918,
"end": 3005876,
"type": "acen"
},
{
"id": "D",
"start": 90396997,
"end": 102512058,
"type": "gneg"
},
{
"id": "E1",
"start": 102512059,
"end": 110433444,
"type": "gpos66"
},
{
"id": "E2",
"start": 110433445,
"end": 122082543,
"type": "gneg"
},
{
"id": "tip",
"start": 1,
"end": 1001958,
"type": "tip"
}
]
},
"12": {
"size": 120129022,
"bands": [
{
"id": "A1.1",
"start": 2972080,
"end": 17601321,
"type": "gpos100"
},
{
"id": "A1.2",
"start": 17601322,
"end": 21121586,
"type": "gneg"
},
{
"id": "A1.3",
"start": 21121587,
"end": 25961949,
"type": "gpos66"
},
{
"id": "A2",
"start": 25961949,
"end": 31682378,
"type": "gneg"
},
{
"id": "A3",
"start": 31682379,
"end": 39162941,
"type": "gpos33"
},
{
"id": "B1",
"start": 39162942,
"end": 44003304,
"type": "gneg"
},
{
"id": "B2",
"start": 44003305,
"end": 44883370,
"type": "gpos33"
},
{
"id": "B3",
"start": 44883371,
"end": 51923898,
"type": "gneg"
},
{
"id": "C1",
"start": 51923899,
"end": 66004956,
"type": "gpos100"
},
{
"id": "C2",
"start": 66004957,
"end": 71285352,
"type": "gneg"
},
{
"id": "C3",
"start": 71285353,
"end": 80966079,
"type": "gpos100"
},
{
"id": "cenp",
"start": 990694,
"end": 1981386,
"type": "acen"
},
{
"id": "cenq",
"start": 1981387,
"end": 2972079,
"type": "acen"
},
{
"id": "D1",
"start": 80966080,
"end": 85366410,
"type": "gneg"
},
{
"id": "D2",
"start": 85366411,
"end": 88446642,
"type": "gpos33"
},
{
"id": "D3",
"start": 88446643,
"end": 95487170,
"type": "gneg"
},
{
"id": "E",
"start": 95487171,
"end": 106047964,
"type": "gpos100"
},
{
"id": "F1",
"start": 106047965,
"end": 114408591,
"type": "gneg"
},
{
"id": "F2",
"start": 114408592,
"end": 120129022,
"type": "gpos66"
},
{
"id": "tip",
"start": 1,
"end": 990693,
"type": "tip"
}
]
},
"13": {
"size": 120421639,
"bands": [
{
"id": "A1",
"start": 3003426,
"end": 16286532,
"type": "gpos100"
},
{
"id": "A2",
"start": 16286533,
"end": 21221846,
"type": "gneg"
},
{
"id": "A3.1",
"start": 21221847,
"end": 29611877,
"type": "gpos66"
},
{
"id": "A3.2",
"start": 29611878,
"end": 33066596,
"type": "gneg"
},
{
"id": "A3.3",
"start": 33066597,
"end": 41456629,
"type": "gpos33"
},
{
"id": "A4",
"start": 41456630,
"end": 44417817,
"type": "gneg"
},
{
"id": "A5",
"start": 44417818,
"end": 52807849,
"type": "gpos33"
},
{
"id": "B1",
"start": 52807850,
"end": 59223756,
"type": "gneg"
},
{
"id": "B2",
"start": 59223757,
"end": 61691412,
"type": "gpos33"
},
{
"id": "B3",
"start": 61691413,
"end": 69587913,
"type": "gneg"
},
{
"id": "C1",
"start": 69587914,
"end": 78471477,
"type": "gpos33"
},
{
"id": "C2",
"start": 78471478,
"end": 80939133,
"type": "gneg"
},
{
"id": "C3",
"start": 80939134,
"end": 94758010,
"type": "gpos100"
},
{
"id": "cenp",
"start": 1001142,
"end": 2002283,
"type": "acen"
},
{
"id": "cenq",
"start": 2002284,
"end": 3003425,
"type": "acen"
},
{
"id": "D1",
"start": 94758011,
"end": 106602762,
"type": "gneg"
},
{
"id": "D2.1",
"start": 106602763,
"end": 110551012,
"type": "gpos33"
},
{
"id": "D2.2",
"start": 110551013,
"end": 116473388,
"type": "gneg"
},
{
"id": "D2.3",
"start": 116473389,
"end": 120421639,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1001141,
"type": "tip"
}
]
},
"14": {
"size": 124902244,
"bands": [
{
"id": "A1",
"start": 2992989,
"end": 14988268,
"type": "gpos100"
},
{
"id": "A2",
"start": 14988269,
"end": 19484749,
"type": "gneg"
},
{
"id": "A3",
"start": 19484750,
"end": 29976538,
"type": "gpos33"
},
{
"id": "B",
"start": 29976539,
"end": 43465980,
"type": "gneg"
},
{
"id": "C1",
"start": 43465981,
"end": 51959333,
"type": "gpos100"
},
{
"id": "C2",
"start": 51959334,
"end": 54956987,
"type": "gneg"
},
{
"id": "C3",
"start": 54956988,
"end": 59953076,
"type": "gpos66"
},
{
"id": "cenp",
"start": 997663,
"end": 1995325,
"type": "acen"
},
{
"id": "cenq",
"start": 1995326,
"end": 2992988,
"type": "acen"
},
{
"id": "D1",
"start": 59953077,
"end": 68946037,
"type": "gneg"
},
{
"id": "D2",
"start": 68946038,
"end": 72942909,
"type": "gpos33"
},
{
"id": "D3",
"start": 72942910,
"end": 84933525,
"type": "gneg"
},
{
"id": "E1",
"start": 84933526,
"end": 88930397,
"type": "gpos66"
},
{
"id": "E2.1",
"start": 88930398,
"end": 98922576,
"type": "gpos100"
},
{
"id": "E2.2",
"start": 98922577,
"end": 99921795,
"type": "gneg"
},
{
"id": "E2.3",
"start": 99921795,
"end": 107415929,
"type": "gpos100"
},
{
"id": "E3",
"start": 107415930,
"end": 110913192,
"type": "gneg"
},
{
"id": "E4",
"start": 110913193,
"end": 120905371,
"type": "gpos100"
},
{
"id": "E5",
"start": 120905372,
"end": 124902244,
"type": "gneg"
},
{
"id": "tip",
"start": 1,
"end": 997662,
"type": "tip"
}
]
},
"15": {
"size": 104043685,
"bands": [
{
"id": "A1",
"start": 3015906,
"end": 16500319,
"type": "gpos100"
},
{
"id": "A2",
"start": 16500320,
"end": 24292137,
"type": "gneg"
},
{
"id": "B1",
"start": 24292138,
"end": 29792243,
"type": "gpos33"
},
{
"id": "B2",
"start": 29792244,
"end": 32083955,
"type": "gneg"
},
{
"id": "B3.1",
"start": 32083956,
"end": 43084168,
"type": "gpos100"
},
{
"id": "B3.2",
"start": 43084169,
"end": 44917537,
"type": "gneg"
},
{
"id": "B3.3",
"start": 44917538,
"end": 49959301,
"type": "gpos66"
},
{
"id": "C",
"start": 49959302,
"end": 53626039,
"type": "gneg"
},
{
"id": "cenp",
"start": 1005302,
"end": 2010603,
"type": "acen"
},
{
"id": "cenq",
"start": 2010604,
"end": 3015905,
"type": "acen"
},
{
"id": "D1",
"start": 53626040,
"end": 66459622,
"type": "gpos100"
},
{
"id": "D2",
"start": 66459623,
"end": 68751333,
"type": "gneg"
},
{
"id": "D3",
"start": 68751334,
"end": 77459835,
"type": "gpos66"
},
{
"id": "E1",
"start": 77459836,
"end": 83876626,
"type": "gneg"
},
{
"id": "E2",
"start": 83876627,
"end": 87085022,
"type": "gpos33"
},
{
"id": "E3",
"start": 87085023,
"end": 95793524,
"type": "gneg"
},
{
"id": "F1",
"start": 95793525,
"end": 101293631,
"type": "gpos66"
},
{
"id": "F2",
"start": 101293632,
"end": 102210316,
"type": "gneg"
},
{
"id": "F3",
"start": 102210317,
"end": 104043685,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 1005301,
"type": "tip"
}
]
},
"16": {
"size": 98207768,
"bands": [
{
"id": "A1",
"start": 2996602,
"end": 15432649,
"type": "gpos100"
},
{
"id": "A2",
"start": 15432650,
"end": 16367961,
"type": "gneg"
},
{
"id": "A3",
"start": 16367962,
"end": 20576864,
"type": "gpos33"
},
{
"id": "B1",
"start": 20576865,
"end": 26188738,
"type": "gneg"
},
{
"id": "B2",
"start": 26188739,
"end": 32268266,
"type": "gpos33"
},
{
"id": "B3",
"start": 32268267,
"end": 38347794,
"type": "gneg"
},
{
"id": "B4",
"start": 38347795,
"end": 44894979,
"type": "gpos33"
},
{
"id": "B5",
"start": 44894980,
"end": 53780444,
"type": "gneg"
},
{
"id": "C1.1",
"start": 53780445,
"end": 57989348,
"type": "gpos66"
},
{
"id": "C1.2",
"start": 57989349,
"end": 58924660,
"type": "gneg"
},
{
"id": "C1.3",
"start": 58924661,
"end": 66874813,
"type": "gpos66"
},
{
"id": "C2",
"start": 66874814,
"end": 70616061,
"type": "gneg"
},
{
"id": "C3.1",
"start": 70616062,
"end": 79033870,
"type": "gpos100"
},
{
"id": "C3.2",
"start": 79033871,
"end": 79501525,
"type": "gneg"
},
{
"id": "C3.3",
"start": 79501526,
"end": 91660583,
"type": "gpos100"
},
{
"id": "C4",
"start": 91660584,
"end": 98207768,
"type": "gneg"
},
{
"id": "cenp",
"start": 998868,
"end": 1997734,
"type": "acen"
},
{
"id": "cenq",
"start": 1997735,
"end": 2996601,
"type": "acen"
},
{
"id": "tip",
"start": 1,
"end": 998867,
"type": "tip"
}
]
},
"17": {
"size": 94987271,
"bands": [
{
"id": "A1",
"start": 2991014,
"end": 13943085,
"type": "gpos100"
},
{
"id": "A2",
"start": 13943086,
"end": 16121691,
"type": "gneg"
},
{
"id": "A3.1",
"start": 16121692,
"end": 17428856,
"type": "gpos33"
},
{
"id": "A3.2",
"start": 17428857,
"end": 21786070,
"type": "gneg"
},
{
"id": "A3.3",
"start": 21786071,
"end": 31371942,
"type": "gpos66"
},
{
"id": "B1",
"start": 31371943,
"end": 40086370,
"type": "gneg"
},
{
"id": "B2",
"start": 40086371,
"end": 41393535,
"type": "gpos33"
},
{
"id": "B3",
"start": 41393536,
"end": 45750749,
"type": "gneg"
},
{
"id": "C",
"start": 45750750,
"end": 55772342,
"type": "gpos66"
},
{
"id": "cenp",
"start": 997005,
"end": 1994009,
"type": "acen"
},
{
"id": "cenq",
"start": 1994010,
"end": 2991013,
"type": "acen"
},
{
"id": "D",
"start": 55772343,
"end": 60129556,
"type": "gneg"
},
{
"id": "E1.1",
"start": 60129557,
"end": 67972542,
"type": "gpos100"
},
{
"id": "E1.2",
"start": 67972543,
"end": 68843984,
"type": "gneg"
},
{
"id": "E1.3",
"start": 68843985,
"end": 73201199,
"type": "gpos100"
},
{
"id": "E2",
"start": 73201200,
"end": 78429856,
"type": "gneg"
},
{
"id": "E3",
"start": 78429857,
"end": 82787070,
"type": "gpos33"
},
{
"id": "E4",
"start": 82787071,
"end": 88887170,
"type": "gneg"
},
{
"id": "E5",
"start": 88887171,
"end": 94987271,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 997004,
"type": "tip"
}
]
},
"18": {
"size": 90702639,
"bands": [
{
"id": "A1",
"start": 2997707,
"end": 19406145,
"type": "gpos100"
},
{
"id": "A2",
"start": 19406146,
"end": 29531091,
"type": "gneg"
},
{
"id": "B1",
"start": 29531092,
"end": 35437309,
"type": "gpos66"
},
{
"id": "B2",
"start": 35437310,
"end": 37124800,
"type": "gneg"
},
{
"id": "B3",
"start": 37124801,
"end": 45562255,
"type": "gpos100"
},
{
"id": "C",
"start": 45562256,
"end": 49780983,
"type": "gneg"
},
{
"id": "cenp",
"start": 999236,
"end": 1998471,
"type": "acen"
},
{
"id": "cenq",
"start": 1998472,
"end": 2997706,
"type": "acen"
},
{
"id": "D1",
"start": 49780984,
"end": 53999710,
"type": "gpos100"
},
{
"id": "D2",
"start": 53999711,
"end": 54421582,
"type": "gneg"
},
{
"id": "D3",
"start": 54421583,
"end": 60749673,
"type": "gpos100"
},
{
"id": "E1",
"start": 60749674,
"end": 67921510,
"type": "gneg"
},
{
"id": "E2",
"start": 67921511,
"end": 75093346,
"type": "gpos33"
},
{
"id": "E3",
"start": 75093347,
"end": 83530801,
"type": "gneg"
},
{
"id": "E4",
"start": 83530802,
"end": 90702639,
"type": "gpos33"
},
{
"id": "tip",
"start": 1,
"end": 999235,
"type": "tip"
}
]
},
"19": {
"size": 61431566,
"bands": [
{
"id": "A",
"start": 3004360,
"end": 16680093,
"type": "gpos100"
},
{
"id": "B",
"start": 16680094,
"end": 25630388,
"type": "gneg"
},
{
"id": "C1",
"start": 25630389,
"end": 34987514,
"type": "gpos66"
},
{
"id": "C2",
"start": 34987515,
"end": 38242166,
"type": "gneg"
},
{
"id": "C3",
"start": 38242167,
"end": 47599292,
"type": "gpos66"
},
{
"id": "cenp",
"start": 1001454,
"end": 2002906,
"type": "acen"
},
{
"id": "cenq",
"start": 2002907,
"end": 3004359,
"type": "acen"
},
{
"id": "D1",
"start": 47599293,
"end": 51667607,
"type": "gneg"
},
{
"id": "D2",
"start": 51667608,
"end": 58990576,
"type": "gpos33"
},
{
"id": "D3",
"start": 58990577,
"end": 61431566,
"type": "gneg"
},
{
"id": "tip",
"start": 1,
"end": 1001453,
"type": "tip"
}
]
},
"X": {
"size": 171031299,
"bands": [
{
"id": "A1.1",
"start": 3078866,
"end": 15772338,
"type": "gpos100"
},
{
"id": "A1.2",
"start": 15772339,
"end": 18236766,
"type": "gneg"
},
{
"id": "A1.3",
"start": 18236767,
"end": 21194079,
"type": "gpos33"
},
{
"id": "A2",
"start": 21194080,
"end": 28094478,
"type": "gneg"
},
{
"id": "A3.1",
"start": 28094479,
"end": 33516219,
"type": "gpos66"
},
{
"id": "A3.2",
"start": 33516220,
"end": 34501990,
"type": "gneg"
},
{
"id": "A3.3",
"start": 34501991,
"end": 39923731,
"type": "gpos66"
},
{
"id": "A4",
"start": 39923732,
"end": 47809901,
"type": "gneg"
},
{
"id": "A5",
"start": 47809902,
"end": 56188956,
"type": "gpos66"
},
{
"id": "A6",
"start": 56188957,
"end": 63089355,
"type": "gneg"
},
{
"id": "A7.1",
"start": 63089356,
"end": 69496866,
"type": "gpos66"
},
{
"id": "A7.2",
"start": 69496868,
"end": 70975524,
"type": "gneg"
},
{
"id": "A7.3",
"start": 70975525,
"end": 77383036,
"type": "gpos66"
},
{
"id": "B",
"start": 77383037,
"end": 82311892,
"type": "gneg"
},
{
"id": "C1",
"start": 82311893,
"end": 91183833,
"type": "gpos100"
},
{
"id": "C2",
"start": 91183834,
"end": 92169603,
"type": "gneg"
},
{
"id": "C3",
"start": 92169604,
"end": 101041544,
"type": "gpos100"
},
{
"id": "cenp",
"start": 1026289,
"end": 2052577,
"type": "acen"
},
{
"id": "cenq",
"start": 2052578,
"end": 3078865,
"type": "acen"
},
{
"id": "D",
"start": 101041545,
"end": 109913485,
"type": "gneg"
},
{
"id": "E1",
"start": 109913486,
"end": 120264082,
"type": "gpos100"
},
{
"id": "E2",
"start": 120264084,
"end": 121249853,
"type": "gneg"
},
{
"id": "E3",
"start": 121249854,
"end": 135050651,
"type": "gpos100"
},
{
"id": "F1",
"start": 135050652,
"end": 141458162,
"type": "gneg"
},
{
"id": "F2",
"start": 141458163,
"end": 148851447,
"type": "gpos33"
},
{
"id": "F3",
"start": 148851448,
"end": 156244730,
"type": "gneg"
},
{
"id": "F4",
"start": 156244731,
"end": 163638014,
"type": "gpos33"
},
{
"id": "F5",
"start": 163638015,
"end": 171031299,
"type": "gneg"
},
{
"id": "tip",
"start": 1,
"end": 1026288,
"type": "tip"
}
]
},
"Y": {
"size": 91744698,
"bands": [
{
"id": "A1",
"start": 5,
"end": 20642552,
"type": "gpos100"
},
{
"id": "A2",
"start": 20642557,
"end": 32684047,
"type": "gpos66"
},
{
"id": "B",
"start": 32684053,
"end": 45298941,
"type": "gpos33"
},
{
"id": "C1",
"start": 45298947,
"end": 54473414,
"type": "gpos100"
},
{
"id": "C2",
"start": 54473420,
"end": 61927667,
"type": "gpos33"
},
{
"id": "C3",
"start": 61927673,
"end": 72248949,
"type": "gpos100"
},
{
"id": "D",
"start": 72248955,
"end": 83143629,
"type": "gpos33"
},
{
"id": "E",
"start": 83143635,
"end": 91744698,
"type": "gpos66"
}
]
},
"MT": {
"size": 16299,
"bands": [
{
"start": 1,
"end": 16299
}
]
}
}; | RNAcentral/angularjs-genoverse | lib/Genoverse/js/genomes/mus_musculus.js | JavaScript | apache-2.0 | 53,885 |
/*
* Copyright © 2014 TU Berlin (emma@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage.mitos.operators;
import org.emmalanguage.mitos.util.TupleIntDouble;
import it.unimi.dsi.fastutil.ints.Int2DoubleMap;
//import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleRBTreeMap;
import java.util.function.Consumer;
/**
 * Groups {@link TupleIntDouble} elements by their int field (f0) and combines the double
 * values (f1) of elements sharing a key via the subclass-supplied {@link #reduceFunc}.
 * The reduced (key, value) pairs are emitted, in key order, when the input bag closes.
 *
 * <p>Not thread-safe: the map is confined to the operator's processing thread.
 */
public abstract class GroupBy0ReduceTupleIntDouble extends BagOperator<TupleIntDouble, TupleIntDouble> {

    //protected Int2DoubleOpenHashMap hm;
    // RB-tree map so the output in closeInBag is emitted in deterministic (sorted) key order.
    protected Int2DoubleRBTreeMap hm;

    @Override
    public void openOutBag() {
        super.openOutBag();
        hm = new Int2DoubleRBTreeMap();
        // Kept for backward compatibility: subclasses that probe the map directly may rely on
        // hm.get(...) returning this sentinel for absent keys. pushInElement below no longer
        // depends on it for absence detection.
        hm.defaultReturnValue(Double.MIN_VALUE);
    }

    @Override
    public void pushInElement(TupleIntDouble e, int logicalInputId) {
        super.pushInElement(e, logicalInputId);
        // Use an explicit containment check instead of comparing putIfAbsent's result against the
        // sentinel defaultReturnValue: a stored value that legitimately equals Double.MIN_VALUE
        // would otherwise be mistaken for "key was absent" and the reduce step silently skipped.
        if (hm.containsKey(e.f0)) {
            reduceFunc(e, hm.get(e.f0));
        } else {
            hm.put(e.f0, e.f1);
        }
    }

    /**
     * Combines the incoming element {@code e} with the value {@code g} already stored for its key.
     * Implementations are expected to write the combined result back into {@link #hm}.
     */
    protected abstract void reduceFunc(TupleIntDouble e, double g);

    @Override
    public void closeInBag(int inputId) {
        super.closeInBag(inputId);
        // Emit every reduced (key, value) pair, then release the map and close the output bag.
        //hm.int2DoubleEntrySet().fastForEach(new Consumer<Int2DoubleMap.Entry>() {
        hm.int2DoubleEntrySet().forEach(new Consumer<Int2DoubleMap.Entry>() {
            @Override
            public void accept(Int2DoubleMap.Entry e) {
                out.collectElement(TupleIntDouble.of(e.getIntKey(), e.getDoubleValue()));
            }
        });
        hm = null;
        out.closeBag();
    }
}
| emmalanguage/emma | emma-mitos/src/main/java/org/emmalanguage/mitos/operators/GroupBy0ReduceTupleIntDouble.java | Java | apache-2.0 | 2,114 |
package org.vertexium;
/**
 * Identifies a graph element (vertex or edge) by its {@link ElementType} together with its id.
 * The static factories produce the library-default implementation.
 */
public interface ElementId extends VertexiumObjectId {
    /** Creates an id referring to a vertex with the given id. */
    static ElementId vertex(String id) {
        return create(ElementType.VERTEX, id);
    }

    /** Creates an id referring to an edge with the given id. */
    static ElementId edge(String id) {
        return create(ElementType.EDGE, id);
    }

    /** Creates an id for the given element type and id. */
    static ElementId create(ElementType elementType, String id) {
        return new DefaultElementId(elementType, id);
    }

    /**
     * the type of the element.
     */
    ElementType getElementType();

    /**
     * id of the element.
     */
    String getId();
}
| visallo/vertexium | core/src/main/java/org/vertexium/ElementId.java | Java | apache-2.0 | 567 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.projectRoots.impl;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkModel;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.Arrays;
/**
 * An {@link SdkType} that can only be configured once another SDK it depends on exists
 * (e.g. an SDK layered on top of a JDK). The custom create UI checks the dependency and,
 * if it is missing, offers to create the dependency SDK first.
 *
 * @author Dmitry Avdeev
 */
public abstract class DependentSdkType extends SdkType {

  public DependentSdkType(@NonNls String name) {
    super(name);
  }

  /**
   * Checks if dependencies satisfied.
   */
  protected boolean checkDependency(SdkModel sdkModel) {
    // Satisfied as soon as any registered SDK is a valid dependency.
    return ContainerUtil.find(sdkModel.getSdks(), this::isValidDependency) != null;
  }

  /** Returns true if the given SDK satisfies this type's dependency requirement. */
  protected abstract boolean isValidDependency(Sdk sdk);

  /** Message shown to the user when no valid dependency SDK is configured. */
  public abstract String getUnsatisfiedDependencyMessage();

  @Override
  public boolean supportsCustomCreateUI() {
    return true;
  }

  @Override
  public void showCustomCreateUI(@NotNull final SdkModel sdkModel, @NotNull JComponent parentComponent, @NotNull final Consumer<Sdk> sdkCreatedCallback) {
    if (!checkDependency(sdkModel)) {
      // Offer to create the missing dependency first; bail out if the user declines
      // or the dependency creation is cancelled.
      if (Messages.showOkCancelDialog(parentComponent, getUnsatisfiedDependencyMessage(), "Cannot Create SDK", Messages.getWarningIcon()) != Messages.OK) {
        return;
      }

      if (fixDependency(sdkModel, sdkCreatedCallback) == null) {
        return;
      }
    }

    createSdkOfType(sdkModel, this, sdkCreatedCallback);
  }

  /** The SDK type this type depends on (e.g. a JDK). */
  public abstract SdkType getDependencyType();

  /**
   * Attempts to create the missing dependency SDK interactively.
   *
   * @return the created dependency SDK, or null if the user cancelled
   */
  protected Sdk fixDependency(SdkModel sdkModel, Consumer<Sdk> sdkCreatedCallback) {
    return createSdkOfType(sdkModel, getDependencyType(), sdkCreatedCallback);
  }

  /**
   * Prompts the user for an SDK home and creates an SDK of the given type there.
   *
   * @return the created SDK, or null if the user cancelled the home-directory chooser
   */
  protected static Sdk createSdkOfType(final SdkModel sdkModel,
                                       final SdkType sdkType,
                                       final Consumer<Sdk> sdkCreatedCallback) {
    final Ref<Sdk> result = new Ref<>(null);
    SdkConfigurationUtil.selectSdkHome(sdkType, home -> {
      String newSdkName = SdkConfigurationUtil.createUniqueSdkName(sdkType, home, Arrays.asList(sdkModel.getSdks()));
      final ProjectJdkImpl newJdk = new ProjectJdkImpl(newSdkName, sdkType);
      newJdk.setHomePath(home);

      sdkCreatedCallback.consume(newJdk);
      result.set(newJdk);
    });
    return result.get();
  }
}
| jk1/intellij-community | platform/lang-impl/src/com/intellij/openapi/projectRoots/impl/DependentSdkType.java | Java | apache-2.0 | 3,053 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gobblin.runtime.instance;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Service;
import com.google.common.util.concurrent.ServiceManager;
import com.typesafe.config.ConfigFactory;
import gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import gobblin.broker.SimpleScope;
import gobblin.broker.SharedResourcesBrokerFactory;
import gobblin.broker.SharedResourcesBrokerImpl;
import gobblin.broker.iface.SharedResourcesBroker;
import gobblin.instrumented.Instrumented;
import gobblin.metrics.GobblinMetrics;
import gobblin.metrics.MetricContext;
import gobblin.metrics.Tag;
import gobblin.runtime.api.Configurable;
import gobblin.runtime.api.GobblinInstanceEnvironment;
import gobblin.runtime.api.GobblinInstanceLauncher;
import gobblin.runtime.api.GobblinInstancePlugin;
import gobblin.runtime.api.GobblinInstancePluginFactory;
import gobblin.runtime.api.JobCatalog;
import gobblin.runtime.api.JobExecutionLauncher;
import gobblin.runtime.api.JobSpecScheduler;
import gobblin.runtime.job_catalog.FSJobCatalog;
import gobblin.runtime.job_catalog.ImmutableFSJobCatalog;
import gobblin.runtime.job_catalog.InMemoryJobCatalog;
import gobblin.runtime.job_exec.JobLauncherExecutionDriver;
import gobblin.runtime.plugins.email.EmailNotificationPlugin;
import gobblin.runtime.scheduler.ImmediateJobSpecScheduler;
import gobblin.runtime.scheduler.QuartzJobSpecScheduler;
import gobblin.runtime.std.DefaultConfigurableImpl;
import gobblin.util.ClassAliasResolver;
import gobblin.util.ConfigUtils;
/** A simple wrapper {@link DefaultGobblinInstanceDriverImpl} that will instantiate necessary
* sub-components (e.g. {@link JobCatalog}, {@link JobSpecScheduler}, {@link JobExecutionLauncher}
* and it will manage their lifecycle. */
public class StandardGobblinInstanceDriver extends DefaultGobblinInstanceDriverImpl {
public static final String INSTANCE_CFG_PREFIX = "gobblin.instance";
/** A comma-separated list of class names or aliases of {@link GobblinInstancePluginFactory} for
* plugins to be instantiated with this instance. */
public static final String PLUGINS_KEY = "plugins";
public static final String PLUGINS_FULL_KEY = INSTANCE_CFG_PREFIX + "." + PLUGINS_KEY;
private ServiceManager _subservices;
private final List<GobblinInstancePlugin> _plugins;
protected StandardGobblinInstanceDriver(String instanceName, Configurable sysConfig,
JobCatalog jobCatalog,
JobSpecScheduler jobScheduler, JobExecutionLauncher jobLauncher,
Optional<MetricContext> instanceMetricContext,
Optional<Logger> log,
List<GobblinInstancePluginFactory> plugins,
SharedResourcesBroker<GobblinScopeTypes> instanceBroker) {
super(instanceName, sysConfig, jobCatalog, jobScheduler, jobLauncher, instanceMetricContext, log, instanceBroker);
List<Service> componentServices = new ArrayList<>();
checkComponentService(getJobCatalog(), componentServices);
checkComponentService(getJobScheduler(), componentServices);
checkComponentService(getJobLauncher(), componentServices);
_plugins = createPlugins(plugins, componentServices);
if (componentServices.size() > 0) {
_subservices = new ServiceManager(componentServices);
}
}
private List<GobblinInstancePlugin> createPlugins(List<GobblinInstancePluginFactory> plugins,
List<Service> componentServices) {
List<GobblinInstancePlugin> res = new ArrayList<>();
for (GobblinInstancePluginFactory pluginFactory: plugins) {
Optional<GobblinInstancePlugin> plugin = createPlugin(this, pluginFactory, componentServices);
if (plugin.isPresent()) {
res.add(plugin.get());
}
}
return res;
}
static Optional<GobblinInstancePlugin> createPlugin(StandardGobblinInstanceDriver instance,
GobblinInstancePluginFactory pluginFactory, List<Service> componentServices) {
instance.getLog().info("Instantiating a plugin of type: " + pluginFactory);
try {
GobblinInstancePlugin plugin = pluginFactory.createPlugin(instance);
componentServices.add(plugin);
instance.getLog().info("Instantiated plugin: " + plugin);
return Optional.of(plugin);
}
catch (RuntimeException e) {
instance.getLog().warn("Failed to create plugin: " + e, e);
}
return Optional.absent();
}
@Override
protected void startUp() throws Exception {
getLog().info("Starting driver ...");
if (null != _subservices) {
getLog().info("Starting subservices");
_subservices.startAsync();
_subservices.awaitHealthy(getInstanceCfg().getStartTimeoutMs(), TimeUnit.MILLISECONDS);
getLog().info("All subservices have been started.");
}
else {
getLog().info("No subservices found.");
}
super.startUp();
}
private void checkComponentService(Object component, List<Service> componentServices) {
if (component instanceof Service) {
componentServices.add((Service)component);
}
}
@Override protected void shutDown() throws Exception {
getLog().info("Shutting down driver ...");
super.shutDown();
if (null != _subservices) {
getLog().info("Shutting down subservices ...");
_subservices.stopAsync();
_subservices.awaitStopped(getInstanceCfg().getShutdownTimeoutMs(), TimeUnit.MILLISECONDS);
getLog().info("All subservices have been shutdown.");
}
}
public static Builder builder() {
return new Builder();
}
/**
* A builder for StandardGobblinInstanceDriver instances. The goal is to be convention driven
* rather than configuration.
*
* <p>Conventions:
* <ul>
* <li> Logger uses the instance name as a category
* <li> Default implementations of JobCatalog, JobSpecScheduler, JobExecutionLauncher use the
* logger as their logger.
* </ul>
*
*/
public static class Builder implements GobblinInstanceEnvironment {
private static final AtomicInteger INSTANCE_COUNTER = new AtomicInteger(0);
private Optional<GobblinInstanceEnvironment> _instanceEnv =
Optional.<GobblinInstanceEnvironment>absent();
private Optional<String> _instanceName = Optional.absent();
private Optional<Logger> _log = Optional.absent();
private Optional<JobCatalog> _jobCatalog = Optional.absent();
private Optional<JobSpecScheduler> _jobScheduler = Optional.absent();
private Optional<JobExecutionLauncher> _jobLauncher = Optional.absent();
private Optional<MetricContext> _metricContext = Optional.absent();
private Optional<Boolean> _instrumentationEnabled = Optional.absent();
private Optional<SharedResourcesBroker<GobblinScopeTypes>> _instanceBroker = Optional.absent();
private List<GobblinInstancePluginFactory> _plugins = new ArrayList<>();
private final ClassAliasResolver<GobblinInstancePluginFactory> _aliasResolver =
new ClassAliasResolver<>(GobblinInstancePluginFactory.class);
public Builder(Optional<GobblinInstanceEnvironment> instanceLauncher) {
_instanceEnv = instanceLauncher;
}
/** Constructor with no Gobblin instance launcher */
public Builder() {
}
/** Constructor with a launcher */
public Builder(GobblinInstanceLauncher instanceLauncher) {
this();
withInstanceEnvironment(instanceLauncher);
}
public Builder withInstanceEnvironment(GobblinInstanceEnvironment instanceLauncher) {
Preconditions.checkNotNull(instanceLauncher);
_instanceEnv = Optional.of(instanceLauncher);
return this;
}
public Optional<GobblinInstanceEnvironment> getInstanceEnvironment() {
return _instanceEnv;
}
public String getDefaultInstanceName() {
if (_instanceEnv.isPresent()) {
return _instanceEnv.get().getInstanceName();
}
else {
return StandardGobblinInstanceDriver.class.getName() + "-" +
INSTANCE_COUNTER.getAndIncrement();
}
}
@Override
public String getInstanceName() {
if (! _instanceName.isPresent()) {
_instanceName = Optional.of(getDefaultInstanceName());
}
return _instanceName.get();
}
public Builder withInstanceName(String instanceName) {
_instanceName = Optional.of(instanceName);
return this;
}
public Logger getDefaultLog() {
return _instanceEnv.isPresent() ? _instanceEnv.get().getLog() :
LoggerFactory.getLogger(getInstanceName());
}
@Override
public Logger getLog() {
if (! _log.isPresent()) {
_log = Optional.of(getDefaultLog());
}
return _log.get();
}
public Builder withLog(Logger log) {
_log = Optional.of(log);
return this;
}
public JobCatalog getDefaultJobCatalog() {
return new InMemoryJobCatalog(this);
}
public JobCatalog getJobCatalog() {
if (! _jobCatalog.isPresent()) {
_jobCatalog = Optional.of(getDefaultJobCatalog());
}
return _jobCatalog.get();
}
public Builder withJobCatalog(JobCatalog jobCatalog) {
_jobCatalog = Optional.of(jobCatalog);
return this;
}
public Builder withInMemoryJobCatalog() {
return withJobCatalog(new InMemoryJobCatalog(this));
}
public Builder withFSJobCatalog() {
try {
return withJobCatalog(new FSJobCatalog(this));
} catch (IOException e) {
throw new RuntimeException("Unable to create FS Job Catalog");
}
}
public Builder withImmutableFSJobCatalog() {
try {
return withJobCatalog(new ImmutableFSJobCatalog(this));
} catch (IOException e) {
throw new RuntimeException("Unable to create FS Job Catalog");
}
}
public JobSpecScheduler getDefaultJobScheduler() {
return new ImmediateJobSpecScheduler(Optional.of(getLog()));
}
public JobSpecScheduler getJobScheduler() {
if (!_jobScheduler.isPresent()) {
_jobScheduler = Optional.of(getDefaultJobScheduler());
}
return _jobScheduler.get();
}
public Builder withJobScheduler(JobSpecScheduler jobScheduler) {
_jobScheduler = Optional.of(jobScheduler);
return this;
}
public Builder withImmediateJobScheduler() {
return withJobScheduler(new ImmediateJobSpecScheduler(Optional.of(getLog())));
}
public Builder withQuartzJobScheduler() {
return withJobScheduler(new QuartzJobSpecScheduler(this));
}
public JobExecutionLauncher getDefaultJobLauncher() {
JobLauncherExecutionDriver.Launcher res =
new JobLauncherExecutionDriver.Launcher().withGobblinInstanceEnvironment(this);
return res;
}
public JobExecutionLauncher getJobLauncher() {
if (! _jobLauncher.isPresent()) {
_jobLauncher = Optional.of(getDefaultJobLauncher());
}
return _jobLauncher.get();
}
public Builder withJobLauncher(JobExecutionLauncher jobLauncher) {
_jobLauncher = Optional.of(jobLauncher);
return this;
}
public Builder withMetricContext(MetricContext instanceMetricContext) {
_metricContext = Optional.of(instanceMetricContext);
return this;
}
@Override
public MetricContext getMetricContext() {
if (!_metricContext.isPresent()) {
_metricContext = Optional.of(getDefaultMetricContext());
}
return _metricContext.get();
}
public MetricContext getDefaultMetricContext() {
gobblin.configuration.State fakeState =
new gobblin.configuration.State(getSysConfig().getConfigAsProperties());
List<Tag<?>> tags = new ArrayList<>();
tags.add(new Tag<>(StandardMetrics.INSTANCE_NAME_TAG, getInstanceName()));
MetricContext res = Instrumented.getMetricContext(fakeState,
StandardGobblinInstanceDriver.class, tags);
return res;
}
public Builder withInstanceBroker(SharedResourcesBroker<GobblinScopeTypes> broker) {
_instanceBroker = Optional.of(broker);
return this;
}
public SharedResourcesBroker<GobblinScopeTypes> getInstanceBroker() {
if (!_instanceBroker.isPresent()) {
_instanceBroker = Optional.of(getDefaultInstanceBroker());
}
return _instanceBroker.get();
}
public SharedResourcesBroker<GobblinScopeTypes> getDefaultInstanceBroker() {
SharedResourcesBrokerImpl<GobblinScopeTypes> globalBroker =
SharedResourcesBrokerFactory.createDefaultTopLevelBroker(getSysConfig().getConfig(),
GobblinScopeTypes.GLOBAL.defaultScopeInstance());
return globalBroker.newSubscopedBuilder(new SimpleScope<>(GobblinScopeTypes.INSTANCE, getInstanceName())).build();
}
public StandardGobblinInstanceDriver build() {
Configurable sysConfig = getSysConfig();
return new StandardGobblinInstanceDriver(getInstanceName(), sysConfig, getJobCatalog(),
getJobScheduler(),
getJobLauncher(),
isInstrumentationEnabled() ? Optional.of(getMetricContext()) :
Optional.<MetricContext>absent(),
Optional.of(getLog()),
getPlugins(),
getInstanceBroker()
);
}
@Override public Configurable getSysConfig() {
return _instanceEnv.isPresent() ? _instanceEnv.get().getSysConfig() :
DefaultConfigurableImpl.createFromConfig(ConfigFactory.load());
}
public Builder withInstrumentationEnabled(boolean enabled) {
_instrumentationEnabled = Optional.of(enabled);
return this;
}
public boolean getDefaultInstrumentationEnabled() {
return GobblinMetrics.isEnabled(getSysConfig().getConfig());
}
@Override
public boolean isInstrumentationEnabled() {
if (!_instrumentationEnabled.isPresent()) {
_instrumentationEnabled = Optional.of(getDefaultInstrumentationEnabled());
}
return _instrumentationEnabled.get();
}
@Override public List<Tag<?>> generateTags(gobblin.configuration.State state) {
return Collections.emptyList();
}
@Override public void switchMetricContext(List<Tag<?>> tags) {
throw new UnsupportedOperationException();
}
@Override public void switchMetricContext(MetricContext context) {
throw new UnsupportedOperationException();
}
/**
* Returns the list of plugins as defined in the system configuration. These are the
* defined in the PLUGINS_FULL_KEY config option.
* The list also includes plugins that are automatically added by gobblin.
* */
public List<GobblinInstancePluginFactory> getDefaultPlugins() {
List<String> pluginNames =
ConfigUtils.getStringList(getSysConfig().getConfig(), PLUGINS_FULL_KEY);
List<GobblinInstancePluginFactory> pluginFactories = Lists.newArrayList();
// By default email notification plugin is added.
if (!ConfigUtils.getBoolean(getSysConfig().getConfig(), EmailNotificationPlugin.EMAIL_NOTIFICATIONS_DISABLED_KEY,
EmailNotificationPlugin.EMAIL_NOTIFICATIONS_DISABLED_DEFAULT)) {
pluginFactories.add(new EmailNotificationPlugin.Factory());
}
pluginFactories.addAll(Lists.transform(pluginNames, new Function<String, GobblinInstancePluginFactory>() {
@Override public GobblinInstancePluginFactory apply(String input) {
Class<? extends GobblinInstancePluginFactory> factoryClass;
try {
factoryClass = _aliasResolver.resolveClass(input);
return factoryClass.newInstance();
} catch (ClassNotFoundException|InstantiationException|IllegalAccessException e) {
throw new RuntimeException("Unable to instantiate plugin factory " + input + ": " + e, e);
}
}
}));
return pluginFactories;
}
public List<GobblinInstancePluginFactory> getPlugins() {
List<GobblinInstancePluginFactory> res = new ArrayList<>(getDefaultPlugins());
res.addAll(_plugins);
return res;
}
public Builder addPlugin(GobblinInstancePluginFactory pluginFactory) {
_plugins.add(pluginFactory);
return this;
}
}
  /**
   * Returns the plugins associated with this instance.
   *
   * <p>NOTE(review): this returns the internal mutable list directly, so callers
   * can modify instance state; confirm whether a defensive copy or unmodifiable
   * view is intended.
   */
  public List<GobblinInstancePlugin> getPlugins() {
    return _plugins;
  }
}
| chavdar/gobblin-1 | gobblin-runtime/src/main/java/gobblin/runtime/instance/StandardGobblinInstanceDriver.java | Java | apache-2.0 | 17,339 |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.rest.restmodel.causeway;
import com.fasterxml.jackson.annotation.JsonInclude;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* Author: Michal Szynkiewicz, michal.l.szynkiewicz@gmail.com
* Date: 8/25/16
* Time: 2:48 PM
*
*/
@Deprecated
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
@NoArgsConstructor
@AllArgsConstructor
public class BuildImportResultRest {
    /**
     * id of the pnc build record this import result refers to
     */
    private Integer buildRecordId;

    /**
     * build id assigned by brew for the imported build
     */
    private Integer brewBuildId;

    /**
     * link to the build in brew
     */
    private String brewBuildUrl;

    // Overall outcome of the import (accessors generated by Lombok @Data).
    private BuildImportStatus status;

    /**
     * global (build-level) error message, if the import failed as a whole
     */
    private String errorMessage;

    /**
     * list of per-artifact errors encountered during the import
     */
    private List<ArtifactImportError> errors;
}
| ruhan1/pnc | rest-model/src/main/java/org/jboss/pnc/rest/restmodel/causeway/BuildImportResultRest.java | Java | apache-2.0 | 1,612 |
#include <iostream>
#include "GameObject.h"
using namespace Physics;
void GameObject::init(string name, string particle, string entity)
{
	m_name = name;

	// Only bind names that actually exist in the owning managers.
	if (mp_PhysicsManager->hasParticle(particle))
	{
		m_particleName = particle;
	}
	if (mp_GraphicsManager->hasEntity(entity))
	{
		m_entityName = entity;
	}

	// Seed the cached position from the physics simulation.
	m_position = mp_PhysicsManager->getParticlePosition(m_particleName);
}
void GameObject::update()
{
	// Pull the latest simulated position from the physics engine...
	m_position = mp_PhysicsManager->getParticlePosition(m_particleName);

	// ...and forward it to the graphics engine for rendering.
	mp_GraphicsManager->updateEntityPosition(m_entityName, m_position.GLM());
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.shuffle;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import com.google.common.io.Files;
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo;
/**
* Manages some sort- and hash-based shuffle data, including the creation
* and cleanup of directories that can be read by the {@link ExternalShuffleBlockManager}.
*/
public class TestShuffleDataContext {
  /** Roots of the simulated executor-local directories; populated by {@link #create()}. */
  public final String[] localDirs;
  /** Number of "%02x"-named sub-directories created beneath each local dir. */
  public final int subDirsPerLocalDir;

  public TestShuffleDataContext(int numLocalDirs, int subDirsPerLocalDir) {
    this.localDirs = new String[numLocalDirs];
    this.subDirsPerLocalDir = subDirsPerLocalDir;
  }

  /** Creates the temporary local dirs and their hashed sub-directories. */
  public void create() {
    for (int i = 0; i < localDirs.length; i ++) {
      localDirs[i] = Files.createTempDir().getAbsolutePath();

      for (int p = 0; p < subDirsPerLocalDir; p ++) {
        new File(localDirs[i], String.format("%02x", p)).mkdirs();
      }
    }
  }

  /** Recursively deletes all local dirs created by {@link #create()}. */
  public void cleanup() {
    for (String localDir : localDirs) {
      deleteRecursively(new File(localDir));
    }
  }

  /** Creates reducer blocks in a sort-based data format within our local dirs. */
  public void insertSortShuffleData(int shuffleId, int mapId, byte[][] blocks) throws IOException {
    String blockId = "shuffle_" + shuffleId + "_" + mapId + "_0";

    OutputStream dataStream = null;
    DataOutputStream indexStream = null;
    try {
      dataStream = new FileOutputStream(
        ExternalShuffleBlockManager.getFile(localDirs, subDirsPerLocalDir, blockId + ".data"));
      indexStream = new DataOutputStream(new FileOutputStream(
        ExternalShuffleBlockManager.getFile(localDirs, subDirsPerLocalDir, blockId + ".index")));

      // The index file stores the cumulative byte offset of each block.
      long offset = 0;
      indexStream.writeLong(offset);
      for (byte[] block : blocks) {
        offset += block.length;
        dataStream.write(block);
        indexStream.writeLong(offset);
      }
    } finally {
      // BUG FIX: previously the streams leaked if any write above failed.
      if (dataStream != null) {
        dataStream.close();
      }
      if (indexStream != null) {
        indexStream.close();
      }
    }
  }

  /** Creates reducer blocks in a hash-based data format within our local dirs. */
  public void insertHashShuffleData(int shuffleId, int mapId, byte[][] blocks) throws IOException {
    for (int i = 0; i < blocks.length; i ++) {
      String blockId = "shuffle_" + shuffleId + "_" + mapId + "_" + i;
      Files.write(blocks[i],
        ExternalShuffleBlockManager.getFile(localDirs, subDirsPerLocalDir, blockId));
    }
  }

  /**
   * Creates an ExecutorShuffleInfo object based on the given shuffle manager which targets this
   * context's directories.
   */
  public ExecutorShuffleInfo createExecutorInfo(String shuffleManager) {
    return new ExecutorShuffleInfo(localDirs, subDirsPerLocalDir, shuffleManager);
  }

  /** Deletes a file, or a directory and everything beneath it. */
  private static void deleteRecursively(File f) {
    assert f != null;
    if (f.isDirectory()) {
      File[] children = f.listFiles();
      // listFiles() returns null on I/O error or if f was deleted concurrently.
      if (children != null) {
        for (File child : children) {
          deleteRecursively(child);
        }
      }
    }
    f.delete();
  }
}
| hengyicai/OnlineAggregationUCAS | network/shuffle/src/test/java/org/apache/spark/network/shuffle/TestShuffleDataContext.java | Java | apache-2.0 | 3,794 |
/*
* WSearchCsvEnum.java
* Created on 2013/06/28
*
* Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tubame.wsearch.models;
/**
* An enumeration that defines the item in the search results file.<br/>
*/
public enum WSearchCsvEnum {
    /**
     * Result (column 0)
     */
    TOKEN_INDEX_STATUS(0),
    /**
     * Category (column 1)
     */
    TOKEN_INDEX_CATEGORY(1),
    /**
     * Package (column 2)
     */
    TOKEN_INDEX_PACKAGE(2),
    /**
     * Porting the original library (column 3)
     */
    TOKEN_INDEX_SRC_LIBRARY(3),
    /**
     * Porting library (column 4)
     */
    TOKEN_INDEX_DEST_LIBRARY(4),
    /**
     * Class (column 5)
     */
    TOKEN_INDEX_CLAZZ(5),
    /**
     * Porting the original file (column 6)
     */
    TOKEN_INDEX_FILES(6),
    /**
     * Search corresponding line (column 7)
     */
    TOKEN_INDEX_HIT_LINE(7),
    /**
     * Result detail (column 8)
     */
    TOKEN_INDEX_DETAIL(8),
    /**
     * Remarks (column 9)
     */
    TOKEN_INDEX_NOTE(9);

    /**
     * CSV column Index value
     */
    private int index;

    /**
     * The maximum number of CSV delimiter.
     * NOTE(review): the value 8 does not match the 10 constants declared above
     * (indices 0-9); confirm whether this is intentional (e.g. only the first
     * columns are delimiter-separated) or stale.
     */
    public static final int CSV_COLUMN_NUM = 8;

    /**
     * Constructor.<br/>
     * Stores the CSV column index for this constant.<br/>
     *
     * @param index
     *            CSV column Index value
     */
    private WSearchCsvEnum(int index) {
        this.index = index;
    }

    /**
     * Get the index.<br/>
     *
     * @return index
     */
    public int getIndex() {
        return index;
    }

    /**
     * Set the index.<br/>
     * NOTE(review): enum constants are shared singletons, so mutating the index
     * here changes it globally for every user of the constant; confirm this
     * setter is actually needed.
     *
     * @param index
     *            index
     */
    public void setIndex(int index) {
        this.index = index;
    }
}
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson <endre.karlson@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import socket
import base64
import time
from threading import Lock
import six
import dns
import dns.exception
import dns.zone
import eventlet
from dns import rdatatype
from oslo_log import log as logging
from oslo_config import cfg
from designate import context
from designate import exceptions
from designate import objects
from designate.i18n import _LE
from designate.i18n import _LI
# Module-level logger shared by all helpers in this module.
LOG = logging.getLogger(__name__)

# Config options consumed by helpers here (e.g. the default XFR timeout used
# by do_axfr).  NOTE(review): registration of these options with cfg.CONF is
# not visible in this module - confirm the caller registers them.
util_opts = [
    cfg.IntOpt('xfr_timeout', help="Timeout in seconds for XFR's.", default=10)
]
class DNSMiddleware(object):
    """Base DNS middleware with some utility methods.

    Subclasses override ``process_request`` and/or ``process_response``;
    ``__call__`` chains them around the wrapped application.
    """

    def __init__(self, application):
        self.application = application

    def process_request(self, request):
        """Hook called before the wrapped application on each request.

        Return ``None`` to let the next application down the stack execute,
        or return a response to short-circuit processing here.
        """
        return None

    def process_response(self, response):
        """Hook called with the application's response; may replace it."""
        return response

    def __call__(self, request):
        short_circuit = self.process_request(request)
        if short_circuit:
            return short_circuit
        return self.process_response(self.application(request))

    def _build_error_response(self):
        # Minimal FORMERR reply used when a request cannot be parsed/verified.
        response = dns.message.make_response(
            dns.message.make_query('unknown', dns.rdatatype.A))
        response.set_rcode(dns.rcode.FORMERR)
        return response
class SerializationMiddleware(DNSMiddleware):
    """DNS Middleware to serialize/deserialize DNS Packets.

    Parses raw wire-format requests into dnspython message objects for the
    wrapped application and serializes its responses back to wire format.
    Requests that cannot be parsed or TSIG-verified are answered with a
    FORMERR response.
    """

    def __init__(self, application, tsig_keyring=None):
        self.application = application
        self.tsig_keyring = tsig_keyring

    def __call__(self, request):
        # Generate the initial context. This may be updated by other middleware
        # as we learn more information about the Request.
        ctxt = context.DesignateContext.get_admin_context(all_tenants=True)
        error_response = None

        try:
            message = dns.message.from_wire(request['payload'],
                                            self.tsig_keyring)

            if message.had_tsig:
                LOG.debug('Request signed with TSIG key: %s', message.keyname)

            # Create + Attach the initial "environ" dict. This is similar to
            # the environ dict used in typical WSGI middleware.
            message.environ = {
                'context': ctxt,
                'addr': request['addr'],
            }

        except dns.message.UnknownTSIGKey:
            LOG.error(_LE("Unknown TSIG key from %(host)s:"
                          "%(port)d") % {'host': request['addr'][0],
                                         'port': request['addr'][1]})

            error_response = self._build_error_response()

        except dns.tsig.BadSignature:
            LOG.error(_LE("Invalid TSIG signature from %(host)s:"
                          "%(port)d") % {'host': request['addr'][0],
                                         'port': request['addr'][1]})

            error_response = self._build_error_response()

        except dns.exception.DNSException:
            LOG.error(_LE("Failed to deserialize packet from %(host)s:"
                          "%(port)d") % {'host': request['addr'][0],
                                         'port': request['addr'][1]})

            error_response = self._build_error_response()

        except Exception:
            LOG.exception(_LE("Unknown exception deserializing packet "
                              "from %(host)s %(port)d") %
                          {'host': request['addr'][0],
                           'port': request['addr'][1]})

            error_response = self._build_error_response()

        else:
            # Hand the Deserialized packet onto the Application
            for response in self.application(message):
                # Serialize and return the response if present
                if isinstance(response, dns.message.Message):
                    yield response.to_wire(max_size=65535)

                elif isinstance(response, dns.renderer.Renderer):
                    yield response.get_wire()
            return

        # BUG FIX: previously the error response was built on every failure
        # path but never yielded, so malformed/unverified requests received
        # no reply at all. Serialize and send the FORMERR response.
        yield error_response.to_wire(max_size=65535)
class TsigInfoMiddleware(DNSMiddleware):
    """Middleware which looks up the information available for a TsigKey"""

    def __init__(self, application, storage):
        super(TsigInfoMiddleware, self).__init__(application)
        self.storage = storage

    def process_request(self, request):
        # Unsigned requests carry no key to look up.
        if not request.had_tsig:
            return None

        try:
            keyname = request.keyname.to_text(True)
            tsigkey = self.storage.find_tsigkey(
                context.get_current(), {'name': keyname})
        except exceptions.TsigKeyNotFound:
            # This should never happen, as we just validated the key.. Except
            # for race conditions..
            return self._build_error_response()

        # Make the key (and its id, on the request context) available to
        # downstream handlers.
        request.environ['tsigkey'] = tsigkey
        request.environ['context'].tsigkey_id = tsigkey.id

        return None
class TsigKeyring(object):
    """Implements the DNSPython KeyRing API, backed by the Designate DB"""

    def __init__(self, storage):
        self.storage = storage

    def __getitem__(self, key):
        return self.get(key)

    def get(self, key, default=None):
        """Look up the shared secret for a TSIG key name.

        :param key: dns.name.Name-like object identifying the key
        :param default: value returned when the key is unknown
        :returns: the decoded secret, or ``default``
        """
        try:
            criterion = {'name': key.to_text(True)}

            tsigkey = self.storage.find_tsigkey(
                context.get_current(), criterion)

            # base64.decodestring() is deprecated and was removed in
            # Python 3.9; b64decode is the supported equivalent (it likewise
            # tolerates embedded newlines in the stored secret).
            return base64.b64decode(tsigkey.secret)

        except exceptions.TsigKeyNotFound:
            return default
class ZoneLock(object):
    """A Lock across all zones that enforces a rate limit on NOTIFYs.

    ``acquire`` grants the per-zone lock at most once every ``delay``
    seconds; ``release`` frees it early.
    """

    def __init__(self, delay):
        self.lock = Lock()   # guards self.data
        self.data = {}       # zone name -> time the lock was last granted
        self.delay = delay   # minimum seconds between grants per zone

    def acquire(self, zone):
        """Try to take the rate-limit lock for ``zone``.

        :returns: True if the caller may proceed, False if another NOTIFY
            for the same zone was granted less than ``delay`` seconds ago.
        """
        with self.lock:
            # If no one holds the lock for the zone, grant it
            if zone not in self.data:
                self.data[zone] = time.time()
                return True

            # Otherwise, get the time that it was locked
            locktime = self.data[zone]
            now = time.time()

            period = now - locktime

            # If it has been locked for longer than the allowed period
            # give the lock to the new requester
            if period > self.delay:
                self.data[zone] = now
                return True

            # BUG FIX: the message previously read "releaesed" and ran the
            # two string fragments together without a separating space.
            LOG.debug('Lock for %(zone)s can\'t be released for %(period)s '
                      'seconds' % {'zone': zone,
                                   'period': str(self.delay - period)})

            # Don't grant the lock for the zone
            return False

    def release(self, zone):
        # Release the lock; releasing a zone that was never (or already)
        # released is harmless.
        with self.lock:
            try:
                self.data.pop(zone)
            except KeyError:
                pass
class LimitNotifyMiddleware(DNSMiddleware):
    """Middleware that rate limits NOTIFYs to the Agent"""

    def __init__(self, application):
        super(LimitNotifyMiddleware, self).__init__(application)

        self.delay = cfg.CONF['service:agent'].notify_delay
        self.locker = ZoneLock(self.delay)

    def process_request(self, request):
        # Anything other than a NOTIFY passes straight through.
        if request.opcode() != dns.opcode.NOTIFY:
            return None

        zone_name = request.question[0].name.to_text()

        if not self.locker.acquire(zone_name):
            # An update for this zone is already in flight; answer
            # authoritatively so the sender stops retrying.
            LOG.debug('Threw away NOTIFY for %(zone)s, already '
                      'working on an update.' % {'zone': zone_name})
            response = dns.message.make_response(request)
            # Provide an authoritative answer
            response.flags |= dns.flags.AA
            return (response,)

        # Hold the lock for the configured delay before letting the NOTIFY
        # through, then release it for the next one.
        time.sleep(self.delay)
        self.locker.release(zone_name)
        return None
def from_dnspython_zone(dnspython_zone):
    """Build a designate Domain object from a dnspython zone."""
    # dnspython never builds a zone with more than one SOA, even if we give
    # it a zonefile that contains more than one
    soa = dnspython_zone.get_rdataset(dnspython_zone.origin, 'SOA')
    if soa is None:
        raise exceptions.BadRequest('An SOA record is required')

    # The SOA RNAME encodes the admin mailbox: the first label is the
    # local part of the address.
    email = soa[0].rname.to_text().rstrip('.').replace('.', '@', 1)

    zone = objects.Domain(
        name=dnspython_zone.origin.to_text(),
        email=email,
        ttl=soa.ttl,
        serial=soa[0].serial,
        retry=soa[0].retry,
        expire=soa[0].expire)

    zone.recordsets = dnspyrecords_to_recordsetlist(dnspython_zone.nodes)
    return zone
def dnspyrecords_to_recordsetlist(dnspython_records):
    """Convert a dnspython name->node mapping into a designate RecordList."""
    rrsets = objects.RecordList()

    for rname in six.iterkeys(dnspython_records):
        for rdataset in dnspython_records[rname]:
            converted = dnspythonrecord_to_recordset(rname, rdataset)
            if converted is not None:
                rrsets.append(converted)

    return rrsets
def dnspythonrecord_to_recordset(rname, rdataset):
    """Convert one dnspython rdataset into a designate RecordSet."""
    kwargs = {
        'name': rname.to_text(),
        'type': rdatatype.to_text(rdataset.rdtype),
    }
    # A TTL of 0 means "unspecified"; leave the attribute unset in that case.
    if rdataset.ttl != 0:
        kwargs['ttl'] = rdataset.ttl

    rrset = objects.RecordSet(**kwargs)
    rrset.records = objects.RecordList()

    for rdata in rdataset:
        rrset.records.append(objects.Record(data=rdata.to_text()))

    return rrset
def bind_tcp(host, port, tcp_backlog):
    """Create, configure and bind a listening TCP socket."""
    LOG.info(_LI('Opening TCP Listening Socket on %(host)s:%(port)d') %
             {'host': host, 'port': port})
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

    try:
        # NOTE: Linux supports socket.SO_REUSEPORT only in 3.9 and later
        # releases; silently skip it elsewhere.
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    except Exception:
        pass

    sock.setblocking(True)
    sock.bind((host, port))
    sock.listen(tcp_backlog)

    return sock
def bind_udp(host, port):
    """Create, configure and bind a UDP socket."""
    LOG.info(_LI('Opening UDP Listening Socket on %(host)s:%(port)d') %
             {'host': host, 'port': port})
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    try:
        # NOTE: Linux supports socket.SO_REUSEPORT only in 3.9 and later
        # releases; silently skip it elsewhere.
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    except Exception:
        pass

    sock.setblocking(True)
    sock.bind((host, port))

    return sock
def do_axfr(zone_name, servers, timeout=None, source=None):
    """
    Performs an AXFR for a given zone name.

    Tries each candidate server (in random order) until one transfer
    succeeds; raises ``exceptions.XFRFailure`` if none do.

    :param zone_name: name of the zone to transfer
    :param servers: list of dicts with 'host' and 'port' keys
    :param timeout: overall per-server deadline in seconds (defaults to
        the ``[service:mdns] xfr_timeout`` config option)
    :param source: optional source address for the query
    :returns: the transferred dns.zone.Zone (non-relativized)
    """
    random.shuffle(servers)
    timeout = timeout or cfg.CONF["service:mdns"].xfr_timeout

    xfr = None
    for srv in servers:
        # Overall per-server deadline; note dns.query.xfr below also uses a
        # separate per-operation timeout of 1 second.
        # NOTE(review): confirm the hard-coded timeout=1 is intentional
        # rather than the configured ``timeout``.
        to = eventlet.Timeout(timeout)
        log_info = {'name': zone_name, 'host': srv}
        try:
            LOG.info(_LI("Doing AXFR for %(name)s from %(host)s") % log_info)

            xfr = dns.query.xfr(srv['host'], zone_name, relativize=False,
                                timeout=1, port=srv['port'], source=source)
            raw_zone = dns.zone.from_xfr(xfr, relativize=False)
            break
        except eventlet.Timeout as t:
            if t == to:
                msg = _LE("AXFR timed out for %(name)s from %(host)s")
                LOG.error(msg % log_info)
                continue
            # NOTE(review): a foreign eventlet.Timeout (t != to) is silently
            # swallowed here - confirm it should not be re-raised.
        except dns.exception.FormError:
            msg = _LE("Domain %(name)s is not present on %(host)s."
                      "Trying next server.")
            LOG.error(msg % log_info)
        except socket.error:
            msg = _LE("Connection error when doing AXFR for %(name)s from "
                      "%(host)s")
            LOG.error(msg % log_info)
        except Exception:
            msg = _LE("Problem doing AXFR %(name)s from %(host)s. "
                      "Trying next server.")
            LOG.exception(msg % log_info)
        finally:
            # NOTE(review): ``continue`` inside ``finally`` is a SyntaxError
            # before Python 3.8 and would also override the ``break`` above,
            # making the loop's else clause always raise; this looks like an
            # indentation artifact - confirm against upstream (the
            # ``continue`` likely belongs at loop level, after the finally).
            to.cancel()
            continue
    else:
        msg = _LE("XFR failed for %(name)s. No servers in %(servers)s was "
                  "reached.")
        raise exceptions.XFRFailure(
            msg % {"name": zone_name, "servers": servers})

    LOG.debug("AXFR Successful for %s" % raw_zone.origin.to_text())

    return raw_zone
| tonyli71/designate | designate/dnsutils.py | Python | apache-2.0 | 13,265 |
/*
* Copyright 2010-2013 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.payment.invoice;
import java.util.UUID;
import org.killbill.billing.ObjectType;
import org.killbill.billing.account.api.Account;
import org.killbill.billing.account.api.AccountApiException;
import org.killbill.billing.account.api.AccountInternalApi;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.events.ControlTagDeletionInternalEvent;
import org.killbill.billing.osgi.api.OSGIServiceRegistration;
import org.killbill.billing.payment.core.PaymentProcessor;
import org.killbill.billing.routing.plugin.api.PaymentRoutingPluginApi;
import org.killbill.billing.util.callcontext.CallOrigin;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.callcontext.UserType;
import org.killbill.billing.util.tag.ControlTagType;
import org.killbill.clock.Clock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Inject;
/**
 * Listens for control-tag deletion events and, when the AUTO_PAY_OFF tag is
 * removed from an account, triggers processing of that account's unpaid
 * payments via the invoice payment control plugin.
 */
public class PaymentTagHandler {

    private static final Logger log = LoggerFactory.getLogger(PaymentTagHandler.class);

    // NOTE(review): clock is injected but unused in this class - confirm
    // whether it can be removed from the constructor signature safely.
    private final Clock clock;
    private final AccountInternalApi accountApi;
    private final PaymentProcessor paymentProcessor;
    private final InternalCallContextFactory internalCallContextFactory;
    private final OSGIServiceRegistration<PaymentRoutingPluginApi> paymentControlPluginRegistry;
    private final PaymentRoutingPluginApi invoicePaymentControlPlugin;

    @Inject
    public PaymentTagHandler(final Clock clock,
                             final AccountInternalApi accountApi,
                             final PaymentProcessor paymentProcessor,
                             final OSGIServiceRegistration<PaymentRoutingPluginApi> paymentControlPluginRegistry,
                             final InternalCallContextFactory internalCallContextFactory) {
        this.clock = clock;
        this.accountApi = accountApi;
        this.paymentProcessor = paymentProcessor;
        this.paymentControlPluginRegistry = paymentControlPluginRegistry;
        this.invoicePaymentControlPlugin = paymentControlPluginRegistry.getServiceForName(InvoicePaymentRoutingPluginApi.PLUGIN_NAME);
        this.internalCallContextFactory = internalCallContextFactory;
    }

    /**
     * Bus handler: reacts to deletion of the AUTO_PAY_OFF control tag on an
     * account by processing that account's pending payments.
     */
    @Subscribe
    public void process_AUTO_PAY_OFF_removal(final ControlTagDeletionInternalEvent event) {
        if (event.getTagDefinition().getName().equals(ControlTagType.AUTO_PAY_OFF.toString()) && event.getObjectType() == ObjectType.ACCOUNT) {
            final UUID accountId = event.getObjectId();
            processUnpaid_AUTO_PAY_OFF_payments(accountId, event.getSearchKey1(), event.getSearchKey2(), event.getUserToken());
        }
    }

    private void processUnpaid_AUTO_PAY_OFF_payments(final UUID accountId, final Long accountRecordId, final Long tenantRecordId, final UUID userToken) {
        try {
            final InternalCallContext internalCallContext = internalCallContextFactory.createInternalCallContext(tenantRecordId, accountRecordId,
                                                                                                                 "PaymentRequestProcessor", CallOrigin.INTERNAL, UserType.SYSTEM, userToken);
            final Account account = accountApi.getAccountById(accountId, internalCallContext);
            ((InvoicePaymentRoutingPluginApi) invoicePaymentControlPlugin).process_AUTO_PAY_OFF_removal(account, internalCallContext);
        } catch (AccountApiException e) {
            // Use SLF4J parameterized logging instead of String.format; the
            // original message also contained a duplicated word ("process process").
            log.warn("Failed to process removal of AUTO_PAY_OFF for account {}", accountId, e);
        }
    }
}
| kares/killbill | payment/src/main/java/org/killbill/billing/payment/invoice/PaymentTagHandler.java | Java | apache-2.0 | 4,302 |
package org.jboss.resteasy.examples.oauth;
import javax.ws.rs.core.Application;
import java.util.HashSet;
import java.util.Set;
/**
 * JAX-RS {@link Application} exposing the OAuth consumer and service
 * provider resources as singletons.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision$
 */
public class OAuthApplication extends Application
{
   HashSet<Object> singletons = new HashSet<Object>();

   public OAuthApplication()
   {
      // Register the two resource instances served by this application.
      singletons.add(new ConsumerResource());
      singletons.add(new ServiceProviderResource());
   }

   @Override
   public Set<Class<?>> getClasses()
   {
      // No per-request resource classes; everything is registered as a singleton.
      return new HashSet<Class<?>>();
   }

   @Override
   public Set<Object> getSingletons()
   {
      return singletons;
   }
}
| raphaelning/resteasy-client-android | jaxrs/examples/oauth1-examples/oauth/src/main/java/org/jboss/resteasy/examples/oauth/OAuthApplication.java | Java | apache-2.0 | 697 |
#region License
/*
* Copyright (c) Lightstreamer Srl
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion License
using Lightstreamer.DotNet.Client;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Windows.ApplicationModel;
using Windows.ApplicationModel.Activation;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.System.Threading;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
namespace WinRTStockListDemo
{
    /// <summary>
    /// Application entry point for the WinRT StockList demo. Owns the single
    /// LightstreamerClient instance and serializes start/stop/reconnect
    /// requests through a monotonically increasing "phase" counter: every
    /// start/stop bumps the phase, and in-flight work checks its captured
    /// phase before acting, so stale operations silently abort.
    /// </summary>
    sealed partial class App : Application
    {
        private const string pushServerHost = "http://push.lightstreamer.com"; //internal note: switching to SSL requires changing the cryptography declaration on the windows store
        //private const string pushServerHost = "http://localhost:8080";

        // Item/field schema for the demo subscription.
        public static string[] items = {"item1", "item2", "item3", "item4", "item5",
            "item6", "item7", "item8", "item9", "item10", "item11", "item12", "item13",
            "item14", "item15"};
        public static string[] fields = { "stock_name", "last_price", "time", "pct_change", "bid_quantity", "bid", "ask", "ask_quantity", "min", "max", "ref_price", "open_price" };

        // Guards the phase counter below.
        private static Object ConnLock = new Object();
        // Generation counter: incremented on every start/stop request.
        private static int phase = 0;
        // Exponential-backoff delay (seconds) for reconnect attempts.
        private static int lastDelay = 1;
        private static ILightstreamerListener listener = null;

        public static LightstreamerClient client = new LightstreamerClient(items, fields);

        public App()
        {
            InitializeComponent();
            // NOTE(review): the Suspending event is wired to OnResuming (an
            // empty handler) while OnSuspending - which takes and completes
            // the deferral - is never subscribed. This looks like a handler
            // mix-up; confirm the intended wiring.
            this.Suspending += OnResuming;
        }

        protected override void OnLaunched(LaunchActivatedEventArgs args)
        {
            Frame rootFrame = Window.Current.Content as Frame;

            // Do not repeat app initialization when the Window already has content,
            // just ensure that the window is active
            if (rootFrame == null)
            {
                // Create a Frame to act as the navigation context and navigate to the first page
                rootFrame = new Frame();

                // Place the frame in the current Window
                Window.Current.Content = rootFrame;
            }

            if (rootFrame.Content == null)
            {
                // This application has only one page, we always navigate to it
                if (!rootFrame.Navigate(typeof(MainPage), args.Arguments))
                {
                    throw new Exception("Failed to create initial page");
                }
            }

            // Ensure the current window is active
            Window.Current.Activate();
        }

        // Intentionally empty; see the NOTE in the constructor about the
        // Suspending subscription.
        private void OnResuming(object sender, SuspendingEventArgs e) {
        }

        private void OnSuspending(object sender, SuspendingEventArgs e)
        {
            var deferral = e.SuspendingOperation.GetDeferral();
            //we only need to store one single boolean about the status of the application, We do so in the Start and Stop method each time such methods are called
            deferral.Complete();
        }

        // HANDLE CONNECTION

        /// <summary>Returns true iff ph is still the current generation.</summary>
        public static Boolean checkPhase(int ph)
        {
            lock (ConnLock)
            {
                return ph == phase;
            }
        }

        // Waits with exponential backoff (doubling lastDelay), polling network
        // availability once per second, then asks the listener to reconnect.
        private async static void PauseAndRetry(int ph, Exception ee)
        {
            Boolean waitingNet = false;
            lastDelay *= 2;

            // Probably a connection issue, ask myself to respawn
            for (int i = lastDelay; i > 0; i--)
            {
                if (!checkPhase(ph))
                {
                    return;
                }

                if (!System.Net.NetworkInformation.NetworkInterface.GetIsNetworkAvailable())
                {
                    waitingNet = true;
                    listener.OnStatusChange(ph, LightstreamerConnectionHandler.CONNECTING, "Network unavailble, next check in " + i + " seconds");
                }
                else if (waitingNet)
                {
                    // Network just came back: reconnect immediately.
                    listener.OnReconnectRequest(ph);
                    return;
                }
                else
                {
                    listener.OnStatusChange(ph, LightstreamerConnectionHandler.CONNECTING, "Connection failed, retrying in " + i + " seconds");
                }

                await Task.Delay(1000);
            }

            listener.OnReconnectRequest(ph);
        }

        internal static void SetListener(ILightstreamerListener _listener)
        {
            listener = _listener;
        }

        // Connects and subscribes, aborting at each step if the phase has
        // moved on; connection failures are routed through PauseAndRetry.
        private async static void Start(int ph)
        {
            if (!checkPhase(ph))
            {
                return;
            }

            Windows.Storage.ApplicationDataContainer settings = Windows.Storage.ApplicationData.Current.LocalSettings;
            settings.Values["started"] = "true";

            while (listener == null)
            {
                //or we may use a different listener that will pass the received values to
                //the front-end once the front-end is ready
                await Task.Delay(500);
                if (!checkPhase(ph))
                {
                    return;
                }
            }

            if (!System.Net.NetworkInformation.NetworkInterface.GetIsNetworkAvailable())
            {
                PauseAndRetry(ph, null);
                return;
            }

            try
            {
                if (!checkPhase(ph))
                {
                    return;
                }
                listener.OnStatusChange(ph, LightstreamerConnectionHandler.CONNECTING, "Connecting to " + pushServerHost);
                client.Start(pushServerHost, phase, listener);
                // Successful connection resets the backoff.
                lastDelay = 1;

                if (!checkPhase(ph))
                {
                    return;
                }
                client.Subscribe(ph, listener);
            }
            catch (PushConnException pce)
            {
                PauseAndRetry(ph, pce);
            }
            catch (PushUserException pce)
            {
                PauseAndRetry(ph, pce);
            }
            catch (SubscrException se)
            {
                PauseAndRetry(ph, se);
            }
        }

        private static void Stop(int ph)
        {
            if (!checkPhase(ph))
            {
                return;
            }

            Windows.Storage.ApplicationDataContainer settings = Windows.Storage.ApplicationData.Current.LocalSettings;
            settings.Values["started"] = "false";

            client.Stop();
            if (listener != null)
            {
                listener.OnStatusChange(ph, LightstreamerConnectionHandler.DISCONNECTED, "Disconnected");
            }
        }

        // NOTE(review): declared async but contains no await (the thread-pool
        // work item is fire-and-forget), which triggers compiler warning
        // CS1998 - confirm whether the async modifier can be dropped.
        async public static void SpawnLightstreamerClientStart()
        {
            // Bump the generation so any in-flight Start/Stop aborts.
            int tup;
            lock (ConnLock)
            {
                tup = ++phase;
            }

            ThreadPool.RunAsync((IAsyncAction operation) =>
            {
                Start(tup);
            });
        }

        // NOTE(review): same async-without-await pattern as above.
        async public static void SpawnLightstreamerClientStop()
        {
            int tup;
            lock (ConnLock)
            {
                tup = ++phase;
            }

            ThreadPool.RunAsync((IAsyncAction operation) =>
            {
                Stop(tup);
            });
        }

        /// <summary>Starts or stops the client based on wantsConnection.</summary>
        public static void StartStop(Boolean wantsConnection, Boolean startup)
        {
            lastDelay = 1;
            // This event triggers LightStreamer Client start/stop.
            if (!wantsConnection)
            {
                // stop
                App.SpawnLightstreamerClientStop();
            }
            else
            {
                // start
                App.SpawnLightstreamerClientStart();
            }
        }
    }
}
| Lightstreamer/Lightstreamer-example-StockList-client-winrt | App.xaml.cs | C# | apache-2.0 | 8,659 |
/*
* Copyright (c) 2013-2019 Metin Kale
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metinkale.prayer.times.gson;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import java.lang.reflect.Type;
public class BooleanSerializer implements JsonSerializer<Boolean>, JsonDeserializer<Boolean> {
@Nullable
@Override
public JsonElement serialize(Boolean arg0, Type arg1, JsonSerializationContext arg2) {
return arg0 ? new JsonPrimitive(1) : new JsonPrimitive(0);
}
@NonNull
@Override
public Boolean deserialize(@NonNull JsonElement arg0, Type arg1, JsonDeserializationContext arg2) throws JsonParseException {
return arg0.getAsInt() == 1;
}
} | metinkale38/prayer-times-android | features/times/src/main/java/com/metinkale/prayer/times/gson/BooleanSerializer.java | Java | apache-2.0 | 1,540 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.shell.commands;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.ambari.groovy.client.AmbariClient;
import org.apache.ambari.shell.completion.ConfigType;
import org.apache.ambari.shell.model.AmbariContext;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class ConfigCommandsTest {

  private static final String CORE_SITE = "core-site";

  @InjectMocks
  private ConfigCommands configCommands;

  @Mock
  private AmbariClient client;

  @Mock
  private AmbariContext context;

  /** Showing a config type fetches the service config map for that type. */
  @Test
  @SuppressWarnings("unchecked") // mock(Map.class) is unavoidably raw-typed
  public void testShowConfig() {
    ConfigType configType = mock(ConfigType.class);
    Map<String, Map<String, String>> mockResult = mock(Map.class);
    when(configType.getName()).thenReturn(CORE_SITE);
    when(client.getServiceConfigMap(anyString())).thenReturn(mockResult);
    when(mockResult.get(CORE_SITE)).thenReturn(new HashMap<String, String>());

    configCommands.showConfig(configType);

    verify(client).getServiceConfigMap(CORE_SITE);
  }

  /** Setting a config from an XML file parses the file into key/value pairs. */
  @Test
  public void testSetConfigForFile() throws IOException {
    ConfigType configType = mock(ConfigType.class);
    File file = new File("src/test/resources/core-site.xml");
    when(configType.getName()).thenReturn(CORE_SITE);

    configCommands.setConfig(configType, "", file);

    // Expected pairs mirror the contents of the test fixture XML.
    Map<String, String> config = new HashMap<String, String>();
    config.put("fs.trash.interval", "350");
    config.put("ipc.client.connection.maxidletime", "30000");
    verify(client).modifyConfiguration(CORE_SITE, config);
  }

  /** Modifying one key rewrites the full config with only that key changed. */
  @Test
  @SuppressWarnings("unchecked") // mock(Map.class) is unavoidably raw-typed
  public void testModifyConfig() throws IOException {
    ConfigType configType = mock(ConfigType.class);
    Map<String, Map<String, String>> mockResult = mock(Map.class);
    Map<String, String> config = new HashMap<String, String>();
    config.put("fs.trash.interval", "350");
    config.put("ipc.client.connection.maxidletime", "30000");
    when(configType.getName()).thenReturn(CORE_SITE);
    when(mockResult.get(CORE_SITE)).thenReturn(config);
    when(client.getServiceConfigMap(CORE_SITE)).thenReturn(mockResult);

    configCommands.modifyConfig(configType, "fs.trash.interval", "510");

    Map<String, String> config2 = new HashMap<String, String>();
    config2.put("fs.trash.interval", "510");
    config2.put("ipc.client.connection.maxidletime", "30000");
    verify(client).modifyConfiguration(CORE_SITE, config2);
  }
}
| radicalbit/ambari | ambari-shell/ambari-groovy-shell/src/test/java/org/apache/ambari/shell/commands/ConfigCommandsTest.java | Java | apache-2.0 | 3,539 |
/**
* This file is part of Simple Scrobbler.
* <p>
* https://github.com/simple-last-fm-scrobbler/sls
* <p>
* Copyright 2011 Simple Scrobbler Team
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adam.aslfms.util.enums;
import android.content.Context;
import android.util.Log;
import com.adam.aslfms.R;
import java.util.HashMap;
import java.util.Map;
public enum AdvancedOptions {
    // the values below for SAME will be ignored (it delegates to the
    // battery profile at runtime)
    SAME_AS_BATTERY(
            "ao_same_as_battery", true, true, true, AdvancedOptionsWhen.AFTER_1, true, NetworkOptions.ANY, true,
            R.string.advanced_options_type_same_as_battery_name),
    STANDARD(
            "ao_standard", true, true, true, AdvancedOptionsWhen.AFTER_1, true, NetworkOptions.ANY, true,
            R.string.advanced_options_type_standard_name),
    // not available for plugged in
    BATTERY_SAVING(
            "ao_battery", true, true, false, AdvancedOptionsWhen.AFTER_10, true, NetworkOptions.ANY, false,
            R.string.advanced_options_type_battery_name),
    // the values below for CUSTOM will be ignored (user-defined values
    // are read from settings instead)
    CUSTOM(
            "ao_custom", true, true, true, AdvancedOptionsWhen.AFTER_1, true, NetworkOptions.ANY, true,
            R.string.advanced_options_type_custom_name);

    // Persisted settings key identifying this option.
    private final String settingsVal;
    private final boolean enableActiveApp;
    private final boolean enableScrobbling;
    private final boolean enableNp;
    private final AdvancedOptionsWhen when;
    private final boolean alsoOnComplete;
    private final NetworkOptions networkOptions;
    private final boolean roaming;
    // String resource id for the human-readable option name.
    private final int nameRID;

    AdvancedOptions(String settingsVal, boolean enableActiveApp, boolean enableScrobbling, boolean enableNp, AdvancedOptionsWhen when,
                    boolean alsoOnComplete, NetworkOptions networkOptions, boolean roaming, int nameRID) {
        this.settingsVal = settingsVal;
        this.enableActiveApp = enableActiveApp;
        this.enableScrobbling = enableScrobbling;
        this.enableNp = enableNp;
        this.when = when;
        this.alsoOnComplete = alsoOnComplete;
        this.networkOptions = networkOptions;
        this.roaming = roaming;
        this.nameRID = nameRID;
    }

    // NOTE(review): the original comment claimed these accessors were
    // "intentionally package-private", but they are declared public and
    // are used by AppSettings.
    public String getSettingsVal() {
        return settingsVal;
    }

    public boolean isActiveAppEnabled() {
        return enableActiveApp;
    }

    public boolean isScrobblingEnabled() {
        return enableScrobbling;
    }

    public boolean isNpEnabled() {
        return enableNp;
    }

    public AdvancedOptionsWhen getWhen() {
        return when;
    }

    public boolean getAlsoOnComplete() {
        return alsoOnComplete;
    }

    public NetworkOptions getNetworkOptions() {
        return networkOptions;
    }

    public boolean getRoaming() {
        return roaming;
    }

    /** Resolves the localized display name for this option. */
    public String getName(Context ctx) {
        return ctx.getString(nameRID);
    }

    private static final String TAG = "SLSAdvancedOptions";

    // Lookup table from persisted settings value to enum constant.
    // Declared final so the reference cannot be reassigned after the
    // class is initialized (the original field was mutable).
    private static final Map<String, AdvancedOptions> mSAOMap;

    static {
        AdvancedOptions[] aos = AdvancedOptions.values();
        mSAOMap = new HashMap<String, AdvancedOptions>(aos.length);
        for (AdvancedOptions ao : aos) {
            mSAOMap.put(ao.getSettingsVal(), ao);
        }
    }

    /**
     * Resolves the enum constant stored under the given settings value.
     * Unknown or null values fall back to {@link #STANDARD} (logged as
     * an error) so corrupt settings never crash the caller.
     */
    public static AdvancedOptions fromSettingsVal(String s) {
        AdvancedOptions ao = mSAOMap.get(s);
        if (ao == null) {
            Log.e(TAG, "got null advanced option from settings, defaulting to standard");
            ao = AdvancedOptions.STANDARD;
        }
        return ao;
    }
} | tgwizard/sls | app/src/main/java/com/adam/aslfms/util/enums/AdvancedOptions.java | Java | apache-2.0 | 4,334 |
/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as dom from './dom';
import {AmpEvents} from './amp-events';
import {CommonSignals} from './common-signals';
import {ElementStub} from './element-stub';
import {
Layout,
applyStaticLayout,
isInternalElement,
isLoadingAllowed,
} from './layout';
import {LayoutDelayMeter} from './layout-delay-meter';
import {ResourceState} from './service/resource';
import {Services} from './services';
import {Signals} from './utils/signals';
import {blockedByConsentError, isBlockedByConsent, reportError} from './error';
import {createLoaderElement} from '../src/loader.js';
import {dev, devAssert, rethrowAsync, user, userAssert} from './log';
import {getIntersectionChangeEntry} from '../src/utils/intersection-observer-polyfill';
import {getMode} from './mode';
import {htmlFor} from './static-template';
import {parseSizeList} from './size-list';
import {setStyle} from './style';
import {shouldBlockOnConsentByMeta} from '../src/consent';
import {startupChunk} from './chunk';
import {toWin} from './types';
import {tryResolve} from '../src/utils/promise';
/** @const {string} Log tag for this module. */
const TAG = 'CustomElement';
/**
 * States of the stub-to-real-implementation upgrade of a custom element
 * (see the `upgradeState_` field and `tryUpgrade_`).
 * @enum {number}
 */
const UpgradeState = {
  NOT_UPGRADED: 1, // still running the ElementStub implementation
  UPGRADED: 2, // real implementation installed and signaled
  UPGRADE_FAILED: 3, // upgradeCallback promise rejected
  UPGRADE_IN_PROGRESS: 4, // upgradeCallback returned a pending promise
};
/**
 * Caches whether the template tag is supported to avoid memory allocations.
 * @type {boolean|undefined}
 */
let templateTagSupported;

/**
 * Whether this platform supports template tags. The result of the
 * feature probe is memoized on first call.
 * @return {boolean}
 */
function isTemplateTagSupported() {
  if (templateTagSupported === undefined) {
    // A real <template> element exposes its inert subtree via `content`.
    templateTagSupported =
      'content' in self.document.createElement('template');
  }
  return templateTagSupported;
}
/**
 * Creates a named custom element class.
 *
 * @param {!Window} win The window in which to register the custom element.
 * @return {typeof AmpElement} The custom element class.
 */
export function createCustomElementClass(win) {
  const Base = /** @type {typeof HTMLElement} */ (createBaseCustomElementClass(
    win
  ));
  // A fresh subclass is required per registration: the same "base"
  // constructor cannot be registered for multiple custom elements.
  class CustomAmpElement extends Base {}
  return /** @type {typeof AmpElement} */ (CustomAmpElement);
}
/**
* Creates a base custom element class.
*
* @param {!Window} win The window in which to register the custom element.
* @return {typeof HTMLElement}
*/
function createBaseCustomElementClass(win) {
if (win.__AMP_BASE_CE_CLASS) {
return win.__AMP_BASE_CE_CLASS;
}
const htmlElement = /** @type {typeof HTMLElement} */ (win.HTMLElement);
/**
* @abstract @extends {HTMLElement}
*/
class BaseCustomElement extends htmlElement {
/** */
constructor() {
super();
this.createdCallback();
}
/**
* Called when elements is created. Sets instance vars since there is no
* constructor.
* @final
*/
createdCallback() {
// Flag "notbuilt" is removed by Resource manager when the resource is
// considered to be built. See "setBuilt" method.
/** @private {boolean} */
this.built_ = false;
/**
* Several APIs require the element to be connected to the DOM tree, but
* the CustomElement lifecycle APIs are async. This lead to subtle bugs
* that require state tracking. See #12849, https://crbug.com/821195, and
* https://bugs.webkit.org/show_bug.cgi?id=180940.
* @private {boolean}
*/
this.isConnected_ = false;
/** @private {?Promise} */
this.buildingPromise_ = null;
/** @type {string} */
this.readyState = 'loading';
/** @type {boolean} */
this.everAttached = false;
/**
* Ampdoc can only be looked up when an element is attached.
* @private {?./service/ampdoc-impl.AmpDoc}
*/
this.ampdoc_ = null;
/**
* Resources can only be looked up when an element is attached.
* @private {?./service/resources-interface.ResourcesInterface}
*/
this.resources_ = null;
/** @private {!Layout} */
this.layout_ = Layout.NODISPLAY;
/** @private {number} */
this.layoutWidth_ = -1;
/** @private {number} */
this.layoutHeight_ = -1;
/** @private {number} */
this.layoutCount_ = 0;
/** @private {boolean} */
this.isFirstLayoutCompleted_ = false;
/** @private {boolean} */
this.isInViewport_ = false;
/** @private {boolean} */
this.paused_ = false;
/** @private {string|null|undefined} */
this.mediaQuery_ = undefined;
/** @private {!./size-list.SizeList|null|undefined} */
this.sizeList_ = undefined;
/** @private {!./size-list.SizeList|null|undefined} */
this.heightsList_ = undefined;
/** @public {boolean} */
this.warnOnMissingOverflow = true;
/**
* This element can be assigned by the {@link applyStaticLayout} to a
* child element that will be used to size this element.
* @package {?Element|undefined}
*/
this.sizerElement = undefined;
/** @private {boolean|undefined} */
this.loadingDisabled_ = undefined;
/** @private {boolean|undefined} */
this.loadingState_ = undefined;
/** @private {?Element} */
this.loadingContainer_ = null;
/** @private {?Element} */
this.loadingElement_ = null;
/** @private {?Element|undefined} */
this.overflowElement_ = undefined;
/**
* The time at which this element was scheduled for layout relative to
* the epoch. This value will be set to 0 until the this element has been
* scheduled.
* Note that this value may change over time if the element is enqueued,
* then dequeued and re-enqueued by the scheduler.
* @type {number|undefined}
*/
this.layoutScheduleTime = undefined;
// Closure compiler appears to mark HTMLElement as @struct which
// disables bracket access. Force this with a type coercion.
const nonStructThis = /** @type {!Object} */ (this);
// `opt_implementationClass` is only used for tests.
let Ctor =
win.__AMP_EXTENDED_ELEMENTS &&
win.__AMP_EXTENDED_ELEMENTS[this.localName];
if (getMode().test && nonStructThis['implementationClassForTesting']) {
Ctor = nonStructThis['implementationClassForTesting'];
}
devAssert(Ctor);
/** @private {!./base-element.BaseElement} */
this.implementation_ = new Ctor(this);
/**
* An element always starts in a unupgraded state until it's added to DOM
* for the first time in which case it can be upgraded immediately or wait
* for script download or `upgradeCallback`.
* @private {!UpgradeState}
*/
this.upgradeState_ = UpgradeState.NOT_UPGRADED;
/**
* Time delay imposed by baseElement upgradeCallback. If no
* upgradeCallback specified or not yet executed, delay is 0.
* @private {number}
*/
this.upgradeDelayMs_ = 0;
/**
* Action queue is initially created and kept around until the element
* is ready to send actions directly to the implementation.
* - undefined initially
* - array if used
* - null after unspun
* @private {?Array<!./service/action-impl.ActionInvocation>|undefined}
*/
this.actionQueue_ = undefined;
/**
* Whether the element is in the template.
* @private {boolean|undefined}
*/
this.isInTemplate_ = undefined;
/** @private @const */
this.signals_ = new Signals();
const perf = Services.performanceForOrNull(win);
/** @private {boolean} */
this.perfOn_ = perf && perf.isPerformanceTrackingOn();
/** @private {?./layout-delay-meter.LayoutDelayMeter} */
this.layoutDelayMeter_ = null;
if (nonStructThis[dom.UPGRADE_TO_CUSTOMELEMENT_RESOLVER]) {
nonStructThis[dom.UPGRADE_TO_CUSTOMELEMENT_RESOLVER](nonStructThis);
delete nonStructThis[dom.UPGRADE_TO_CUSTOMELEMENT_RESOLVER];
delete nonStructThis[dom.UPGRADE_TO_CUSTOMELEMENT_PROMISE];
}
}
/** @return {!Signals} */
signals() {
return this.signals_;
}
/**
* Returns the associated ampdoc. Only available after attachment. It throws
* exception before the element is attached.
* @return {!./service/ampdoc-impl.AmpDoc}
* @final
* @package
*/
getAmpDoc() {
devAssert(this.ampdoc_, 'no ampdoc yet, since element is not attached');
return /** @type {!./service/ampdoc-impl.AmpDoc} */ (this.ampdoc_);
}
/**
* Returns Resources manager. Only available after attachment. It throws
* exception before the element is attached.
* @return {!./service/resources-interface.ResourcesInterface}
* @final
* @package
*/
getResources() {
devAssert(
this.resources_,
'no resources yet, since element is not attached'
);
return /** @type {!./service/resources-interface.ResourcesInterface} */ (this
.resources_);
}
/**
* Whether the element has been upgraded yet. Always returns false when
* the element has not yet been added to DOM. After the element has been
* added to DOM, the value depends on the `BaseElement` implementation and
* its `upgradeElement` callback.
* @return {boolean}
* @final
*/
isUpgraded() {
return this.upgradeState_ == UpgradeState.UPGRADED;
}
/** @return {!Promise} */
whenUpgraded() {
return this.signals_.whenSignal(CommonSignals.UPGRADED);
}
/**
* Upgrades the element to the provided new implementation. If element
* has already been attached, it's layout validation and attachment flows
* are repeated for the new implementation.
* @param {typeof ./base-element.BaseElement} newImplClass
* @final @package
*/
upgrade(newImplClass) {
if (this.isInTemplate_) {
return;
}
if (this.upgradeState_ != UpgradeState.NOT_UPGRADED) {
// Already upgraded or in progress or failed.
return;
}
this.implementation_ = new newImplClass(this);
if (this.everAttached) {
// Usually, we do an implementation upgrade when the element is
// attached to the DOM. But, if it hadn't yet upgraded from
// ElementStub, we couldn't. Now that it's upgraded from a stub, go
// ahead and do the full upgrade.
this.tryUpgrade_();
}
}
/**
* Time delay imposed by baseElement upgradeCallback. If no
* upgradeCallback specified or not yet executed, delay is 0.
* @return {number}
*/
getUpgradeDelayMs() {
return this.upgradeDelayMs_;
}
/**
* Completes the upgrade of the element with the provided implementation.
* @param {!./base-element.BaseElement} newImpl
* @param {number} upgradeStartTime
* @final @private
*/
completeUpgrade_(newImpl, upgradeStartTime) {
this.upgradeDelayMs_ = win.Date.now() - upgradeStartTime;
this.upgradeState_ = UpgradeState.UPGRADED;
this.implementation_ = newImpl;
this.classList.remove('amp-unresolved');
this.classList.remove('i-amphtml-unresolved');
this.implementation_.createdCallback();
this.assertLayout_();
// TODO(wg-runtime): Don't set BaseElement ivars externally.
this.implementation_.layout_ = this.layout_;
this.implementation_.firstAttachedCallback();
this.dispatchCustomEventForTesting(AmpEvents.ATTACHED);
this.getResources().upgraded(this);
this.signals_.signal(CommonSignals.UPGRADED);
}
/** @private */
assertLayout_() {
if (
this.layout_ != Layout.NODISPLAY &&
!this.implementation_.isLayoutSupported(this.layout_)
) {
userAssert(
this.getAttribute('layout'),
'The element did not specify a layout attribute. ' +
'Check https://amp.dev/documentation/guides-and-tutorials/' +
'develop/style_and_layout/control_layout and the respective ' +
'element documentation for details.'
);
userAssert(false, `Layout not supported: ${this.layout_}`);
}
}
/**
* Whether the element has been built. A built element had its
* {@link buildCallback} method successfully invoked.
* @return {boolean}
* @final
*/
isBuilt() {
return this.built_;
}
/**
* Returns the promise that's resolved when the element has been built. If
* the build fails, the resulting promise is rejected.
* @return {!Promise}
*/
whenBuilt() {
return this.signals_.whenSignal(CommonSignals.BUILT);
}
/**
* Get the priority to load the element.
* @return {number}
*/
getLayoutPriority() {
devAssert(this.isUpgraded(), 'Cannot get priority of unupgraded element');
return this.implementation_.getLayoutPriority();
}
/**
* TODO(wg-runtime, #25824): Make Resource.getLayoutBox() the source of truth.
* @return {number}
* @deprecated
*/
getLayoutWidth() {
return this.layoutWidth_;
}
/**
* Get the default action alias.
* @return {?string}
*/
getDefaultActionAlias() {
devAssert(
this.isUpgraded(),
'Cannot get default action alias of unupgraded element'
);
return this.implementation_.getDefaultActionAlias();
}
/** @return {boolean} */
isBuilding() {
return !!this.buildingPromise_;
}
/**
* Requests or requires the element to be built. The build is done by
* invoking {@link BaseElement.buildCallback} method.
*
* Can only be called on a upgraded element. May only be called from
* resource.js to ensure an element and its resource are in sync.
*
* @return {?Promise}
* @final
*/
build() {
assertNotTemplate(this);
devAssert(this.isUpgraded(), 'Cannot build unupgraded element');
if (this.buildingPromise_) {
return this.buildingPromise_;
}
return (this.buildingPromise_ = new Promise((resolve, reject) => {
const policyId = this.getConsentPolicy_();
if (!policyId) {
resolve(this.implementation_.buildCallback());
} else {
Services.consentPolicyServiceForDocOrNull(this)
.then((policy) => {
if (!policy) {
return true;
}
return policy.whenPolicyUnblock(/** @type {string} */ (policyId));
})
.then((shouldUnblock) => {
if (shouldUnblock) {
resolve(this.implementation_.buildCallback());
} else {
reject(blockedByConsentError());
}
});
}
}).then(
() => {
this.preconnect(/* onLayout */ false);
this.built_ = true;
this.classList.remove('i-amphtml-notbuilt');
this.classList.remove('amp-notbuilt');
this.signals_.signal(CommonSignals.BUILT);
if (this.isInViewport_) {
this.updateInViewport_(true);
}
if (this.actionQueue_) {
// Only schedule when the queue is not empty, which should be
// the case 99% of the time.
Services.timerFor(toWin(this.ownerDocument.defaultView)).delay(
this.dequeueActions_.bind(this),
1
);
}
if (!this.getPlaceholder()) {
const placeholder = this.createPlaceholder();
if (placeholder) {
this.appendChild(placeholder);
}
}
},
(reason) => {
this.signals_.rejectSignal(
CommonSignals.BUILT,
/** @type {!Error} */ (reason)
);
if (!isBlockedByConsent(reason)) {
reportError(reason, this);
}
throw reason;
}
));
}
/**
* Called to instruct the element to preconnect to hosts it uses during
* layout.
* @param {boolean} onLayout Whether this was called after a layout.
*/
preconnect(onLayout) {
if (onLayout) {
this.implementation_.preconnectCallback(onLayout);
} else {
// If we do early preconnects we delay them a bit. This is kind of
// an unfortunate trade off, but it seems faster, because the DOM
// operations themselves are not free and might delay
startupChunk(this.getAmpDoc(), () => {
const TAG = this.tagName;
if (!this.ownerDocument) {
dev().error(TAG, 'preconnect without ownerDocument');
return;
} else if (!this.ownerDocument.defaultView) {
dev().error(TAG, 'preconnect without defaultView');
return;
}
this.implementation_.preconnectCallback(onLayout);
});
}
}
/**
* Whether the custom element declares that it has to be fixed.
* @return {boolean}
*/
isAlwaysFixed() {
return this.implementation_.isAlwaysFixed();
}
/**
* Updates the layout box of the element.
* Should only be called by Resources.
* @param {!./layout-rect.LayoutRectDef} layoutBox
* @param {boolean} sizeChanged
*/
updateLayoutBox(layoutBox, sizeChanged = false) {
this.layoutWidth_ = layoutBox.width;
this.layoutHeight_ = layoutBox.height;
if (this.isBuilt()) {
this.onMeasure(sizeChanged);
}
}
/**
* Calls onLayoutMeasure() (and onMeasureChanged() if size changed)
* on the BaseElement implementation.
* Should only be called by Resources.
* @param {boolean} sizeChanged
*/
onMeasure(sizeChanged = false) {
devAssert(this.isBuilt());
try {
this.implementation_.onLayoutMeasure();
if (sizeChanged) {
this.implementation_.onMeasureChanged();
}
} catch (e) {
reportError(e, this);
}
}
/**
* @return {?Element}
* @private
*/
getSizer_() {
if (
this.sizerElement === undefined &&
(this.layout_ === Layout.RESPONSIVE ||
this.layout_ === Layout.INTRINSIC)
) {
// Expect sizer to exist, just not yet discovered.
this.sizerElement = this.querySelector('i-amphtml-sizer');
}
return this.sizerElement || null;
}
/**
* @param {Element} sizer
* @private
*/
resetSizer_(sizer) {
if (this.layout_ === Layout.RESPONSIVE) {
setStyle(sizer, 'paddingTop', '0');
return;
}
if (this.layout_ === Layout.INTRINSIC) {
const intrinsicSizerImg = sizer.querySelector(
'.i-amphtml-intrinsic-sizer'
);
if (!intrinsicSizerImg) {
return;
}
intrinsicSizerImg.setAttribute('src', '');
return;
}
}
/**
* If the element has a media attribute, evaluates the value as a media
* query and based on the result adds or removes the class
* `i-amphtml-hidden-by-media-query`. The class adds display:none to the
* element which in turn prevents any of the resource loading to happen for
* the element.
*
* This method is called by Resources and shouldn't be called by anyone
* else.
*
* @final
* @package
*/
applySizesAndMediaQuery() {
assertNotTemplate(this);
// Media query.
if (this.mediaQuery_ === undefined) {
this.mediaQuery_ = this.getAttribute('media') || null;
}
if (this.mediaQuery_) {
const {defaultView} = this.ownerDocument;
this.classList.toggle(
'i-amphtml-hidden-by-media-query',
!defaultView.matchMedia(this.mediaQuery_).matches
);
}
// Sizes.
if (this.sizeList_ === undefined) {
const sizesAttr = this.getAttribute('sizes');
const isDisabled = this.hasAttribute('disable-inline-width');
this.sizeList_ =
!isDisabled && sizesAttr ? parseSizeList(sizesAttr) : null;
}
if (this.sizeList_) {
setStyle(
this,
'width',
this.sizeList_.select(toWin(this.ownerDocument.defaultView))
);
}
// Heights.
if (
this.heightsList_ === undefined &&
this.layout_ === Layout.RESPONSIVE
) {
const heightsAttr = this.getAttribute('heights');
this.heightsList_ = heightsAttr
? parseSizeList(heightsAttr, /* allowPercent */ true)
: null;
}
if (this.heightsList_) {
const sizer = this.getSizer_();
if (sizer) {
setStyle(
sizer,
'paddingTop',
this.heightsList_.select(toWin(this.ownerDocument.defaultView))
);
}
}
}
/**
* Applies a size change to the element.
*
* This method is called by Resources and shouldn't be called by anyone
* else. This method must always be called in the mutation context.
*
* @param {number|undefined} newHeight
* @param {number|undefined} newWidth
* @param {!./layout-rect.LayoutMarginsDef=} opt_newMargins
* @final
* @package
*/
applySize(newHeight, newWidth, opt_newMargins) {
const sizer = this.getSizer_();
if (sizer) {
// From the moment height is changed the element becomes fully
// responsible for managing its height. Aspect ratio is no longer
// preserved.
this.sizerElement = null;
this.resetSizer_(sizer);
this.mutateOrInvoke_(() => {
if (sizer) {
dom.removeElement(sizer);
}
});
}
if (newHeight !== undefined) {
setStyle(this, 'height', newHeight, 'px');
}
if (newWidth !== undefined) {
setStyle(this, 'width', newWidth, 'px');
}
if (opt_newMargins) {
if (opt_newMargins.top != null) {
setStyle(this, 'marginTop', opt_newMargins.top, 'px');
}
if (opt_newMargins.right != null) {
setStyle(this, 'marginRight', opt_newMargins.right, 'px');
}
if (opt_newMargins.bottom != null) {
setStyle(this, 'marginBottom', opt_newMargins.bottom, 'px');
}
if (opt_newMargins.left != null) {
setStyle(this, 'marginLeft', opt_newMargins.left, 'px');
}
}
if (this.isAwaitingSize_()) {
this.sizeProvided_();
}
this.dispatchCustomEvent(AmpEvents.SIZE_CHANGED);
}
/**
* Called when the element is first connected to the DOM. Calls
* {@link firstAttachedCallback} if this is the first attachment.
*
* This callback is guarded by checks to see if the element is still
* connected. Chrome and Safari can trigger connectedCallback even when
* the node is disconnected. See #12849, https://crbug.com/821195, and
* https://bugs.webkit.org/show_bug.cgi?id=180940. Thankfully,
* connectedCallback will later be called when the disconnected root is
* connected to the document tree.
*
* @final
*/
connectedCallback() {
if (!isTemplateTagSupported() && this.isInTemplate_ === undefined) {
this.isInTemplate_ = !!dom.closestAncestorElementBySelector(
this,
'template'
);
}
if (this.isInTemplate_) {
return;
}
if (this.isConnected_ || !dom.isConnectedNode(this)) {
return;
}
this.isConnected_ = true;
if (!this.everAttached) {
this.classList.add('i-amphtml-element');
this.classList.add('i-amphtml-notbuilt');
this.classList.add('amp-notbuilt');
}
if (!this.ampdoc_) {
// Ampdoc can now be initialized.
const win = toWin(this.ownerDocument.defaultView);
const ampdocService = Services.ampdocServiceFor(win);
const ampdoc = ampdocService.getAmpDoc(this);
this.ampdoc_ = ampdoc;
// Load the pre-stubbed extension if needed.
const extensionId = this.tagName.toLowerCase();
if (
isStub(this.implementation_) &&
!ampdoc.declaresExtension(extensionId)
) {
Services.extensionsFor(win).installExtensionForDoc(
ampdoc,
extensionId
);
}
}
if (!this.resources_) {
// Resources can now be initialized since the ampdoc is now available.
this.resources_ = Services.resourcesForDoc(this.ampdoc_);
}
this.getResources().add(this);
if (this.everAttached) {
const reconstruct = this.reconstructWhenReparented();
if (reconstruct) {
this.reset_();
}
if (this.isUpgraded()) {
if (reconstruct) {
this.getResources().upgraded(this);
}
this.dispatchCustomEventForTesting(AmpEvents.ATTACHED);
}
} else {
this.everAttached = true;
try {
this.layout_ = applyStaticLayout(
this,
Services.platformFor(toWin(this.ownerDocument.defaultView)).isIe()
);
} catch (e) {
reportError(e, this);
}
if (!isStub(this.implementation_)) {
this.tryUpgrade_();
}
if (!this.isUpgraded()) {
this.classList.add('amp-unresolved');
this.classList.add('i-amphtml-unresolved');
// amp:attached is dispatched from the ElementStub class when it
// replayed the firstAttachedCallback call.
this.dispatchCustomEventForTesting(AmpEvents.STUBBED);
}
// Classically, sizes/media queries are applied just before
// Resource.measure. With IntersectionObserver, observe() is the
// equivalent which happens above in Resources.add(). Applying here
// also avoids unnecessary reinvocation during reparenting.
if (this.getResources().isIntersectionExperimentOn()) {
this.applySizesAndMediaQuery();
}
}
}
/**
* @return {boolean}
* @private
*/
isAwaitingSize_() {
return this.classList.contains('i-amphtml-layout-awaiting-size');
}
/**
* @private
*/
sizeProvided_() {
this.classList.remove('i-amphtml-layout-awaiting-size');
}
/** The Custom Elements V0 sibling to `connectedCallback`. */
attachedCallback() {
this.connectedCallback();
}
/**
* Try to upgrade the element with the provided implementation.
* @private @final
*/
tryUpgrade_() {
const impl = this.implementation_;
devAssert(!isStub(impl), 'Implementation must not be a stub');
if (this.upgradeState_ != UpgradeState.NOT_UPGRADED) {
// Already upgraded or in progress or failed.
return;
}
// The `upgradeCallback` only allows redirect once for the top-level
// non-stub class. We may allow nested upgrades later, but they will
// certainly be bad for performance.
this.upgradeState_ = UpgradeState.UPGRADE_IN_PROGRESS;
const startTime = win.Date.now();
const res = impl.upgradeCallback();
if (!res) {
// Nothing returned: the current object is the upgraded version.
this.completeUpgrade_(impl, startTime);
} else if (typeof res.then == 'function') {
// It's a promise: wait until it's done.
res
.then((upgrade) => {
this.completeUpgrade_(upgrade || impl, startTime);
})
.catch((reason) => {
this.upgradeState_ = UpgradeState.UPGRADE_FAILED;
rethrowAsync(reason);
});
} else {
// It's an actual instance: upgrade immediately.
this.completeUpgrade_(
/** @type {!./base-element.BaseElement} */ (res),
startTime
);
}
}
/**
* Called when the element is disconnected from the DOM.
*
* @final
*/
disconnectedCallback() {
this.disconnect(/* pretendDisconnected */ false);
}
/** The Custom Elements V0 sibling to `disconnectedCallback`. */
detachedCallback() {
this.disconnectedCallback();
}
/**
* Called when an element is disconnected from DOM, or when an ampDoc is
* being disconnected (the element itself may still be connected to ampDoc).
*
* This callback is guarded by checks to see if the element is still
* connected. See #12849, https://crbug.com/821195, and
* https://bugs.webkit.org/show_bug.cgi?id=180940.
* If the element is still connected to the document, you'll need to pass
* opt_pretendDisconnected.
*
* @param {boolean} pretendDisconnected Forces disconnection regardless
* of DOM isConnected.
*/
disconnect(pretendDisconnected) {
if (this.isInTemplate_ || !this.isConnected_) {
return;
}
if (!pretendDisconnected && dom.isConnectedNode(this)) {
return;
}
// This path only comes from Resource#disconnect, which deletes the
// Resource instance tied to this element. Therefore, it is no longer
// an AMP Element. But, DOM queries for i-amphtml-element assume that
// the element is tied to a Resource.
if (pretendDisconnected) {
this.classList.remove('i-amphtml-element');
}
this.isConnected_ = false;
this.getResources().remove(this);
this.implementation_.detachedCallback();
}
/**
* Dispatches a custom event.
*
* @param {string} name
* @param {!Object=} opt_data Event data.
* @final
*/
dispatchCustomEvent(name, opt_data) {
const data = opt_data || {};
// Constructors of events need to come from the correct window. Sigh.
const event = this.ownerDocument.createEvent('Event');
event.data = data;
event.initEvent(name, /* bubbles */ true, /* cancelable */ true);
this.dispatchEvent(event);
}
/**
* Dispatches a custom event only in testing environment.
*
* @param {string} name
* @param {!Object=} opt_data Event data.
* @final
*/
dispatchCustomEventForTesting(name, opt_data) {
if (!getMode().test) {
return;
}
this.dispatchCustomEvent(name, opt_data);
}
    /**
     * Whether the element can pre-render.
     * Delegates to the implementation's prerenderAllowed().
     * @return {boolean}
     * @final
     */
    prerenderAllowed() {
      return this.implementation_.prerenderAllowed();
    }
    /**
     * Whether the element has render-blocking service.
     * Delegates to the implementation's isBuildRenderBlocking().
     * @return {boolean}
     * @final
     */
    isBuildRenderBlocking() {
      return this.implementation_.isBuildRenderBlocking();
    }
    /**
     * Creates a placeholder for the element.
     * Delegates to the implementation's createPlaceholderCallback().
     * @return {?Element}
     * @final
     */
    createPlaceholder() {
      return this.implementation_.createPlaceholderCallback();
    }
    /**
     * Creates a loader logo.
     * Delegates to the implementation's createLoaderLogoCallback().
     * @return {{
     *   content: (!Element|undefined),
     *   color: (string|undefined),
     * }}
     * @final
     */
    createLoaderLogo() {
      return this.implementation_.createLoaderLogoCallback();
    }
    /**
     * Whether the element should ever render when it is not in viewport.
     * Delegates to the implementation's renderOutsideViewport().
     * @return {boolean|number}
     * @final
     */
    renderOutsideViewport() {
      return this.implementation_.renderOutsideViewport();
    }
    /**
     * Whether the element should render outside of renderOutsideViewport when
     * the scheduler is idle. Delegates to the implementation.
     * @return {boolean|number}
     * @final
     */
    idleRenderOutsideViewport() {
      return this.implementation_.idleRenderOutsideViewport();
    }
    /**
     * Returns a previously measured layout box adjusted to the viewport. This
     * mainly affects fixed-position elements that are adjusted to be always
     * relative to the document position in the viewport.
     * @return {!./layout-rect.LayoutRectDef}
     * @final
     */
    getLayoutBox() {
      return this.getResource_().getLayoutBox();
    }
    /**
     * Returns a previously measured layout box relative to the page. The
     * fixed-position elements are relative to the top of the document.
     * @return {!./layout-rect.LayoutRectDef}
     * @final
     */
    getPageLayoutBox() {
      return this.getResource_().getPageLayoutBox();
    }
    /**
     * Returns this resource's owner element, if one has been assigned.
     * @return {?Element}
     * @final
     */
    getOwner() {
      return this.getResource_().getOwner();
    }
    /**
     * Returns a change entry for that should be compatible with
     * IntersectionObserverEntry.
     * @return {!IntersectionObserverEntry} A change entry.
     * @final
     */
    getIntersectionChangeEntry() {
      const box = this.implementation_.getIntersectionElementLayoutBox();
      const owner = this.getOwner();
      const viewportBox = this.implementation_.getViewport().getRect();
      // TODO(jridgewell, #4826): We may need to make this recursive.
      const ownerBox = owner && owner.getLayoutBox();
      // Note: this calls the module-level getIntersectionChangeEntry()
      // helper, not this method (which is only reachable via `this.`).
      return getIntersectionChangeEntry(box, ownerBox, viewportBox);
    }
    /**
     * Returns the resource of the element.
     * @return {!./service/resource.Resource}
     * @private
     */
    getResource_() {
      return this.getResources().getResourceForElement(this);
    }
    /**
     * Returns the resource ID of the element.
     * @return {number}
     */
    getResourceId() {
      return this.getResource_().getId();
    }
    /**
     * The runtime calls this method to determine if {@link layoutCallback}
     * should be called again when layout changes.
     * @return {boolean}
     * @package @final
     */
    isRelayoutNeeded() {
      return this.implementation_.isRelayoutNeeded();
    }
/**
* Returns reference to upgraded implementation.
* @param {boolean} waitForBuild If true, waits for element to be built before
* resolving the returned Promise. Default is true.
* @return {!Promise<!./base-element.BaseElement>}
*/
getImpl(waitForBuild = true) {
const waitFor = waitForBuild ? this.whenBuilt() : this.whenUpgraded();
return waitFor.then(() => this.implementation_);
}
/**
* Returns the layout of the element.
* @return {!Layout}
*/
getLayout() {
return this.layout_;
}
    /**
     * Instructs the element to layout its content and load its resources if
     * necessary by calling the {@link BaseElement.layoutCallback} method that
     * should be implemented by BaseElement subclasses. Must return a promise
     * that will yield when the layout and associated loadings are complete.
     *
     * This method is always called for the first layout, but for subsequent
     * layouts the runtime consults {@link isRelayoutNeeded} method.
     *
     * Can only be called on a upgraded and built element.
     *
     * @return {!Promise}
     * @package @final
     */
    layoutCallback() {
      assertNotTemplate(this);
      devAssert(this.isBuilt(), 'Must be built to receive viewport events');
      this.dispatchCustomEventForTesting(AmpEvents.LOAD_START);
      const isLoadEvent = this.layoutCount_ == 0; // First layout is "load".
      // A new layout attempt supersedes any previous unload.
      this.signals_.reset(CommonSignals.UNLOAD);
      if (isLoadEvent) {
        this.signals_.signal(CommonSignals.LOAD_START);
      }
      if (this.perfOn_) {
        this.getLayoutDelayMeter_().startLayout();
      }
      // tryResolve converts a synchronous throw into a rejected promise.
      const promise = tryResolve(() => this.implementation_.layoutCallback());
      this.preconnect(/* onLayout */ true);
      this.classList.add('i-amphtml-layout');
      return promise.then(
        () => {
          if (isLoadEvent) {
            this.signals_.signal(CommonSignals.LOAD_END);
          }
          this.readyState = 'complete';
          this.layoutCount_++;
          this.toggleLoading(false, {cleanup: true});
          // Check if this is the first success layout that needs
          // to call firstLayoutCompleted.
          if (!this.isFirstLayoutCompleted_) {
            this.implementation_.firstLayoutCompleted();
            this.isFirstLayoutCompleted_ = true;
            this.dispatchCustomEventForTesting(AmpEvents.LOAD_END);
          }
        },
        (reason) => {
          // add layoutCount_ by 1 despite load fails or not
          if (isLoadEvent) {
            this.signals_.rejectSignal(
              CommonSignals.LOAD_END,
              /** @type {!Error} */ (reason)
            );
          }
          this.layoutCount_++;
          this.toggleLoading(false, {cleanup: true});
          // Re-throw so callers observe the layout failure.
          throw reason;
        }
      );
    }
    /**
     * Whether the resource is currently visible in the viewport.
     * @return {boolean}
     * @final @package
     */
    isInViewport() {
      return this.isInViewport_;
    }
    /**
     * Instructs the resource that it entered or exited the visible viewport.
     *
     * Can only be called on a upgraded and built element.
     *
     * @param {boolean} inViewport Whether the element has entered or exited
     *     the visible viewport.
     * @final @package
     */
    viewportCallback(inViewport) {
      assertNotTemplate(this);
      // Ignore repeated notifications with the same visibility state.
      if (inViewport == this.isInViewport_) {
        return;
      }
      // TODO(dvoytenko, #9177): investigate/cleanup viewport signals for
      // elements in dead iframes.
      if (!this.ownerDocument || !this.ownerDocument.defaultView) {
        return;
      }
      this.isInViewport_ = inViewport;
      // Only manage the loading indicator before the first layout completes.
      if (this.layoutCount_ == 0) {
        if (!inViewport) {
          this.toggleLoading(false);
        } else {
          // Set a minimum delay in case the element loads very fast or if it
          // leaves the viewport.
          const loadingStartTime = win.Date.now();
          Services.timerFor(toWin(this.ownerDocument.defaultView)).delay(() => {
            // TODO(dvoytenko, #9177): cleanup `this.ownerDocument.defaultView`
            // once investigation is complete. It appears that we get a lot of
            // errors here once the iframe is destroyed due to timer.
            if (
              this.isInViewport_ &&
              this.ownerDocument &&
              this.ownerDocument.defaultView &&
              this.layoutCount_ === 0 // Ensures that layoutCallback hasn't completed in this 100ms window.
            ) {
              this.toggleLoading(true, {startTime: loadingStartTime});
            }
          }, 100);
        }
      }
      if (this.isBuilt()) {
        this.updateInViewport_(inViewport);
      }
    }
    /**
     * Forwards the viewport state to the implementation and, when entering
     * the viewport with perf enabled, records it on the layout delay meter.
     * @param {boolean} inViewport
     * @private
     */
    updateInViewport_(inViewport) {
      this.implementation_.inViewport_ = inViewport;
      this.implementation_.viewportCallback(inViewport);
      if (inViewport && this.perfOn_) {
        this.getLayoutDelayMeter_().enterViewport();
      }
    }
    /**
     * Whether the resource is currently paused.
     * @return {boolean}
     * @final @package
     */
    isPaused() {
      return this.paused_;
    }
/**
* Requests the resource to stop its activity when the document goes into
* inactive state. The scope is up to the actual component. Among other
* things the active playback of video or audio content must be stopped.
*
* @package @final
*/
pauseCallback() {
assertNotTemplate(this);
if (this.paused_) {
return;
}
this.paused_ = true;
this.viewportCallback(false);
if (this.isBuilt()) {
this.implementation_.pauseCallback();
}
}
/**
* Requests the resource to resume its activity when the document returns
* from an inactive state. The scope is up to the actual component. Among
* other things the active playback of video or audio content may be
* resumed.
*
* @package @final
*/
resumeCallback() {
assertNotTemplate(this);
if (!this.paused_) {
return;
}
this.paused_ = false;
if (this.isBuilt()) {
this.implementation_.resumeCallback();
}
}
    /**
     * Requests the element to unload any expensive resources when the element
     * goes into non-visible state. The scope is up to the actual component.
     *
     * Calling this method on unbuilt or unupgraded element has no effect.
     *
     * @return {boolean} Whether a re-layout will be needed later.
     * @package @final
     */
    unlayoutCallback() {
      assertNotTemplate(this);
      if (!this.isBuilt()) {
        return false;
      }
      this.signals_.signal(CommonSignals.UNLOAD);
      const isReLayoutNeeded = this.implementation_.unlayoutCallback();
      if (isReLayoutNeeded) {
        // Roll the element back to its pre-layout state so the next layout
        // starts fresh.
        this.reset_();
      }
      this.dispatchCustomEventForTesting(AmpEvents.UNLOAD);
      return isReLayoutNeeded;
    }
    /**
     * Resets layout bookkeeping and clears layout-related signals so the
     * element can be laid out again from scratch.
     * @private
     */
    reset_() {
      this.layoutCount_ = 0;
      this.isFirstLayoutCompleted_ = false;
      this.signals_.reset(CommonSignals.RENDER_START);
      this.signals_.reset(CommonSignals.LOAD_START);
      this.signals_.reset(CommonSignals.LOAD_END);
      this.signals_.reset(CommonSignals.INI_LOAD);
    }
    /**
     * Whether to call {@link unlayoutCallback} when pausing the element.
     * Certain elements cannot properly pause (like amp-iframes with unknown
     * video content), and so we must unlayout to stop playback.
     *
     * @return {boolean}
     * @package @final
     */
    unlayoutOnPause() {
      return this.implementation_.unlayoutOnPause();
    }
    /**
     * Whether the element needs to be reconstructed after it has been
     * re-parented. Many elements cannot survive fully the reparenting and
     * are better to be reconstructed from scratch.
     *
     * @return {boolean}
     * @package @final
     */
    reconstructWhenReparented() {
      return this.implementation_.reconstructWhenReparented();
    }
    /**
     * Collapses the element, and notifies its owner (if there is one) that the
     * element is no longer present.
     */
    collapse() {
      // The /*OK*/ marker suppresses the presubmit check on this call.
      this.implementation_./*OK*/ collapse();
    }
    /**
     * Called every time an owned AmpElement collapses itself.
     * @param {!AmpElement} element The collapsed child element.
     */
    collapsedCallback(element) {
      this.implementation_.collapsedCallback(element);
    }
    /**
     * Expands the element, and notifies its owner (if there is one) that the
     * element is now present.
     */
    expand() {
      this.implementation_./*OK*/ expand();
    }
    /**
     * Called every time an owned AmpElement expands itself.
     * @param {!AmpElement} element The expanded child element.
     */
    expandedCallback(element) {
      this.implementation_.expandedCallback(element);
    }
    /**
     * Called when one or more attributes are mutated.
     * Note: Must be called inside a mutate context.
     * Note: Boolean attributes have a value of `true` and `false` when
     * present and missing, respectively.
     * @param {!JsonObject<string, (null|boolean|string|number|Array|Object)>} mutations
     */
    mutatedAttributesCallback(mutations) {
      this.implementation_.mutatedAttributesCallback(mutations);
    }
    /**
     * Enqueues the action with the element. If element has been upgraded and
     * built, the action is dispatched to the implementation right away.
     * Otherwise the invocation is enqueued until the implementation is ready
     * to receive actions.
     *
     * NOTE(review): the name "enqueAction" (sic) appears to be a historical
     * typo kept for API compatibility — confirm before renaming.
     * @param {!./service/action-impl.ActionInvocation} invocation
     * @final
     */
    enqueAction(invocation) {
      assertNotTemplate(this);
      if (!this.isBuilt()) {
        // Lazily create the queue on first deferred action.
        if (this.actionQueue_ === undefined) {
          this.actionQueue_ = [];
        }
        devAssert(this.actionQueue_).push(invocation);
      } else {
        this.executionAction_(invocation, false);
      }
    }
    /**
     * Dequeues events from the queue and dispatches them to the implementation
     * with "deferred" flag.
     * @private
     */
    dequeueActions_() {
      if (!this.actionQueue_) {
        return;
      }
      // Null the queue first so re-entrant enqueues don't land in the
      // snapshot being drained.
      const actionQueue = devAssert(this.actionQueue_);
      this.actionQueue_ = null;
      // Notice, the actions are currently not de-duped.
      actionQueue.forEach((invocation) => {
        this.executionAction_(invocation, true);
      });
    }
/**
* Executes the action immediately. All errors are consumed and reported.
* @param {!./service/action-impl.ActionInvocation} invocation
* @param {boolean} deferred
* @final
* @private
*/
executionAction_(invocation, deferred) {
try {
this.implementation_.executeAction(invocation, deferred);
} catch (e) {
rethrowAsync(
'Action execution failed:',
e,
invocation.node.tagName,
invocation.method
);
}
}
/**
* Get the consent policy to follow.
* @return {?string}
*/
getConsentPolicy_() {
let policyId = this.getAttribute('data-block-on-consent');
if (policyId === null) {
if (shouldBlockOnConsentByMeta(this)) {
policyId = 'default';
this.setAttribute('data-block-on-consent', policyId);
} else {
// data-block-on-consent attribute not set
return null;
}
}
if (policyId == '' || policyId == 'default') {
// data-block-on-consent value not set, up to individual element
// Note: data-block-on-consent and data-block-on-consent='default' is
// treated exactly the same
return this.implementation_.getConsentPolicy();
}
return policyId;
}
    /**
     * Returns the original nodes of the custom element without any service
     * nodes that could have been added for markup. These nodes can include
     * Text, Comment and other child nodes.
     * @return {!Array<!Node>}
     * @package @final
     */
    getRealChildNodes() {
      return dom.childNodes(this, (node) => !isInternalOrServiceNode(node));
    }
    /**
     * Returns the original children of the custom element without any service
     * nodes that could have been added for markup.
     * @return {!Array<!Element>}
     * @package @final
     */
    getRealChildren() {
      return dom.childElements(
        this,
        (element) => !isInternalOrServiceNode(element)
      );
    }
    /**
     * Returns an optional placeholder element for this custom element.
     * Note: the LAST matching child wins.
     * @return {?Element}
     * @package @final
     */
    getPlaceholder() {
      return dom.lastChildElement(this, (el) => {
        return (
          el.hasAttribute('placeholder') &&
          // Denylist elements that has a native placeholder property
          // like input and textarea. These are not allowed to be AMP
          // placeholders.
          !isInputPlaceholder(el)
        );
      });
    }
/**
* Hides or shows the placeholder, if available.
* @param {boolean} show
* @package @final
*/
togglePlaceholder(show) {
assertNotTemplate(this);
if (show) {
const placeholder = this.getPlaceholder();
if (placeholder) {
dev().assertElement(placeholder).classList.remove('amp-hidden');
}
} else {
const placeholders = dom.childElementsByAttr(this, 'placeholder');
for (let i = 0; i < placeholders.length; i++) {
// Don't toggle elements with a native placeholder property
// e.g. input, textarea
if (isInputPlaceholder(placeholders[i])) {
continue;
}
placeholders[i].classList.add('amp-hidden');
}
}
}
    /**
     * Returns an optional fallback element for this custom element.
     * @return {?Element}
     * @package @final
     */
    getFallback() {
      return dom.childElementByAttr(this, 'fallback');
    }
    /**
     * Hides or shows the fallback, if available. This function must only
     * be called inside a mutate context.
     * @param {boolean} show
     * @package @final
     */
    toggleFallback(show) {
      assertNotTemplate(this);
      const resourceState = this.getResource_().getState();
      // Do not show fallback before layout
      if (
        show &&
        (resourceState == ResourceState.NOT_BUILT ||
          resourceState == ResourceState.NOT_LAID_OUT ||
          resourceState == ResourceState.READY_FOR_LAYOUT)
      ) {
        return;
      }
      // This implementation is notably less efficient then placeholder
      // toggling. The reasons for this are: (a) "not supported" is the state of
      // the whole element, (b) some relayout is expected and (c) fallback
      // condition would be rare.
      this.classList.toggle('amp-notsupported', show);
      // NOTE(review): loose `show == true` (vs `if (show)`) — presumably
      // callers always pass a strict boolean; confirm before simplifying.
      if (show == true) {
        const fallbackElement = this.getFallback();
        if (fallbackElement) {
          // Ask the owner service to lay out the fallback subtree.
          Services.ownersForDoc(this.getAmpDoc()).scheduleLayout(
            this,
            fallbackElement
          );
        }
      }
    }
    /**
     * An implementation can call this method to signal to the element that
     * it has started rendering. Clears placeholder and loading indicator.
     * @package @final
     */
    renderStarted() {
      this.signals_.signal(CommonSignals.RENDER_START);
      this.togglePlaceholder(false);
      this.toggleLoading(false);
    }
    /**
     * Whether the loading can be shown for this element.
     * @return {boolean}
     * @private
     */
    isLoadingEnabled_() {
      // No loading indicator will be shown if either one of these conditions
      // true:
      // 1. The document is A4A.
      // 2. `noloading` attribute is specified;
      // 3. The element has already been laid out, and does not support reshowing the indicator (include having loading
      //    error);
      // 4. The element is too small or has not yet been measured;
      // 5. The element has not been allowlisted;
      // 6. The element is an internal node (e.g. `placeholder` or `fallback`);
      // 7. The element's layout is not nodisplay.
      if (this.isInA4A()) {
        return false;
      }
      // Cache the attribute lookup after the first call.
      if (this.loadingDisabled_ === undefined) {
        this.loadingDisabled_ = this.hasAttribute('noloading');
      }
      const laidOut =
        this.layoutCount_ > 0 || this.signals_.get(CommonSignals.RENDER_START);
      if (
        this.loadingDisabled_ ||
        (laidOut && !this.implementation_.isLoadingReused()) ||
        this.layoutWidth_ <= 0 || // Layout is not ready or invisible
        !isLoadingAllowed(this) ||
        isInternalOrServiceNode(this) ||
        this.layout_ == Layout.NODISPLAY
      ) {
        return false;
      }
      return true;
    }
    /**
     * Whether this element lives in an A4A context (a friendly iframe whose
     * window differs from the ampdoc's window, or the inabox runtime).
     * @return {boolean}
     */
    isInA4A() {
      return (
        // in FIE
        (this.ampdoc_ && this.ampdoc_.win != this.ownerDocument.defaultView) ||
        // in inabox
        getMode().runtime == 'inabox'
      );
    }
    /**
     * Creates a loading object. The caller must ensure that loading can
     * actually be shown. This method must also be called in the mutate
     * context.
     * @private
     * @param {number=} startTime
     */
    prepareLoading_(startTime) {
      if (!this.isLoadingEnabled_()) {
        return;
      }
      // Lazily build the container + loader element exactly once.
      if (!this.loadingContainer_) {
        const doc = this.ownerDocument;
        devAssert(doc);
        const container = htmlFor(/** @type {!Document} */ (doc))`
            <div class="i-amphtml-loading-container i-amphtml-fill-content
              amp-hidden"></div>`;
        const loadingElement = createLoaderElement(
          this.getAmpDoc(),
          this,
          this.layoutWidth_,
          this.layoutHeight_,
          startTime
        );
        container.appendChild(loadingElement);
        this.appendChild(container);
        this.loadingContainer_ = container;
        this.loadingElement_ = loadingElement;
      }
    }
    /**
     * Turns the loading indicator on or off.
     * @param {boolean} state
     * @param {{cleanup:(boolean|undefined), startTime:(number|undefined)}=} opt_options
     * @public @final
     */
    toggleLoading(state, opt_options) {
      const cleanup = opt_options && opt_options.cleanup;
      const startTime = opt_options && opt_options.startTime;
      assertNotTemplate(this);
      if (state === this.loadingState_ && !opt_options) {
        // Loading state is the same.
        return;
      }
      this.loadingState_ = state;
      if (!state && !this.loadingContainer_) {
        return;
      }
      // Check if loading should be shown.
      if (state && !this.isLoadingEnabled_()) {
        this.loadingState_ = false;
        return;
      }
      this.mutateOrInvoke_(
        () => {
          // Note: this local `state` intentionally shadows the parameter;
          // it re-reads this.loadingState_, which may have changed.
          let state = this.loadingState_;
          // Repeat "loading enabled" check because it could have changed while
          // waiting for vsync.
          if (state && !this.isLoadingEnabled_()) {
            state = false;
          }
          if (state) {
            this.prepareLoading_(startTime);
          }
          if (!this.loadingContainer_) {
            return;
          }
          this.loadingContainer_.classList.toggle('amp-hidden', !state);
          this.loadingElement_.classList.toggle('amp-active', state);
          if (!state && cleanup && !this.implementation_.isLoadingReused()) {
            // Detach references first, then remove the DOM node in a second
            // mutate pass.
            const loadingContainer = this.loadingContainer_;
            this.loadingContainer_ = null;
            this.loadingElement_ = null;
            this.mutateOrInvoke_(
              () => {
                dom.removeElement(loadingContainer);
              },
              undefined,
              true
            );
          }
        },
        undefined,
        /* skipRemeasure */ true
      );
    }
    /**
     * Lazily creates and returns the layout delay meter for this element.
     * (Previous comment said "overflow element" — copy/paste artifact.)
     * @return {!./layout-delay-meter.LayoutDelayMeter}
     */
    getLayoutDelayMeter_() {
      if (!this.layoutDelayMeter_) {
        this.layoutDelayMeter_ = new LayoutDelayMeter(
          toWin(this.ownerDocument.defaultView),
          this.getLayoutPriority()
        );
      }
      return this.layoutDelayMeter_;
    }
    /**
     * Returns an optional overflow element for this custom element.
     * Lazily resolved and cached; also ensures the overflow element is
     * focusable and announced as a button.
     * @return {?Element}
     */
    getOverflowElement() {
      if (this.overflowElement_ === undefined) {
        this.overflowElement_ = dom.childElementByAttr(this, 'overflow');
        if (this.overflowElement_) {
          if (!this.overflowElement_.hasAttribute('tabindex')) {
            this.overflowElement_.setAttribute('tabindex', '0');
          }
          if (!this.overflowElement_.hasAttribute('role')) {
            this.overflowElement_.setAttribute('role', 'button');
          }
        }
      }
      return this.overflowElement_;
    }
    /**
     * Hides or shows the overflow, if available. This function must only
     * be called inside a mutate context.
     * @param {boolean} overflown
     * @param {number|undefined} requestedHeight
     * @param {number|undefined} requestedWidth
     * @package @final
     */
    overflowCallback(overflown, requestedHeight, requestedWidth) {
      // Ensure this.overflowElement_ is resolved (lazy lookup).
      this.getOverflowElement();
      if (!this.overflowElement_) {
        if (overflown && this.warnOnMissingOverflow) {
          user().warn(
            TAG,
            'Cannot resize element and overflow is not available',
            this
          );
        }
      } else {
        this.overflowElement_.classList.toggle('amp-visible', overflown);
        if (overflown) {
          // Clicking the overflow applies the pending resize, then hides
          // the overflow element again.
          this.overflowElement_.onclick = () => {
            const mutator = Services.mutatorForDoc(this.getAmpDoc());
            mutator.forceChangeSize(this, requestedHeight, requestedWidth);
            mutator.mutateElement(this, () => {
              this.overflowCallback(
                /* overflown */ false,
                requestedHeight,
                requestedWidth
              );
            });
          };
        } else {
          // Clear the handler so stale size requests can't be applied.
          this.overflowElement_.onclick = null;
        }
      }
    }
/**
* Mutates the element using resources if available.
*
* @param {function()} mutator
* @param {?Element=} opt_element
* @param {boolean=} opt_skipRemeasure
*/
mutateOrInvoke_(mutator, opt_element, opt_skipRemeasure = false) {
if (this.ampdoc_) {
Services.mutatorForDoc(this.getAmpDoc()).mutateElement(
opt_element || this,
mutator,
opt_skipRemeasure
);
} else {
mutator();
}
}
}
win.__AMP_BASE_CE_CLASS = BaseCustomElement;
return /** @type {typeof HTMLElement} */ (win.__AMP_BASE_CE_CLASS);
}
/**
* @param {!Element} element
* @return {boolean}
*/
function isInputPlaceholder(element) {
return 'placeholder' in element;
}
/**
 * Dev-asserts that the element is not inside a template subtree.
 * @param {!Element} element
 */
function assertNotTemplate(element) {
  devAssert(!element.isInTemplate_, 'Must never be called in template');
}
/**
 * Whether the implementation is a stub.
 * @param {?./base-element.BaseElement} impl
 * @return {boolean}
 */
function isStub(impl) {
  return impl instanceof ElementStub;
}
/**
 * Returns "true" for internal AMP nodes or for placeholder elements.
 * @param {!Node} node
 * @return {boolean}
 */
function isInternalOrServiceNode(node) {
  if (isInternalElement(node)) {
    return true;
  }
  // tagName guards against non-Element nodes (text/comment) which don't
  // have hasAttribute.
  if (
    node.tagName &&
    (node.hasAttribute('placeholder') ||
      node.hasAttribute('fallback') ||
      node.hasAttribute('overflow'))
  ) {
    return true;
  }
  return false;
}
/**
 * Creates a new custom element class prototype.
 *
 * In test mode, an explicit implementation class can be injected and is
 * recorded on the prototype for the upgrade machinery to pick up.
 *
 * @param {!Window} win The window in which to register the custom element.
 * @param {(typeof ./base-element.BaseElement)=} opt_implementationClass For testing only.
 * @return {!Object} Prototype of element.
 */
export function createAmpElementForTesting(win, opt_implementationClass) {
  const Element = createCustomElementClass(win);
  if (getMode().test && opt_implementationClass) {
    Element.prototype.implementationClassForTesting = opt_implementationClass;
  }
  return Element;
}
| adup-tech/amphtml | src/custom-element.js | JavaScript | apache-2.0 | 58,872 |
// UserController for AngularJS: drives the user create/edit screen,
// including role assignment and admin actions (reset password, confirm,
// unlock, approve). Relies on page-level helpers: includeChangePassword,
// showMessage, showUsers, and the bootbox dialog library.
ecollabroApp.controller('userController', ['$scope', 'securityService', function ($scope, securityService) {
    // The user model bound to the form; userId == 0 means "create new".
    $scope.user = {};
    $scope.activeRoles = [];
    $scope.userId = 0;
    // Entry point: wires the change-password widget and loads the user.
    $scope.initialize = function (userId) {
        $scope.userId = userId;
        includeChangePassword($scope, securityService, 'admin');
        $scope.loadUser();
    };
    // Loads the user from the server, or initializes a blank active user
    // when creating. Active roles are loaded afterwards so the user's
    // selections can be mapped onto the role objects.
    $scope.loadUser = function () {
        if ($scope.userId == 0) {
            $scope.user.IsActive = true;
            $scope.loadActiveRoles();
            return;
        }
        securityService.getUser($scope.userId).then(function (resp) {
            if (resp.businessException == null) {
                $scope.user = resp.result;
                $scope.loadActiveRoles();
            }
            else {
                showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
            }
        });
    };
    // Validates the form (jQuery validate + at least one role) and saves.
    // When hosted inside the users list page, delegates back to the parent.
    $scope.saveUser = function () {
        if (!$("#frmUser").valid()) {
            return;
        }
        else {
            if ($scope.user.UserRoles == null || $scope.user.UserRoles.length == 0) {
                showMessage("divSummaryMessageUser", "Select at-least one role for user!", "danger");
                return;
            }
        }
        securityService.saveUser($scope.user).then(function (resp) {
            if (resp.businessException == null) {
                $scope.user.UserId = resp.result.Id;
                var divUsers = document.getElementById("divUsers");
                if (divUsers) {
                    showUsers(resp.result.Message); // calling parent's method
                }
                else {
                    showMessage("divSummaryMessageUser", resp.result.Message, "success");
                }
            }
            else {
                showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
            }
        });
    };
    // Resets the user's password after an explicit confirmation dialog.
    $scope.resetPassword = function () {
        bootbox.confirm("This will reset User's password and will send email with new credential? Are you sure to reset the password for selected User?", function (result) {
            if (result) {
                securityService.resetUserPassword($scope.user.UserId).then(function (resp) {
                    if (resp.businessException == null) {
                        showMessage("divSummaryMessageUser", resp.result, "success");
                    }
                    else {
                        showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
                    }
                });
            }
        });
    };
    // Marks the user's account as confirmed.
    $scope.confirmUser = function () {
        securityService.confirmUser($scope.user.UserId).then(function (resp) {
            if (resp.businessException == null) {
                showMessage("divSummaryMessageUser", resp.result, "success");
                $scope.user.IsConfirmed = true;
            }
            else {
                showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
            }
        });
    };
    // Unlocks a locked-out user account.
    $scope.unlockUser = function () {
        securityService.unlockUser($scope.user.UserId).then(function (resp) {
            if (resp.businessException == null) {
                showMessage("divSummaryMessageUser", resp.result, "success");
                $scope.user.IsLocked = false;
            }
            else {
                showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
            }
        });
    };
    // Approves the user's account.
    $scope.approveUser = function () {
        securityService.approveUser($scope.user.UserId).then(function (resp) {
            if (resp.businessException == null) {
                showMessage("divSummaryMessageUser", resp.result, "success");
                $scope.user.IsApproved = true;
            }
            else {
                showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
            }
        });
    };
    // Finds the role object in activeRoles with the given id, or null.
    $scope.getActiveRole = function (roleId) {
        var activeRole = null;
        for (var ctr = 0; ctr < $scope.activeRoles.length; ctr++) {
            if ($scope.activeRoles[ctr].RoleId == roleId) {
                activeRole = $scope.activeRoles[ctr];
                break;
            }
        }
        return activeRole;
    }
    // Loads active roles and re-maps the user's existing role selections
    // onto the freshly loaded role objects (so the multiselect binds by
    // reference).
    $scope.loadActiveRoles = function () {
        securityService.getActiveRoles($scope.user).then(function (resp) {
            if (resp.businessException == null) {
                $scope.activeRoles = resp.result.data;
                if ($scope.user.UserRoles != null && $scope.user.UserRoles.length > 0) {
                    var existingRoles = $scope.user.UserRoles;
                    $scope.user.UserRoles = [];
                    for (var ctr = 0; ctr < existingRoles.length; ctr++) {
                        var activeRole = $scope.getActiveRole(existingRoles[ctr].RoleId);
                        if (activeRole != null)
                            $scope.user.UserRoles.push(activeRole);
                    }
                }
            }
            else {
                showMessage("divSummaryMessageUser", resp.businessException.ExceptionMessage, "danger");
            }
        });
    };
    // Navigates back to the users list page.
    $scope.openUsers = function () {
        location.href = "/security/users";
    };
}]); | eCollobro/eCollabro | eCollabro.Web/obj/Release/Package/PackageTmp/app/controllers/user/user.js | JavaScript | apache-2.0 | 5,849 |
/*
* Copyright 2020 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.aster;
import com.streamsets.datacollector.task.Task;
import com.streamsets.lib.security.http.aster.AsterServiceHook;
/**
 * Synchronizes entitlements with Aster.
 *
 * To trigger a synchronization immediately, call {@link #run()}.
 */
public interface EntitlementSyncTask extends Task, AsterServiceHook {
  /**
   * Immediately get the latest entitlement, and block until this task is complete.
   *
   * <p>Unlike the fire-and-forget {@link #run()}, this reports whether the sync
   * actually changed the entitlement.
   *
   * @return true if entitlement was changed, false otherwise
   */
  boolean syncEntitlement();
}
| streamsets/datacollector | container/src/main/java/com/streamsets/datacollector/aster/EntitlementSyncTask.java | Java | apache-2.0 | 1,139 |
/*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.internal.eventbus.net4j;
import java.util.Set;
import org.eclipse.net4j.signal.IndicationWithResponse;
import org.eclipse.net4j.util.io.ExtendedDataInputStream;
import org.eclipse.net4j.util.io.ExtendedDataOutputStream;
import com.b2international.snowowl.eventbus.net4j.EventBusConstants;
/**
* @since 3.1
*/
public class HandlerChangeIndication extends IndicationWithResponse {
public HandlerChangeIndication(EventBusProtocol protocol, short signalID) {
super(protocol, signalID);
}
@Override
protected void indicating(ExtendedDataInputStream in) throws Exception {
final Object set = in.readObject();
if (set instanceof Set) {
if (getID() == EventBusConstants.HANDLER_UNREGISTRATION) {
getProtocol().unregisterAddressBook((Set<String>)set);
} else {
getProtocol().registerAddressBook((Set<String>)set);
}
}
}
	/**
	 * Writes the response for the indicated signal: the local address book for
	 * HANDLER_INIT, or a {@code true} acknowledgement for registration and
	 * unregistration signals.
	 */
	@Override
	protected void responding(ExtendedDataOutputStream out) throws Exception {
		switch (getID()) {
		case EventBusConstants.HANDLER_INIT: {
			out.writeObject(getProtocol().getInfraStructure().getAddressBook());
			break;
		}
		case EventBusConstants.HANDLER_REGISTRATION:
		case EventBusConstants.HANDLER_UNREGISTRATION:
			out.writeBoolean(true);
			break;
		default:
			throw new IllegalArgumentException("Unknown signalID: " + getID());
		}
	}
	/** Covariant override: the protocol is always an {@link EventBusProtocol}. */
	@Override
	public EventBusProtocol getProtocol() {
		return (EventBusProtocol) super.getProtocol();
	}
} | IHTSDO/snow-owl | net4j/com.b2international.snowowl.eventbus/src/com/b2international/snowowl/internal/eventbus/net4j/HandlerChangeIndication.java | Java | apache-2.0 | 2,079 |
// "Qualify the call with 'A.B.this'" "true"
class A {
static class B {
class C {
String name(String key) {
return name(<caret>);
}
}
String name() {
return "";
}
}
}
| jwren/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/qualifyMethodCall/beforeNonStaticMethodFromNonStaticContext2.java | Java | apache-2.0 | 214 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.filters;
import java.io.IOException;
import jakarta.servlet.Filter;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequest;
import jakarta.servlet.ServletResponse;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpSession;
/**
* A {@link jakarta.servlet.Filter} that initializes the {@link HttpSession} for
* the {@link HttpServletRequest} by calling its getSession() method.
* <p>
* This is required for some operations with WebSocket requests, where it is
* too late to initialize the HttpSession object, and the current Java WebSocket
* specification does not provide a way to do so.
*/
public class SessionInitializerFilter implements Filter {

    /**
     * Forces creation of the {@link HttpSession} for the current request, then hands the
     * request/response pair to the rest of the filter chain unchanged.
     * <p>
     * Eagerly touching the session here guarantees it exists before any later component
     * (e.g. a WebSocket upgrade) needs it.
     *
     * @param request  the request being processed; expected to be an {@link HttpServletRequest}
     * @param response the response associated with the request
     * @param chain    the remainder of the filter chain to invoke
     * @throws IOException      if an I/O error occurs while processing the request
     * @throws ServletException if the downstream processing fails for any other reason
     */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        HttpServletRequest httpRequest = (HttpServletRequest) request;
        httpRequest.getSession();
        chain.doFilter(request, response);
    }
}
| apache/tomcat | java/org/apache/catalina/filters/SessionInitializerFilter.java | Java | apache-2.0 | 2,509 |
package org.ovirt.engine.ui.common.widget;
import org.ovirt.engine.ui.common.widget.dialog.SimpleDialogButton;
import com.google.gwt.core.client.GWT;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.ButtonBase;
import com.google.gwt.user.client.ui.Widget;
public class UiCommandButton extends AbstractUiCommandButton {

    /** UiBinder contract: binds the widget layout in {@code UiCommandButton.ui.xml} to this class. */
    interface WidgetUiBinder extends UiBinder<Widget, UiCommandButton> {
        WidgetUiBinder uiBinder = GWT.create(WidgetUiBinder.class);
    }

    // Injected by UiBinder; the field name must match the ui:field attribute in the template.
    @UiField
    SimpleDialogButton button;

    /** Creates a button with no label and no image. */
    public UiCommandButton() {
        initWidget(WidgetUiBinder.uiBinder.createAndBindUi(this));
    }

    /** Creates a button with the given label and no image. */
    public UiCommandButton(String label) {
        this(label, null);
    }

    /** Creates a button with the given image and an empty label. */
    public UiCommandButton(ImageResource image) {
        this("", image); //$NON-NLS-1$
    }

    /** Creates a button with the given label and image. */
    public UiCommandButton(String label, ImageResource image) {
        this();
        setLabel(label);
        setImage(image);
    }

    /** Returns the underlying dialog button that receives the click events. */
    @Override
    protected ButtonBase getButtonWidget() {
        return button;
    }

    /** Sets the image shown on the button face (may be null for no image). */
    public void setImage(ImageResource image) {
        button.setImage(image);
    }

    /** Applies an additional CSS style name to the button's content area. */
    public void setCustomContentStyle(String customStyle) {
        button.setCustomContentStyle(customStyle);
    }
}
| jbeecham/ovirt-engine | frontend/webadmin/modules/gwt-common/src/main/java/org/ovirt/engine/ui/common/widget/UiCommandButton.java | Java | apache-2.0 | 1,364 |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.jglfw;
import static com.badlogic.jglfw.Glfw.*;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.OverlayLayout;
import javax.swing.SwingUtilities;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.InputProcessorQueue;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.jglfw.GlfwCallbackAdapter;
/** An implementation of the {@link Input} interface hooking GLFW panel for input.
* @author mzechner
* @author Nathan Sweet */
public class JglfwInput implements Input {
final JglfwApplication app;
final InputProcessorQueue processorQueue;
InputProcessor processor;
int pressedKeys = 0;
boolean justTouched;
int deltaX, deltaY;
long currentEventTime;
public JglfwInput (final JglfwApplication app, boolean queueEvents) {
this.app = app;
InputProcessor inputProcessor = new InputProcessor() {
private int mouseX, mouseY;
public boolean keyDown (int keycode) {
pressedKeys++;
app.graphics.requestRendering();
return processor != null ? processor.keyDown(keycode) : false;
}
public boolean keyUp (int keycode) {
pressedKeys--;
app.graphics.requestRendering();
return processor != null ? processor.keyUp(keycode) : false;
}
public boolean keyTyped (char character) {
app.graphics.requestRendering();
return processor != null ? processor.keyTyped(character) : false;
}
public boolean touchDown (int screenX, int screenY, int pointer, int button) {
justTouched = true;
app.graphics.requestRendering();
return processor != null ? processor.touchDown(screenX, screenY, pointer, button) : false;
}
public boolean touchUp (int screenX, int screenY, int pointer, int button) {
app.graphics.requestRendering();
return processor != null ? processor.touchUp(screenX, screenY, pointer, button) : false;
}
public boolean touchDragged (int screenX, int screenY, int pointer) {
deltaX = screenX - mouseX;
deltaY = screenY - mouseY;
mouseX = screenX;
mouseY = screenY;
app.graphics.requestRendering();
return processor != null ? processor.touchDragged(mouseX, mouseY, 0) : false;
}
public boolean mouseMoved (int screenX, int screenY) {
deltaX = screenX - mouseX;
deltaY = screenY - mouseX;
mouseX = screenX;
mouseY = screenY;
app.graphics.requestRendering();
return processor != null ? processor.mouseMoved(mouseX, mouseY) : false;
}
public boolean scrolled (int amount) {
app.graphics.requestRendering();
return processor != null ? processor.scrolled(amount) : false;
}
};
if (queueEvents)
inputProcessor = processorQueue = new InputProcessorQueue(inputProcessor);
else
processorQueue = null;
app.getCallbacks().add(new GlfwInputProcessor(inputProcessor));
}
	/** Pumps pending input. Clears the just-touched flag, then either drains the queued events (when event queueing is
	 * enabled) or polls GLFW directly. */
	public void update () {
		justTouched = false;
		if (processorQueue != null)
			processorQueue.drain(); // Main loop is handled elsewhere and events are queued.
		else {
			currentEventTime = System.nanoTime();
			glfwPollEvents(); // Use GLFW main loop to process events.
		}
	}
public float getAccelerometerX () {
return 0;
}
public float getAccelerometerY () {
return 0;
}
public float getAccelerometerZ () {
return 0;
}
public int getX () {
return glfwGetCursorPosX(app.graphics.window);
}
public int getX (int pointer) {
return pointer > 0 ? 0 : getX();
}
public int getY () {
return glfwGetCursorPosY(app.graphics.window);
}
public int getY (int pointer) {
return pointer > 0 ? 0 : getY();
}
public int getDeltaX () {
return deltaX;
}
public int getDeltaX (int pointer) {
return pointer > 0 ? 0 : deltaX;
}
public int getDeltaY () {
return deltaY;
}
public int getDeltaY (int pointer) {
return pointer > 0 ? 0 : deltaY;
}
public boolean isTouched () {
return glfwGetMouseButton(app.graphics.window, 0) || glfwGetMouseButton(app.graphics.window, 1)
|| glfwGetMouseButton(app.graphics.window, 2);
}
public boolean isTouched (int pointer) {
return pointer > 0 ? false : isTouched();
}
public boolean justTouched () {
return justTouched;
}
public boolean isButtonPressed (int button) {
return glfwGetMouseButton(app.graphics.window, button);
}
	/** Returns whether the given gdx key code is currently held down. {@code ANY_KEY} tests whether any key is down (via the
	 * pressed-key counter); {@code SYM} tests either "super" (OS/command) key, since both map to the same gdx constant. */
	public boolean isKeyPressed (int key) {
		if (key == Input.Keys.ANY_KEY) return pressedKeys > 0;
		if (key == Input.Keys.SYM)
			return glfwGetKey(app.graphics.window, GLFW_KEY_LEFT_SUPER) || glfwGetKey(app.graphics.window, GLFW_KEY_RIGHT_SUPER);
		return glfwGetKey(app.graphics.window, getJglfwKeyCode(key));
	}
public void setOnscreenKeyboardVisible (boolean visible) {
}
public void vibrate (int milliseconds) {
}
public void vibrate (long[] pattern, int repeat) {
}
public void cancelVibrate () {
}
public float getAzimuth () {
return 0;
}
public float getPitch () {
return 0;
}
public float getRoll () {
return 0;
}
public void getRotationMatrix (float[] matrix) {
}
	/** Returns the timestamp of the current event: the queue's recorded time when events are queued, otherwise the time
	 * captured at the start of the last {@link #update()} poll. */
	public long getCurrentEventTime () {
		return processorQueue != null ? processorQueue.getCurrentEventTime() : currentEventTime;
	}
public void setCatchBackKey (boolean catchBack) {
}
public void setCatchMenuKey (boolean catchMenu) {
}
public void setInputProcessor (InputProcessor processor) {
this.processor = processor;
}
public InputProcessor getInputProcessor () {
return processor;
}
public boolean isPeripheralAvailable (Peripheral peripheral) {
return peripheral == Peripheral.HardwareKeyboard;
}
public int getRotation () {
return 0;
}
public Orientation getNativeOrientation () {
return Orientation.Landscape;
}
public void setCursorCatched (boolean captured) {
glfwSetInputMode(app.graphics.window, GLFW_CURSOR_MODE, captured ? GLFW_CURSOR_CAPTURED : GLFW_CURSOR_NORMAL);
}
public boolean isCursorCatched () {
return glfwGetInputMode(app.graphics.window, GLFW_CURSOR_MODE) == GLFW_CURSOR_CAPTURED;
}
public void setCursorPosition (int x, int y) {
glfwSetCursorPos(app.graphics.window, x, y);
}
@Override
public void setCursorImage (Pixmap pixmap, int xHotspot, int yHotspot) {
}
	/** Shows a modal Swing input dialog on the AWT event dispatch thread, then reports the entered text (or cancellation)
	 * to the listener on the application thread via {@code app.postRunnable}. */
	public void getTextInput (final TextInputListener listener, final String title, final String text) {
		SwingUtilities.invokeLater(new Runnable() {
			public void run () {
				// Blocks the EDT (not the app thread) until the user confirms or cancels; null means cancelled.
				final String output = JOptionPane.showInputDialog(null, title, text);
				app.postRunnable(new Runnable() {
					public void run () {
						if (output != null)
							listener.input(output);
						else
							listener.canceled();
					}
				});
			}
		});
	}
	/** Shows a modal Swing input dialog with grayed placeholder text overlaid on the text field. The placeholder is hidden
	 * as soon as the field contains any text.
	 * <p>
	 * NOTE(review): unlike {@code getTextInput}, the listener here is invoked directly on the Swing EDT rather than via
	 * {@code app.postRunnable} — confirm whether that is intended. */
	public void getPlaceholderTextInput (final TextInputListener listener, final String title, final String placeholder) {
		SwingUtilities.invokeLater(new Runnable() {
			public void run () {
				JPanel panel = new JPanel(new FlowLayout());
				// Overlay container: disables optimized drawing so the placeholder label can paint on top of the field.
				JPanel textPanel = new JPanel() {
					public boolean isOptimizedDrawingEnabled () {
						return false;
					};
				};
				textPanel.setLayout(new OverlayLayout(textPanel));
				panel.add(textPanel);
				final JTextField textField = new JTextField(20);
				textField.setAlignmentX(0.0f);
				textPanel.add(textField);
				final JLabel placeholderLabel = new JLabel(placeholder);
				placeholderLabel.setForeground(Color.GRAY);
				placeholderLabel.setAlignmentX(0.0f);
				textPanel.add(placeholderLabel, 0); // Index 0: label is painted above the text field.
				// Hide the placeholder whenever the field has content; all three document events funnel to updated().
				textField.getDocument().addDocumentListener(new DocumentListener() {
					public void removeUpdate (DocumentEvent event) {
						this.updated();
					}
					public void insertUpdate (DocumentEvent event) {
						this.updated();
					}
					public void changedUpdate (DocumentEvent event) {
						this.updated();
					}
					private void updated () {
						placeholderLabel.setVisible(textField.getText().length() == 0);
					}
				});
				JOptionPane pane = new JOptionPane(panel, JOptionPane.QUESTION_MESSAGE, JOptionPane.OK_CANCEL_OPTION, null, null,
					null);
				pane.setComponentOrientation(JOptionPane.getRootFrame().getComponentOrientation());
				pane.selectInitialValue();
				// Align the placeholder with the field's text by borrowing the field's border insets.
				placeholderLabel.setBorder(new EmptyBorder(textField.getBorder().getBorderInsets(textField)));
				JDialog dialog = pane.createDialog(null, title);
				dialog.addWindowFocusListener(new WindowFocusListener() {
					public void windowLostFocus (WindowEvent arg0) {
					}
					public void windowGainedFocus (WindowEvent arg0) {
						textField.requestFocusInWindow();
					}
				});
				dialog.setVisible(true);
				dialog.dispose();
				Object selectedValue = pane.getValue();
				// Only an explicit OK counts as input; closing the dialog or Cancel reports cancellation.
				if (selectedValue != null && (selectedValue instanceof Integer) && (Integer)selectedValue == JOptionPane.OK_OPTION)
					listener.input(textField.getText());
				else
					listener.canceled();
			}
		});
	}
static char characterForKeyCode (int key) {
// Map certain key codes to character codes.
switch (key) {
case Keys.BACKSPACE:
return 8;
case Keys.TAB:
return '\t';
case Keys.FORWARD_DEL:
return 127;
}
return 0;
}
	/** Translates a GLFW key code into the corresponding {@link Input.Keys} constant. Keys with no libgdx equivalent
	 * (world keys, lock keys, print screen/pause, F13-F25) map to {@link Input.Keys#UNKNOWN}.
	 * <p>
	 * NOTE(review): the parameter name "lwjglKeyCode" appears copied from the LWJGL backend; the value handled here is a
	 * GLFW key code. */
	static public int getGdxKeyCode (int lwjglKeyCode) {
		switch (lwjglKeyCode) {
		case GLFW_KEY_SPACE: return Input.Keys.SPACE;
		case GLFW_KEY_APOSTROPHE: return Input.Keys.APOSTROPHE;
		case GLFW_KEY_COMMA: return Input.Keys.COMMA;
		case GLFW_KEY_MINUS: return Input.Keys.MINUS;
		case GLFW_KEY_PERIOD: return Input.Keys.PERIOD;
		case GLFW_KEY_SLASH: return Input.Keys.SLASH;
		case GLFW_KEY_0: return Input.Keys.NUM_0;
		case GLFW_KEY_1: return Input.Keys.NUM_1;
		case GLFW_KEY_2: return Input.Keys.NUM_2;
		case GLFW_KEY_3: return Input.Keys.NUM_3;
		case GLFW_KEY_4: return Input.Keys.NUM_4;
		case GLFW_KEY_5: return Input.Keys.NUM_5;
		case GLFW_KEY_6: return Input.Keys.NUM_6;
		case GLFW_KEY_7: return Input.Keys.NUM_7;
		case GLFW_KEY_8: return Input.Keys.NUM_8;
		case GLFW_KEY_9: return Input.Keys.NUM_9;
		case GLFW_KEY_SEMICOLON: return Input.Keys.SEMICOLON;
		case GLFW_KEY_EQUAL: return Input.Keys.EQUALS;
		case GLFW_KEY_A: return Input.Keys.A;
		case GLFW_KEY_B: return Input.Keys.B;
		case GLFW_KEY_C: return Input.Keys.C;
		case GLFW_KEY_D: return Input.Keys.D;
		case GLFW_KEY_E: return Input.Keys.E;
		case GLFW_KEY_F: return Input.Keys.F;
		case GLFW_KEY_G: return Input.Keys.G;
		case GLFW_KEY_H: return Input.Keys.H;
		case GLFW_KEY_I: return Input.Keys.I;
		case GLFW_KEY_J: return Input.Keys.J;
		case GLFW_KEY_K: return Input.Keys.K;
		case GLFW_KEY_L: return Input.Keys.L;
		case GLFW_KEY_M: return Input.Keys.M;
		case GLFW_KEY_N: return Input.Keys.N;
		case GLFW_KEY_O: return Input.Keys.O;
		case GLFW_KEY_P: return Input.Keys.P;
		case GLFW_KEY_Q: return Input.Keys.Q;
		case GLFW_KEY_R: return Input.Keys.R;
		case GLFW_KEY_S: return Input.Keys.S;
		case GLFW_KEY_T: return Input.Keys.T;
		case GLFW_KEY_U: return Input.Keys.U;
		case GLFW_KEY_V: return Input.Keys.V;
		case GLFW_KEY_W: return Input.Keys.W;
		case GLFW_KEY_X: return Input.Keys.X;
		case GLFW_KEY_Y: return Input.Keys.Y;
		case GLFW_KEY_Z: return Input.Keys.Z;
		case GLFW_KEY_LEFT_BRACKET: return Input.Keys.LEFT_BRACKET;
		case GLFW_KEY_BACKSLASH: return Input.Keys.BACKSLASH;
		case GLFW_KEY_RIGHT_BRACKET: return Input.Keys.RIGHT_BRACKET;
		case GLFW_KEY_GRAVE_ACCENT: return Input.Keys.GRAVE;
		// No libgdx constants for the "world" keys.
		case GLFW_KEY_WORLD_1:
		case GLFW_KEY_WORLD_2: return Input.Keys.UNKNOWN;
		case GLFW_KEY_ESCAPE: return Input.Keys.ESCAPE;
		case GLFW_KEY_ENTER: return Input.Keys.ENTER;
		case GLFW_KEY_TAB: return Input.Keys.TAB;
		case GLFW_KEY_BACKSPACE: return Input.Keys.BACKSPACE;
		case GLFW_KEY_INSERT: return Input.Keys.INSERT;
		case GLFW_KEY_DELETE: return Input.Keys.FORWARD_DEL;
		case GLFW_KEY_RIGHT: return Input.Keys.RIGHT;
		case GLFW_KEY_LEFT: return Input.Keys.LEFT;
		case GLFW_KEY_DOWN: return Input.Keys.DOWN;
		case GLFW_KEY_UP: return Input.Keys.UP;
		case GLFW_KEY_PAGE_UP: return Input.Keys.PAGE_UP;
		case GLFW_KEY_PAGE_DOWN: return Input.Keys.PAGE_DOWN;
		case GLFW_KEY_HOME: return Input.Keys.HOME;
		case GLFW_KEY_END: return Input.Keys.END;
		// No libgdx constants for lock keys, print screen or pause.
		case GLFW_KEY_CAPS_LOCK:
		case GLFW_KEY_SCROLL_LOCK:
		case GLFW_KEY_NUM_LOCK:
		case GLFW_KEY_PRINT_SCREEN:
		case GLFW_KEY_PAUSE: return Input.Keys.UNKNOWN;
		case GLFW_KEY_F1: return Input.Keys.F1;
		case GLFW_KEY_F2: return Input.Keys.F2;
		case GLFW_KEY_F3: return Input.Keys.F3;
		case GLFW_KEY_F4: return Input.Keys.F4;
		case GLFW_KEY_F5: return Input.Keys.F5;
		case GLFW_KEY_F6: return Input.Keys.F6;
		case GLFW_KEY_F7: return Input.Keys.F7;
		case GLFW_KEY_F8: return Input.Keys.F8;
		case GLFW_KEY_F9: return Input.Keys.F9;
		case GLFW_KEY_F10: return Input.Keys.F10;
		case GLFW_KEY_F11: return Input.Keys.F11;
		case GLFW_KEY_F12: return Input.Keys.F12;
		// libgdx only defines F1-F12.
		case GLFW_KEY_F13:
		case GLFW_KEY_F14:
		case GLFW_KEY_F15:
		case GLFW_KEY_F16:
		case GLFW_KEY_F17:
		case GLFW_KEY_F18:
		case GLFW_KEY_F19:
		case GLFW_KEY_F20:
		case GLFW_KEY_F21:
		case GLFW_KEY_F22:
		case GLFW_KEY_F23:
		case GLFW_KEY_F24:
		case GLFW_KEY_F25: return Input.Keys.UNKNOWN;
		case GLFW_KEY_KP_0: return Input.Keys.NUMPAD_0;
		case GLFW_KEY_KP_1: return Input.Keys.NUMPAD_1;
		case GLFW_KEY_KP_2: return Input.Keys.NUMPAD_2;
		case GLFW_KEY_KP_3: return Input.Keys.NUMPAD_3;
		case GLFW_KEY_KP_4: return Input.Keys.NUMPAD_4;
		case GLFW_KEY_KP_5: return Input.Keys.NUMPAD_5;
		case GLFW_KEY_KP_6: return Input.Keys.NUMPAD_6;
		case GLFW_KEY_KP_7: return Input.Keys.NUMPAD_7;
		case GLFW_KEY_KP_8: return Input.Keys.NUMPAD_8;
		case GLFW_KEY_KP_9: return Input.Keys.NUMPAD_9;
		// Keypad operator keys share the gdx constants of their main-keyboard counterparts.
		case GLFW_KEY_KP_DECIMAL: return Input.Keys.PERIOD;
		case GLFW_KEY_KP_DIVIDE: return Input.Keys.SLASH;
		case GLFW_KEY_KP_MULTIPLY: return Input.Keys.STAR;
		case GLFW_KEY_KP_SUBTRACT: return Input.Keys.MINUS;
		case GLFW_KEY_KP_ADD: return Input.Keys.PLUS;
		case GLFW_KEY_KP_ENTER: return Input.Keys.ENTER;
		case GLFW_KEY_KP_EQUAL: return Input.Keys.EQUALS;
		case GLFW_KEY_LEFT_SHIFT: return Input.Keys.SHIFT_LEFT;
		case GLFW_KEY_LEFT_CONTROL: return Input.Keys.CONTROL_LEFT;
		case GLFW_KEY_LEFT_ALT: return Input.Keys.ALT_LEFT;
		case GLFW_KEY_LEFT_SUPER: return Input.Keys.SYM;
		case GLFW_KEY_RIGHT_SHIFT: return Input.Keys.SHIFT_RIGHT;
		case GLFW_KEY_RIGHT_CONTROL: return Input.Keys.CONTROL_RIGHT;
		case GLFW_KEY_RIGHT_ALT: return Input.Keys.ALT_RIGHT;
		case GLFW_KEY_RIGHT_SUPER: return Input.Keys.SYM;
		case GLFW_KEY_MENU: return Input.Keys.MENU;
		default:
			return Input.Keys.UNKNOWN;
		}
	}
	/** Translates an {@link Input.Keys} constant into the corresponding GLFW key code, or 0 for gdx keys with no GLFW
	 * equivalent.
	 * <p>
	 * NOTE(review): this is not a perfect inverse of {@code getGdxKeyCode} — SLASH, STAR, MINUS and PLUS map back to the
	 * keypad variants, and SYM maps to the left super key only. */
	static public int getJglfwKeyCode (int gdxKeyCode) {
		switch (gdxKeyCode) {
		case Input.Keys.SPACE: return GLFW_KEY_SPACE;
		case Input.Keys.APOSTROPHE: return GLFW_KEY_APOSTROPHE;
		case Input.Keys.COMMA: return GLFW_KEY_COMMA;
		case Input.Keys.PERIOD: return GLFW_KEY_PERIOD;
		case Input.Keys.NUM_0: return GLFW_KEY_0;
		case Input.Keys.NUM_1: return GLFW_KEY_1;
		case Input.Keys.NUM_2: return GLFW_KEY_2;
		case Input.Keys.NUM_3: return GLFW_KEY_3;
		case Input.Keys.NUM_4: return GLFW_KEY_4;
		case Input.Keys.NUM_5: return GLFW_KEY_5;
		case Input.Keys.NUM_6: return GLFW_KEY_6;
		case Input.Keys.NUM_7: return GLFW_KEY_7;
		case Input.Keys.NUM_8: return GLFW_KEY_8;
		case Input.Keys.NUM_9: return GLFW_KEY_9;
		case Input.Keys.SEMICOLON: return GLFW_KEY_SEMICOLON;
		case Input.Keys.EQUALS: return GLFW_KEY_EQUAL;
		case Input.Keys.A: return GLFW_KEY_A;
		case Input.Keys.B: return GLFW_KEY_B;
		case Input.Keys.C: return GLFW_KEY_C;
		case Input.Keys.D: return GLFW_KEY_D;
		case Input.Keys.E: return GLFW_KEY_E;
		case Input.Keys.F: return GLFW_KEY_F;
		case Input.Keys.G: return GLFW_KEY_G;
		case Input.Keys.H: return GLFW_KEY_H;
		case Input.Keys.I: return GLFW_KEY_I;
		case Input.Keys.J: return GLFW_KEY_J;
		case Input.Keys.K: return GLFW_KEY_K;
		case Input.Keys.L: return GLFW_KEY_L;
		case Input.Keys.M: return GLFW_KEY_M;
		case Input.Keys.N: return GLFW_KEY_N;
		case Input.Keys.O: return GLFW_KEY_O;
		case Input.Keys.P: return GLFW_KEY_P;
		case Input.Keys.Q: return GLFW_KEY_Q;
		case Input.Keys.R: return GLFW_KEY_R;
		case Input.Keys.S: return GLFW_KEY_S;
		case Input.Keys.T: return GLFW_KEY_T;
		case Input.Keys.U: return GLFW_KEY_U;
		case Input.Keys.V: return GLFW_KEY_V;
		case Input.Keys.W: return GLFW_KEY_W;
		case Input.Keys.X: return GLFW_KEY_X;
		case Input.Keys.Y: return GLFW_KEY_Y;
		case Input.Keys.Z: return GLFW_KEY_Z;
		case Input.Keys.LEFT_BRACKET: return GLFW_KEY_LEFT_BRACKET;
		case Input.Keys.BACKSLASH: return GLFW_KEY_BACKSLASH;
		case Input.Keys.RIGHT_BRACKET: return GLFW_KEY_RIGHT_BRACKET;
		case Input.Keys.GRAVE: return GLFW_KEY_GRAVE_ACCENT;
		case Input.Keys.ESCAPE: return GLFW_KEY_ESCAPE;
		case Input.Keys.ENTER: return GLFW_KEY_ENTER;
		case Input.Keys.TAB: return GLFW_KEY_TAB;
		case Input.Keys.BACKSPACE: return GLFW_KEY_BACKSPACE;
		case Input.Keys.INSERT: return GLFW_KEY_INSERT;
		case Input.Keys.FORWARD_DEL: return GLFW_KEY_DELETE;
		case Input.Keys.RIGHT: return GLFW_KEY_RIGHT;
		case Input.Keys.LEFT: return GLFW_KEY_LEFT;
		case Input.Keys.DOWN: return GLFW_KEY_DOWN;
		case Input.Keys.UP: return GLFW_KEY_UP;
		case Input.Keys.PAGE_UP: return GLFW_KEY_PAGE_UP;
		case Input.Keys.PAGE_DOWN: return GLFW_KEY_PAGE_DOWN;
		case Input.Keys.HOME: return GLFW_KEY_HOME;
		case Input.Keys.END: return GLFW_KEY_END;
		case Input.Keys.F1: return GLFW_KEY_F1;
		case Input.Keys.F2: return GLFW_KEY_F2;
		case Input.Keys.F3: return GLFW_KEY_F3;
		case Input.Keys.F4: return GLFW_KEY_F4;
		case Input.Keys.F5: return GLFW_KEY_F5;
		case Input.Keys.F6: return GLFW_KEY_F6;
		case Input.Keys.F7: return GLFW_KEY_F7;
		case Input.Keys.F8: return GLFW_KEY_F8;
		case Input.Keys.F9: return GLFW_KEY_F9;
		case Input.Keys.F10: return GLFW_KEY_F10;
		case Input.Keys.F11: return GLFW_KEY_F11;
		case Input.Keys.F12: return GLFW_KEY_F12;
		case Input.Keys.NUMPAD_0: return GLFW_KEY_KP_0;
		case Input.Keys.NUMPAD_1: return GLFW_KEY_KP_1;
		case Input.Keys.NUMPAD_2: return GLFW_KEY_KP_2;
		case Input.Keys.NUMPAD_3: return GLFW_KEY_KP_3;
		case Input.Keys.NUMPAD_4: return GLFW_KEY_KP_4;
		case Input.Keys.NUMPAD_5: return GLFW_KEY_KP_5;
		case Input.Keys.NUMPAD_6: return GLFW_KEY_KP_6;
		case Input.Keys.NUMPAD_7: return GLFW_KEY_KP_7;
		case Input.Keys.NUMPAD_8: return GLFW_KEY_KP_8;
		case Input.Keys.NUMPAD_9: return GLFW_KEY_KP_9;
		// Operator keys map back to the keypad variants (see class note above).
		case Input.Keys.SLASH: return GLFW_KEY_KP_DIVIDE;
		case Input.Keys.STAR: return GLFW_KEY_KP_MULTIPLY;
		case Input.Keys.MINUS: return GLFW_KEY_KP_SUBTRACT;
		case Input.Keys.PLUS: return GLFW_KEY_KP_ADD;
		case Input.Keys.SHIFT_LEFT: return GLFW_KEY_LEFT_SHIFT;
		case Input.Keys.CONTROL_LEFT: return GLFW_KEY_LEFT_CONTROL;
		case Input.Keys.ALT_LEFT: return GLFW_KEY_LEFT_ALT;
		case Input.Keys.SYM: return GLFW_KEY_LEFT_SUPER;
		case Input.Keys.SHIFT_RIGHT: return GLFW_KEY_RIGHT_SHIFT;
		case Input.Keys.CONTROL_RIGHT: return GLFW_KEY_RIGHT_CONTROL;
		case Input.Keys.ALT_RIGHT: return GLFW_KEY_RIGHT_ALT;
		case Input.Keys.MENU: return GLFW_KEY_MENU;
		default:
			return 0;
		}
	}
	/** Receives GLFW input callbacks and translates them into {@link InputProcessor} calls.
	 * @author Nathan Sweet */
	static class GlfwInputProcessor extends GlfwCallbackAdapter {
		// Last cursor position seen, and the count of currently-held mouse buttons (drag vs move).
		private int mouseX, mouseY, mousePressed;
		// Last character delivered by a GLFW character event; replayed for key-repeat.
		private char lastCharacter;
		private InputProcessor processor;

		public GlfwInputProcessor (InputProcessor processor) {
			if (processor == null) throw new IllegalArgumentException("processor cannot be null.");
			this.processor = processor;
		}

		public void key (long window, int key, int action) {
			switch (action) {
			case GLFW_PRESS:
				key = getGdxKeyCode(key);
				processor.keyDown(key);
				lastCharacter = 0;
				// Keys like tab/backspace type a character but produce no GLFW character event; synthesize one.
				char character = characterForKeyCode(key);
				if (character != 0) character(window, character);
				break;
			case GLFW_RELEASE:
				processor.keyUp(getGdxKeyCode(key));
				break;
			case GLFW_REPEAT:
				// Repeat re-types the last character; GLFW does not resend character events on repeat.
				if (lastCharacter != 0) processor.keyTyped(lastCharacter);
				break;
			}
		}

		public void character (long window, char character) {
			lastCharacter = character;
			processor.keyTyped(character);
		}

		public void scroll (long window, double scrollX, double scrollY) {
			// Negated so scrolling "down" yields a positive amount, matching libgdx convention elsewhere.
			processor.scrolled((int)-Math.signum(scrollY));
		}

		public void mouseButton (long window, int button, boolean pressed) {
			if (pressed) {
				mousePressed++;
				processor.touchDown(mouseX, mouseY, 0, button);
			} else {
				mousePressed = Math.max(0, mousePressed - 1);
				processor.touchUp(mouseX, mouseY, 0, button);
			}
		}

		public void cursorPos (long window, int x, int y) {
			mouseX = x;
			mouseY = y;
			// Any held button turns a move into a drag.
			if (mousePressed > 0)
				processor.touchDragged(x, y, 0);
			else
				processor.mouseMoved(x, y);
		}
	}
}
| lordjone/libgdx | backends/gdx-backend-jglfw/src/com/badlogic/gdx/backends/jglfw/JglfwInput.java | Java | apache-2.0 | 22,979 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.protonj2.codec;
import org.apache.qpid.protonj2.buffer.ProtonBuffer;
/**
 * Retains Encoder state information either between calls or across encode iterations.
 * <p>
 * NOTE(review): implementations may keep intermediate buffers between calls; nothing here
 * guarantees thread-safety — confirm per implementation before sharing across threads.
 */
public interface EncoderState {

    /**
     * @return the Encoder instance that create this state object.
     */
    Encoder getEncoder();

    /**
     * Resets any intermediate state back to default values.
     *
     * @return this {@link EncoderState} instance.
     */
    EncoderState reset();

    /**
     * Encodes the given sequence of characters in UTF8 to the given buffer.
     *
     * @param buffer
     *      A ProtonBuffer where the UTF-8 encoded bytes should be written.
     * @param sequence
     *      A {@link CharSequence} representing the UTF-8 bytes to encode
     *
     * @return a reference to the encoding buffer for chaining
     *
     * @throws EncodeException if an error occurs while encoding the {@link CharSequence}
     */
    ProtonBuffer encodeUTF8(ProtonBuffer buffer, CharSequence sequence) throws EncodeException;
}
| tabish121/proton4j | protonj2/src/main/java/org/apache/qpid/protonj2/codec/EncoderState.java | Java | apache-2.0 | 1,858 |
#!/usr/bin/env python
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines input readers for MapReduce."""
__all__ = [
"AbstractDatastoreInputReader",
"ALLOW_CHECKPOINT",
"BadReaderParamsError",
"BlobstoreLineInputReader",
"BlobstoreZipInputReader",
"BlobstoreZipLineInputReader",
"COUNTER_IO_READ_BYTES",
"COUNTER_IO_READ_MSEC",
"DatastoreEntityInputReader",
"DatastoreInputReader",
"DatastoreKeyInputReader",
"GoogleCloudStorageInputReader",
"GoogleCloudStorageRecordInputReader",
"RandomStringInputReader",
"RawDatastoreInputReader",
"Error",
"InputReader",
"LogInputReader",
"NamespaceInputReader",
"GoogleCloudStorageLineInputReader",
"GoogleCloudStorageZipInputReader",
"GoogleCloudStorageZipLineInputReader"
]
# pylint: disable=protected-access
import base64
import copy
import logging
import pickle
import random
import string
import StringIO
import time
import zipfile
from google.net.proto import ProtocolBuffer
from google.appengine.ext import ndb
from google.appengine.api import datastore
from google.appengine.api import logservice
from google.appengine.api.logservice import log_service_pb
from google.appengine.ext import blobstore
from google.appengine.ext import db
from google.appengine.ext import key_range
from google.appengine.ext.db import metadata
from mapreduce import context
from mapreduce import datastore_range_iterators as db_iters
from mapreduce import errors
from mapreduce import json_util
from mapreduce import key_ranges
from mapreduce import kv_pb
from mapreduce import model
from mapreduce import namespace_range
from mapreduce import operation
from mapreduce import property_range
from mapreduce import records
from mapreduce import util
# TODO(user): Cleanup imports if/when cloudstorage becomes part of runtime.
try:
# Check if the full cloudstorage package exists. The stub part is in runtime.
cloudstorage = None
import cloudstorage
if hasattr(cloudstorage, "_STUB"):
cloudstorage = None
except ImportError:
pass # CloudStorage library not available
# Attempt to load cloudstorage from the bundle (availble in some tests)
if cloudstorage is None:
try:
import cloudstorage
except ImportError:
pass # CloudStorage library really not available
# Classes moved to errors module. Copied here for compatibility.
Error = errors.Error
BadReaderParamsError = errors.BadReaderParamsError
# Counter name for number of bytes read.
COUNTER_IO_READ_BYTES = "io-read-bytes"
# Counter name for milliseconds spent reading data.
COUNTER_IO_READ_MSEC = "io-read-msec"
# Special value that can be yielded by InputReaders if they want to give the
# framework an opportunity to save the state of the mapreduce without having
# to yield an actual value to the handler.
ALLOW_CHECKPOINT = object()
"""
InputReader's lifecycle is the following:
0) validate called to validate mapper specification.
1) split_input splits the input for each shard.
2) __init__ is called for each shard. It takes the input, including ranges,
sent by the split_input.
3) from_json()/to_json() are used to persist writer's state across
multiple slices.
4) __str__ is the string representation of the reader.
5) next is called to send one piece of data to the user defined mapper.
It will continue to return data until it reaches the end of the range
specified in the split_input
"""
class InputReader(json_util.JsonMixin):
  """Abstract base class for input readers.

  InputReaders have the following properties:
   * They are created by using the split_input method to generate a set of
     InputReaders from a MapperSpec.
   * They generate inputs to the mapper via the iterator interface.
   * After creation, they can be serialized and resumed using the JsonMixin
     interface.
   * They are cast to string for a user-readable description; it may be
     valuable to implement __str__.
  """

  # When expand_parameters is False, then value yielded by reader is passed
  # to handler as is. If it's true, then *value is passed, expanding arguments
  # and letting handler be a multi-parameter function.
  expand_parameters = False

  # Mapreduce parameters.
  _APP_PARAM = "_app"
  NAMESPACE_PARAM = "namespace"
  NAMESPACES_PARAM = "namespaces"  # Obsolete.

  def __iter__(self):
    # An InputReader is its own iterator; subclasses implement next().
    return self

  def next(self):
    """Returns the next input from this input reader as a key, value pair.

    Returns:
      The next input from this input reader.
    """
    raise NotImplementedError("next() not implemented in %s" % self.__class__)

  @classmethod
  def from_json(cls, input_shard_state):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      input_shard_state: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    raise NotImplementedError("from_json() not implemented in %s" % cls)

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    raise NotImplementedError("to_json() not implemented in %s" %
                              self.__class__)

  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of input readers.

    This method creates a list of input readers, each for one shard.
    It attempts to split inputs among readers evenly.

    Args:
      mapper_spec: model.MapperSpec specifies the inputs and additional
        parameters to define the behavior of input readers.

    Returns:
      A list of InputReaders. None or [] when no input data can be found.
    """
    raise NotImplementedError("split_input() not implemented in %s" % cls)

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Input reader parameters are expected to be passed as "input_reader"
    subdictionary in mapper_spec.params.

    Pre 1.6.4 API mixes input reader parameters with all other parameters. Thus
    to be compatible, input readers check mapper_spec.params as well and
    issue a warning if the "input_reader" subdictionary is not present.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    # Base check shared by all readers; subclasses extend with their own rules.
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Input reader class mismatch")
def _get_params(mapper_spec, allowed_keys=None, allow_old=True):
  """Extracts input reader parameters from a mapreduce specification.

  Parameters are normally read from the "input_reader" subdictionary of the
  mapper parameters. For compatibility with the pre-1.6.4 API, parameters mixed
  directly into mapper_spec.params are also accepted unless allowed_keys is
  given or allow_old is False.

  Args:
    mapper_spec: The MapperSpec for the job.
    allowed_keys: set of all allowed keys in parameters as strings. If it is
      not None, then parameters are expected to be in a separate "input_reader"
      subdictionary of mapper_spec parameters.
    allow_old: Allow parameters to exist outside of the input_reader
      subdictionary for compatability.

  Returns:
    mapper parameters as dict.

  Raises:
    BadReaderParamsError: if parameters are invalid/missing or not allowed.
  """
  raw_params = mapper_spec.params
  if "input_reader" in raw_params:
    reader_params = raw_params.get("input_reader")
    if not isinstance(reader_params, dict):
      raise errors.BadReaderParamsError(
          "Input reader parameters should be a dictionary")
    params = dict((str(key), value) for key, value in reader_params.iteritems())
  else:
    # Legacy layout: parameters mixed into the top-level mapper params.
    message = ("Input reader's parameters should be specified in "
               "input_reader subdictionary.")
    if not allow_old or allowed_keys:
      raise errors.BadReaderParamsError(message)
    params = dict((str(key), value) for key, value in raw_params.iteritems())
  if allowed_keys:
    unexpected = set(params.keys()) - allowed_keys
    if unexpected:
      raise errors.BadReaderParamsError(
          "Invalid input_reader parameters: %s" % ",".join(unexpected))
  return params
class AbstractDatastoreInputReader(InputReader):
"""Abstract class for datastore input readers."""
# Number of entities to fetch at once while doing scanning.
_BATCH_SIZE = 50
# Maximum number of shards we'll create.
_MAX_SHARD_COUNT = 256
# The maximum number of namespaces that will be sharded by datastore key
# before switching to a strategy where sharding is done lexographically by
# namespace.
MAX_NAMESPACES_FOR_KEY_SHARD = 10
# reader parameters.
ENTITY_KIND_PARAM = "entity_kind"
KEYS_ONLY_PARAM = "keys_only"
BATCH_SIZE_PARAM = "batch_size"
KEY_RANGE_PARAM = "key_range"
FILTERS_PARAM = "filters"
_KEY_RANGE_ITER_CLS = db_iters.AbstractKeyRangeIterator
def __init__(self, iterator):
"""Create new DatastoreInputReader object.
This is internal constructor. Use split_input to create readers instead.
Args:
iterator: an iterator that generates objects for this input reader.
"""
self._iter = iterator
def __iter__(self):
"""Yields whatever internal iterator yields."""
for o in self._iter:
yield o
def __str__(self):
"""Returns the string representation of this InputReader."""
return repr(self._iter)
def to_json(self):
"""Serializes input reader to json compatible format.
Returns:
all the data in json-compatible map.
"""
return self._iter.to_json()
@classmethod
def from_json(cls, json):
"""Create new DatastoreInputReader from json, encoded by to_json.
Args:
json: json representation of DatastoreInputReader.
Returns:
an instance of DatastoreInputReader with all data deserialized from json.
"""
return cls(db_iters.RangeIteratorFactory.from_json(json))
@classmethod
def _get_query_spec(cls, mapper_spec):
"""Construct a model.QuerySpec from model.MapperSpec."""
params = _get_params(mapper_spec)
entity_kind = params[cls.ENTITY_KIND_PARAM]
filters = params.get(cls.FILTERS_PARAM)
app = params.get(cls._APP_PARAM)
ns = params.get(cls.NAMESPACE_PARAM)
return model.QuerySpec(
entity_kind=cls._get_raw_entity_kind(entity_kind),
keys_only=bool(params.get(cls.KEYS_ONLY_PARAM, False)),
filters=filters,
batch_size=int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE)),
model_class_path=entity_kind,
app=app,
ns=ns)
@classmethod
def split_input(cls, mapper_spec):
"""Inherit doc."""
shard_count = mapper_spec.shard_count
query_spec = cls._get_query_spec(mapper_spec)
namespaces = None
if query_spec.ns is not None:
k_ranges = cls._to_key_ranges_by_shard(
query_spec.app, [query_spec.ns], shard_count, query_spec)
else:
ns_keys = namespace_range.get_namespace_keys(
query_spec.app, cls.MAX_NAMESPACES_FOR_KEY_SHARD + 1)
# No namespace means the app may have some data but those data are not
# visible yet. Just return.
if not ns_keys:
return
# If the number of ns is small, we shard each ns by key and assign each
# shard a piece of a ns.
elif len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:
namespaces = [ns_key.name() or "" for ns_key in ns_keys]
k_ranges = cls._to_key_ranges_by_shard(
query_spec.app, namespaces, shard_count, query_spec)
# When number of ns is large, we can only split lexicographically by ns.
else:
ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
contiguous=False,
can_query=lambda: True,
_app=query_spec.app)
k_ranges = [key_ranges.KeyRangesFactory.create_from_ns_range(ns_range)
for ns_range in ns_ranges]
iters = [db_iters.RangeIteratorFactory.create_key_ranges_iterator(
r, query_spec, cls._KEY_RANGE_ITER_CLS) for r in k_ranges]
return [cls(i) for i in iters]
@classmethod
def _to_key_ranges_by_shard(cls, app, namespaces, shard_count, query_spec):
"""Get a list of key_ranges.KeyRanges objects, one for each shard.
This method uses scatter index to split each namespace into pieces
and assign those pieces to shards.
Args:
app: app_id in str.
namespaces: a list of namespaces in str.
shard_count: number of shards to split.
query_spec: model.QuerySpec.
Returns:
a list of key_ranges.KeyRanges objects.
"""
key_ranges_by_ns = []
# Split each ns into n splits. If a ns doesn't have enough scatter to
# split into n, the last few splits are None.
for namespace in namespaces:
ranges = cls._split_ns_by_scatter(
shard_count,
namespace,
query_spec.entity_kind,
app)
# The nth split of each ns will be assigned to the nth shard.
# Shuffle so that None are not all by the end.
random.shuffle(ranges)
key_ranges_by_ns.append(ranges)
# KeyRanges from different namespaces might be very different in size.
# Use round robin to make sure each shard can have at most one split
# or a None from a ns.
ranges_by_shard = [[] for _ in range(shard_count)]
for ranges in key_ranges_by_ns:
for i, k_range in enumerate(ranges):
if k_range:
ranges_by_shard[i].append(k_range)
key_ranges_by_shard = []
for ranges in ranges_by_shard:
if ranges:
key_ranges_by_shard.append(key_ranges.KeyRangesFactory.create_from_list(
ranges))
return key_ranges_by_shard
@classmethod
def _split_ns_by_scatter(cls,
shard_count,
namespace,
raw_entity_kind,
app):
"""Split a namespace by scatter index into key_range.KeyRange.
TODO(user): Power this with key_range.KeyRange.compute_split_points.
Args:
shard_count: number of shards.
namespace: namespace name to split. str.
raw_entity_kind: low level datastore API entity kind.
app: app id in str.
Returns:
A list of key_range.KeyRange objects. If there are not enough entities to
splits into requested shards, the returned list will contain KeyRanges
ordered lexicographically with any Nones appearing at the end.
"""
if shard_count == 1:
# With one shard we don't need to calculate any split points at all.
return [key_range.KeyRange(namespace=namespace, _app=app)]
ds_query = datastore.Query(kind=raw_entity_kind,
namespace=namespace,
_app=app,
keys_only=True)
ds_query.Order("__scatter__")
oversampling_factor = 32
random_keys = ds_query.Get(shard_count * oversampling_factor)
if not random_keys:
# There are no entities with scatter property. We have no idea
# how to split.
return ([key_range.KeyRange(namespace=namespace, _app=app)] +
[None] * (shard_count - 1))
random_keys.sort()
if len(random_keys) >= shard_count:
# We've got a lot of scatter values. Sample them down.
random_keys = cls._choose_split_points(random_keys, shard_count)
k_ranges = []
k_ranges.append(key_range.KeyRange(
key_start=None,
key_end=random_keys[0],
direction=key_range.KeyRange.ASC,
include_start=False,
include_end=False,
namespace=namespace,
_app=app))
for i in range(0, len(random_keys) - 1):
k_ranges.append(key_range.KeyRange(
key_start=random_keys[i],
key_end=random_keys[i + 1],
direction=key_range.KeyRange.ASC,
include_start=True,
include_end=False,
namespace=namespace,
_app=app))
k_ranges.append(key_range.KeyRange(
key_start=random_keys[-1],
key_end=None,
direction=key_range.KeyRange.ASC,
include_start=True,
include_end=False,
namespace=namespace,
_app=app))
if len(k_ranges) < shard_count:
# We need to have as many shards as it was requested. Add some Nones.
k_ranges += [None] * (shard_count - len(k_ranges))
return k_ranges
@classmethod
def _choose_split_points(cls, sorted_keys, shard_count):
"""Returns the best split points given a random set of datastore.Keys."""
assert len(sorted_keys) >= shard_count
index_stride = len(sorted_keys) / float(shard_count)
return [sorted_keys[int(round(index_stride * i))]
for i in range(1, shard_count)]
@classmethod
def validate(cls, mapper_spec):
"""Inherit docs."""
params = _get_params(mapper_spec)
if cls.ENTITY_KIND_PARAM not in params:
raise BadReaderParamsError("Missing input reader parameter 'entity_kind'")
if cls.BATCH_SIZE_PARAM in params:
try:
batch_size = int(params[cls.BATCH_SIZE_PARAM])
if batch_size < 1:
raise BadReaderParamsError("Bad batch size: %s" % batch_size)
except ValueError, e:
raise BadReaderParamsError("Bad batch size: %s" % e)
try:
bool(params.get(cls.KEYS_ONLY_PARAM, False))
except:
raise BadReaderParamsError("keys_only expects a boolean value but got %s",
params[cls.KEYS_ONLY_PARAM])
if cls.NAMESPACE_PARAM in params:
if not isinstance(params[cls.NAMESPACE_PARAM],
(str, unicode, type(None))):
raise BadReaderParamsError(
"Expected a single namespace string")
if cls.NAMESPACES_PARAM in params:
raise BadReaderParamsError("Multiple namespaces are no longer supported")
if cls.FILTERS_PARAM in params:
filters = params[cls.FILTERS_PARAM]
if not isinstance(filters, list):
raise BadReaderParamsError("Expected list for filters parameter")
for f in filters:
if not isinstance(f, (tuple, list)):
raise BadReaderParamsError("Filter should be a tuple or list: %s", f)
if len(f) != 3:
raise BadReaderParamsError("Filter should be a 3-tuple: %s", f)
prop, op, _ = f
if not isinstance(prop, basestring):
raise BadReaderParamsError("Property should be string: %s", prop)
if not isinstance(op, basestring):
raise BadReaderParamsError("Operator should be string: %s", op)
@classmethod
def _get_raw_entity_kind(cls, entity_kind_or_model_classpath):
"""Returns the entity kind to use with low level datastore calls.
Args:
entity_kind_or_model_classpath: user specified entity kind or model
classpath.
Returns:
the entity kind in str to use with low level datastore calls.
"""
return entity_kind_or_model_classpath
class RawDatastoreInputReader(AbstractDatastoreInputReader):
  """Iterates over an entity kind and yields datastore.Entity."""

  _KEY_RANGE_ITER_CLS = db_iters.KeyRangeEntityIterator

  @classmethod
  def validate(cls, mapper_spec):
    """Inherit docs."""
    super(RawDatastoreInputReader, cls).validate(mapper_spec)
    params = _get_params(mapper_spec)
    entity_kind = params[cls.ENTITY_KIND_PARAM]
    # A dotted name usually means a model classpath was supplied; this reader
    # treats the value as a literal kind, so warn rather than fail.
    if "." in entity_kind:
      logging.warning(
          ". detected in entity kind %s specified for reader %s."
          "Assuming entity kind contains the dot.",
          entity_kind, cls.__name__)
    # Parent validate already guarantees filters, when present, form a list
    # of 3-tuples; this reader additionally restricts them to equality.
    for filter_spec in params.get(cls.FILTERS_PARAM, []):
      if filter_spec[1] != "=":
        raise BadReaderParamsError(
            "Only equality filters are supported: %s", filter_spec)
class DatastoreInputReader(AbstractDatastoreInputReader):
"""Iterates over a Model and yields model instances.
Supports both db.model and ndb.model.
"""
_KEY_RANGE_ITER_CLS = db_iters.KeyRangeModelIterator
@classmethod
def _get_raw_entity_kind(cls, model_classpath):
entity_type = util.for_name(model_classpath)
if isinstance(entity_type, db.Model):
return entity_type.kind()
elif isinstance(entity_type, (ndb.Model, ndb.MetaModel)):
# pylint: disable=protected-access
return entity_type._get_kind()
else:
return util.get_short_name(model_classpath)
@classmethod
def validate(cls, mapper_spec):
"""Inherit docs."""
super(DatastoreInputReader, cls).validate(mapper_spec)
params = _get_params(mapper_spec)
entity_kind = params[cls.ENTITY_KIND_PARAM]
# Fail fast if Model cannot be located.
try:
model_class = util.for_name(entity_kind)
except ImportError, e:
raise BadReaderParamsError("Bad entity kind: %s" % e)
if cls.FILTERS_PARAM in params:
filters = params[cls.FILTERS_PARAM]
if issubclass(model_class, db.Model):
cls._validate_filters(filters, model_class)
else:
cls._validate_filters_ndb(filters, model_class)
property_range.PropertyRange(filters, entity_kind)
@classmethod
def _validate_filters(cls, filters, model_class):
"""Validate user supplied filters.
Validate filters are on existing properties and filter values
have valid semantics.
Args:
filters: user supplied filters. Each filter should be a list or tuple of
format (<property_name_as_str>, <query_operator_as_str>,
<value_of_certain_type>). Value type is up to the property's type.
model_class: the db.Model class for the entity type to apply filters on.
Raises:
BadReaderParamsError: if any filter is invalid in any way.
"""
if not filters:
return
properties = model_class.properties()
for f in filters:
prop, _, val = f
if prop not in properties:
raise errors.BadReaderParamsError(
"Property %s is not defined for entity type %s",
prop, model_class.kind())
# Validate the value of each filter. We need to know filters have
# valid value to carry out splits.
try:
properties[prop].validate(val)
except db.BadValueError, e:
raise errors.BadReaderParamsError(e)
@classmethod
# pylint: disable=protected-access
def _validate_filters_ndb(cls, filters, model_class):
"""Validate ndb.Model filters."""
if not filters:
return
properties = model_class._properties
for f in filters:
prop, _, val = f
if prop not in properties:
raise errors.BadReaderParamsError(
"Property %s is not defined for entity type %s",
prop, model_class._get_kind())
# Validate the value of each filter. We need to know filters have
# valid value to carry out splits.
try:
properties[prop]._do_validate(val)
except db.BadValueError, e:
raise errors.BadReaderParamsError(e)
@classmethod
def split_input(cls, mapper_spec):
"""Inherit docs."""
shard_count = mapper_spec.shard_count
query_spec = cls._get_query_spec(mapper_spec)
if not property_range.should_shard_by_property_range(query_spec.filters):
return super(DatastoreInputReader, cls).split_input(mapper_spec)
p_range = property_range.PropertyRange(query_spec.filters,
query_spec.model_class_path)
p_ranges = p_range.split(shard_count)
# User specified a namespace.
if query_spec.ns:
ns_range = namespace_range.NamespaceRange(
namespace_start=query_spec.ns,
namespace_end=query_spec.ns,
_app=query_spec.app)
ns_ranges = [copy.copy(ns_range) for _ in p_ranges]
else:
ns_keys = namespace_range.get_namespace_keys(
query_spec.app, cls.MAX_NAMESPACES_FOR_KEY_SHARD + 1)
if not ns_keys:
return
# User doesn't specify ns but the number of ns is small.
# We still split by property range.
if len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:
ns_ranges = [namespace_range.NamespaceRange(_app=query_spec.app)
for _ in p_ranges]
# Lots of namespaces. Split by ns.
else:
ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
contiguous=False,
can_query=lambda: True,
_app=query_spec.app)
p_ranges = [copy.copy(p_range) for _ in ns_ranges]
assert len(p_ranges) == len(ns_ranges)
iters = [
db_iters.RangeIteratorFactory.create_property_range_iterator(
p, ns, query_spec) for p, ns in zip(p_ranges, ns_ranges)]
return [cls(i) for i in iters]
class DatastoreKeyInputReader(RawDatastoreInputReader):
  """Iterates over an entity kind and yields datastore.Key objects."""

  # Keys-only iteration; entities themselves are never fetched.
  _KEY_RANGE_ITER_CLS = db_iters.KeyRangeKeyIterator
# For backward compatibility: keep the reader's old public name as an alias
# so existing job configurations that reference it continue to resolve.
DatastoreEntityInputReader = RawDatastoreInputReader
# TODO(user): Remove this after the only remaining dependency
# (GroomerMarkReader) is migrated off of it.
class _OldAbstractDatastoreInputReader(InputReader):
"""Abstract base class for classes that iterate over datastore entities.
Concrete subclasses must implement _iter_key_range(self, k_range). See the
docstring for that method for details.
"""
# Number of entities to fetch at once while doing scanning.
_BATCH_SIZE = 50
# Maximum number of shards we'll create.
_MAX_SHARD_COUNT = 256
# __scatter__ oversampling factor
_OVERSAMPLING_FACTOR = 32
# The maximum number of namespaces that will be sharded by datastore key
# before switching to a strategy where sharding is done lexographically by
# namespace.
MAX_NAMESPACES_FOR_KEY_SHARD = 10
# Mapreduce parameters.
ENTITY_KIND_PARAM = "entity_kind"
KEYS_ONLY_PARAM = "keys_only"
BATCH_SIZE_PARAM = "batch_size"
KEY_RANGE_PARAM = "key_range"
NAMESPACE_RANGE_PARAM = "namespace_range"
CURRENT_KEY_RANGE_PARAM = "current_key_range"
FILTERS_PARAM = "filters"
# TODO(user): Add support for arbitrary queries. It's not possible to
# support them without cursors since right now you can't even serialize query
# definition.
# pylint: disable=redefined-outer-name
def __init__(self,
entity_kind,
key_ranges=None,
ns_range=None,
batch_size=_BATCH_SIZE,
current_key_range=None,
filters=None):
"""Create new AbstractDatastoreInputReader object.
This is internal constructor. Use split_query in a concrete class instead.
Args:
entity_kind: entity kind as string.
key_ranges: a sequence of key_range.KeyRange instances to process. Only
one of key_ranges or ns_range can be non-None.
ns_range: a namespace_range.NamespaceRange to process. Only one of
key_ranges or ns_range can be non-None.
batch_size: size of read batch as int.
current_key_range: the current key_range.KeyRange being processed.
filters: optional list of filters to apply to the query. Each filter is
a tuple: (<property_name_as_str>, <query_operation_as_str>, <value>).
User filters are applied first.
"""
assert key_ranges is not None or ns_range is not None, (
"must specify one of 'key_ranges' or 'ns_range'")
assert key_ranges is None or ns_range is None, (
"can't specify both 'key_ranges ' and 'ns_range'")
self._entity_kind = entity_kind
# Reverse the KeyRanges so they can be processed in order as a stack of
# work items.
self._key_ranges = key_ranges and list(reversed(key_ranges))
self._ns_range = ns_range
self._batch_size = int(batch_size)
self._current_key_range = current_key_range
self._filters = filters
@classmethod
def _get_raw_entity_kind(cls, entity_kind):
if "." in entity_kind:
logging.warning(
". detected in entity kind %s specified for reader %s."
"Assuming entity kind contains the dot.",
entity_kind, cls.__name__)
return entity_kind
def __iter__(self):
"""Iterates over the given KeyRanges or NamespaceRange.
This method iterates over the given KeyRanges or NamespaceRange and sets
the self._current_key_range to the KeyRange currently being processed. It
then delegates to the _iter_key_range method to yield that actual
results.
Yields:
Forwards the objects yielded by the subclasses concrete _iter_key_range()
method. The caller must consume the result yielded because self.to_json()
will not include it.
"""
if self._key_ranges is not None:
for o in self._iter_key_ranges():
yield o
elif self._ns_range is not None:
for o in self._iter_ns_range():
yield o
else:
assert False, "self._key_ranges and self._ns_range are both None"
def _iter_key_ranges(self):
"""Iterates over self._key_ranges, delegating to self._iter_key_range()."""
while True:
if self._current_key_range is None:
if self._key_ranges:
self._current_key_range = self._key_ranges.pop()
# The most recently popped key_range may be None, so continue here
# to find the next keyrange that's valid.
continue
else:
break
for key, o in self._iter_key_range(
copy.deepcopy(self._current_key_range)):
# The caller must consume yielded values so advancing the KeyRange
# before yielding is safe.
self._current_key_range.advance(key)
yield o
self._current_key_range = None
def _iter_ns_range(self):
"""Iterates over self._ns_range, delegating to self._iter_key_range()."""
while True:
if self._current_key_range is None:
query = self._ns_range.make_datastore_query()
namespace_result = query.Get(1)
if not namespace_result:
break
namespace = namespace_result[0].name() or ""
self._current_key_range = key_range.KeyRange(
namespace=namespace, _app=self._ns_range.app)
yield ALLOW_CHECKPOINT
for key, o in self._iter_key_range(
copy.deepcopy(self._current_key_range)):
# The caller must consume yielded values so advancing the KeyRange
# before yielding is safe.
self._current_key_range.advance(key)
yield o
if (self._ns_range.is_single_namespace or
self._current_key_range.namespace == self._ns_range.namespace_end):
break
self._ns_range = self._ns_range.with_start_after(
self._current_key_range.namespace)
self._current_key_range = None
def _iter_key_range(self, k_range):
"""Yields a db.Key and the value that should be yielded by self.__iter__().
Args:
k_range: The key_range.KeyRange to iterate over.
Yields:
A 2-tuple containing the last db.Key processed and the value that should
be yielded by __iter__. The returned db.Key will be used to determine the
InputReader's current position in self._current_key_range.
"""
raise NotImplementedError("_iter_key_range() not implemented in %s" %
self.__class__)
def __str__(self):
"""Returns the string representation of this InputReader."""
if self._ns_range is None:
return repr(self._key_ranges)
else:
return repr(self._ns_range)
@classmethod
def _choose_split_points(cls, sorted_keys, shard_count):
"""Returns the best split points given a random set of db.Keys."""
assert len(sorted_keys) >= shard_count
index_stride = len(sorted_keys) / float(shard_count)
return [sorted_keys[int(round(index_stride * i))]
for i in range(1, shard_count)]
# TODO(user): use query splitting functionality when it becomes available
# instead.
@classmethod
def _split_input_from_namespace(cls, app, namespace, entity_kind,
shard_count):
"""Helper for _split_input_from_params.
If there are not enough Entities to make all of the given shards, the
returned list of KeyRanges will include Nones. The returned list will
contain KeyRanges ordered lexographically with any Nones appearing at the
end.
Args:
app: the app.
namespace: the namespace.
entity_kind: entity kind as string.
shard_count: the number of shards.
Returns:
KeyRange objects.
"""
raw_entity_kind = cls._get_raw_entity_kind(entity_kind)
if shard_count == 1:
# With one shard we don't need to calculate any splitpoints at all.
return [key_range.KeyRange(namespace=namespace, _app=app)]
ds_query = datastore.Query(kind=raw_entity_kind,
namespace=namespace,
_app=app,
keys_only=True)
ds_query.Order("__scatter__")
random_keys = ds_query.Get(shard_count * cls._OVERSAMPLING_FACTOR)
if not random_keys:
# There are no entities with scatter property. We have no idea
# how to split.
return ([key_range.KeyRange(namespace=namespace, _app=app)] +
[None] * (shard_count - 1))
random_keys.sort()
if len(random_keys) >= shard_count:
# We've got a lot of scatter values. Sample them down.
random_keys = cls._choose_split_points(random_keys, shard_count)
# pylint: disable=redefined-outer-name
key_ranges = []
key_ranges.append(key_range.KeyRange(
key_start=None,
key_end=random_keys[0],
direction=key_range.KeyRange.ASC,
include_start=False,
include_end=False,
namespace=namespace,
_app=app))
for i in range(0, len(random_keys) - 1):
key_ranges.append(key_range.KeyRange(
key_start=random_keys[i],
key_end=random_keys[i + 1],
direction=key_range.KeyRange.ASC,
include_start=True,
include_end=False,
namespace=namespace,
_app=app))
key_ranges.append(key_range.KeyRange(
key_start=random_keys[-1],
key_end=None,
direction=key_range.KeyRange.ASC,
include_start=True,
include_end=False,
namespace=namespace,
_app=app))
if len(key_ranges) < shard_count:
# We need to have as many shards as it was requested. Add some Nones.
key_ranges += [None] * (shard_count - len(key_ranges))
return key_ranges
@classmethod
def _split_input_from_params(cls, app, namespaces, entity_kind_name,
params, shard_count):
"""Return input reader objects. Helper for split_input."""
# pylint: disable=redefined-outer-name
key_ranges = [] # KeyRanges for all namespaces
for namespace in namespaces:
key_ranges.extend(
cls._split_input_from_namespace(app,
namespace,
entity_kind_name,
shard_count))
# Divide the KeyRanges into shard_count shards. The KeyRanges for different
# namespaces might be very different in size so the assignment of KeyRanges
# to shards is done round-robin.
shared_ranges = [[] for _ in range(shard_count)]
for i, k_range in enumerate(key_ranges):
shared_ranges[i % shard_count].append(k_range)
batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))
return [cls(entity_kind_name,
key_ranges=key_ranges,
ns_range=None,
batch_size=batch_size)
for key_ranges in shared_ranges if key_ranges]
@classmethod
def validate(cls, mapper_spec):
"""Validates mapper spec and all mapper parameters.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
"""
if mapper_spec.input_reader_class() != cls:
raise BadReaderParamsError("Input reader class mismatch")
params = _get_params(mapper_spec)
if cls.ENTITY_KIND_PARAM not in params:
raise BadReaderParamsError("Missing mapper parameter 'entity_kind'")
if cls.BATCH_SIZE_PARAM in params:
try:
batch_size = int(params[cls.BATCH_SIZE_PARAM])
if batch_size < 1:
raise BadReaderParamsError("Bad batch size: %s" % batch_size)
except ValueError, e:
raise BadReaderParamsError("Bad batch size: %s" % e)
if cls.NAMESPACE_PARAM in params:
if not isinstance(params[cls.NAMESPACE_PARAM],
(str, unicode, type(None))):
raise BadReaderParamsError(
"Expected a single namespace string")
if cls.NAMESPACES_PARAM in params:
raise BadReaderParamsError("Multiple namespaces are no longer supported")
if cls.FILTERS_PARAM in params:
filters = params[cls.FILTERS_PARAM]
if not isinstance(filters, list):
raise BadReaderParamsError("Expected list for filters parameter")
for f in filters:
if not isinstance(f, (tuple, list)):
raise BadReaderParamsError("Filter should be a tuple or list: %s", f)
if len(f) != 3:
raise BadReaderParamsError("Filter should be a 3-tuple: %s", f)
if not isinstance(f[0], basestring):
raise BadReaderParamsError("First element should be string: %s", f)
if f[1] != "=":
raise BadReaderParamsError(
"Only equality filters are supported: %s", f)
@classmethod
def split_input(cls, mapper_spec):
"""Splits query into shards without fetching query results.
Tries as best as it can to split the whole query result set into equal
shards. Due to difficulty of making the perfect split, resulting shards'
sizes might differ significantly from each other.
Args:
mapper_spec: MapperSpec with params containing 'entity_kind'.
May have 'namespace' in the params as a string containing a single
namespace. If specified then the input reader will only yield values
in the given namespace. If 'namespace' is not given then values from
all namespaces will be yielded. May also have 'batch_size' in the params
to specify the number of entities to process in each batch.
Returns:
A list of InputReader objects. If the query results are empty then the
empty list will be returned. Otherwise, the list will always have a length
equal to number_of_shards but may be padded with Nones if there are too
few results for effective sharding.
"""
params = _get_params(mapper_spec)
entity_kind_name = params[cls.ENTITY_KIND_PARAM]
batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))
shard_count = mapper_spec.shard_count
namespace = params.get(cls.NAMESPACE_PARAM)
app = params.get(cls._APP_PARAM)
filters = params.get(cls.FILTERS_PARAM)
if namespace is None:
# It is difficult to efficiently shard large numbers of namespaces because
# there can be an arbitrary number of them. So the strategy is:
# 1. if there are a small number of namespaces in the datastore then
# generate one KeyRange per namespace per shard and assign each shard a
# KeyRange for every namespace. This should lead to nearly perfect
# sharding.
# 2. if there are a large number of namespaces in the datastore then
# generate one NamespaceRange per worker. This can lead to very bad
# sharding because namespaces can contain very different numbers of
# entities and each NamespaceRange may contain very different numbers
# of namespaces.
namespace_query = datastore.Query("__namespace__",
keys_only=True,
_app=app)
namespace_keys = namespace_query.Get(
limit=cls.MAX_NAMESPACES_FOR_KEY_SHARD + 1)
if len(namespace_keys) > cls.MAX_NAMESPACES_FOR_KEY_SHARD:
ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
contiguous=True,
_app=app)
return [cls(entity_kind_name,
key_ranges=None,
ns_range=ns_range,
batch_size=batch_size,
filters=filters)
for ns_range in ns_ranges]
elif not namespace_keys:
return [cls(entity_kind_name,
key_ranges=None,
ns_range=namespace_range.NamespaceRange(_app=app),
batch_size=shard_count,
filters=filters)]
else:
namespaces = [namespace_key.name() or ""
for namespace_key in namespace_keys]
else:
namespaces = [namespace]
readers = cls._split_input_from_params(
app, namespaces, entity_kind_name, params, shard_count)
if filters:
for reader in readers:
reader._filters = filters
return readers
def to_json(self):
"""Serializes all the data in this query range into json form.
Returns:
all the data in json-compatible map.
"""
if self._key_ranges is None:
key_ranges_json = None
else:
key_ranges_json = []
for k in self._key_ranges:
if k:
key_ranges_json.append(k.to_json())
else:
key_ranges_json.append(None)
if self._ns_range is None:
namespace_range_json = None
else:
namespace_range_json = self._ns_range.to_json_object()
if self._current_key_range is None:
current_key_range_json = None
else:
current_key_range_json = self._current_key_range.to_json()
json_dict = {self.KEY_RANGE_PARAM: key_ranges_json,
self.NAMESPACE_RANGE_PARAM: namespace_range_json,
self.CURRENT_KEY_RANGE_PARAM: current_key_range_json,
self.ENTITY_KIND_PARAM: self._entity_kind,
self.BATCH_SIZE_PARAM: self._batch_size,
self.FILTERS_PARAM: self._filters}
return json_dict
@classmethod
def from_json(cls, json):
"""Create new DatastoreInputReader from the json, encoded by to_json.
Args:
json: json map representation of DatastoreInputReader.
Returns:
an instance of DatastoreInputReader with all data deserialized from json.
"""
if json[cls.KEY_RANGE_PARAM] is None:
# pylint: disable=redefined-outer-name
key_ranges = None
else:
key_ranges = []
for k in json[cls.KEY_RANGE_PARAM]:
if k:
key_ranges.append(key_range.KeyRange.from_json(k))
else:
key_ranges.append(None)
if json[cls.NAMESPACE_RANGE_PARAM] is None:
ns_range = None
else:
ns_range = namespace_range.NamespaceRange.from_json_object(
json[cls.NAMESPACE_RANGE_PARAM])
if json[cls.CURRENT_KEY_RANGE_PARAM] is None:
current_key_range = None
else:
current_key_range = key_range.KeyRange.from_json(
json[cls.CURRENT_KEY_RANGE_PARAM])
return cls(
json[cls.ENTITY_KIND_PARAM],
key_ranges,
ns_range,
json[cls.BATCH_SIZE_PARAM],
current_key_range,
filters=json.get(cls.FILTERS_PARAM))
class BlobstoreLineInputReader(InputReader):
  """Input reader for a newline delimited blob in Blobstore."""

  # TODO(user): Should we set this based on MAX_BLOB_FETCH_SIZE?
  # Read-ahead buffer size (bytes) handed to blobstore.BlobReader.
  _BLOB_BUFFER_SIZE = 64000

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  # Maximum number of blobs to allow.
  _MAX_BLOB_KEYS_COUNT = 246

  # Mapreduce parameters.
  BLOB_KEYS_PARAM = "blob_keys"

  # Serialization parameters (keys used by to_json/from_json).
  INITIAL_POSITION_PARAM = "initial_position"
  END_POSITION_PARAM = "end_position"
  BLOB_KEY_PARAM = "blob_key"
  def __init__(self, blob_key, start_position, end_position):
    """Initializes this instance with the given blob key and character range.

    This BlobstoreInputReader will read from the first record starting
    strictly after start_position until the first record ending at or after
    end_position (exclusive). As an exception, if start_position is 0, then
    this InputReader starts reading at the first record.

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_position: the position to start reading at.
      end_position: a position in the last record to read.
    """
    self._blob_key = blob_key
    self._blob_reader = blobstore.BlobReader(blob_key,
                                             self._BLOB_BUFFER_SIZE,
                                             start_position)
    self._end_position = end_position
    self._has_iterated = False
    # A non-zero start position may point into the middle of a line; that
    # partial line belongs to the previous shard and is skipped in next().
    self._read_before_start = bool(start_position)
def next(self):
"""Returns the next input from as an (offset, line) tuple."""
self._has_iterated = True
if self._read_before_start:
self._blob_reader.readline()
self._read_before_start = False
start_position = self._blob_reader.tell()
if start_position > self._end_position:
raise StopIteration()
line = self._blob_reader.readline()
if not line:
raise StopIteration()
return start_position, line.rstrip("\n")
def to_json(self):
"""Returns an json-compatible input shard spec for remaining inputs."""
new_pos = self._blob_reader.tell()
if self._has_iterated:
new_pos -= 1
return {self.BLOB_KEY_PARAM: self._blob_key,
self.INITIAL_POSITION_PARAM: new_pos,
self.END_POSITION_PARAM: self._end_position}
def __str__(self):
"""Returns the string representation of this BlobstoreLineInputReader."""
return "blobstore.BlobKey(%r):[%d, %d]" % (
self._blob_key, self._blob_reader.tell(), self._end_position)
@classmethod
def from_json(cls, json):
"""Instantiates an instance of this InputReader for the given shard spec."""
return cls(json[cls.BLOB_KEY_PARAM],
json[cls.INITIAL_POSITION_PARAM],
json[cls.END_POSITION_PARAM])
@classmethod
def validate(cls, mapper_spec):
"""Validates mapper spec and all mapper parameters.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
"""
if mapper_spec.input_reader_class() != cls:
raise BadReaderParamsError("Mapper input reader class mismatch")
params = _get_params(mapper_spec)
if cls.BLOB_KEYS_PARAM not in params:
raise BadReaderParamsError("Must specify 'blob_keys' for mapper input")
blob_keys = params[cls.BLOB_KEYS_PARAM]
if isinstance(blob_keys, basestring):
# This is a mechanism to allow multiple blob keys (which do not contain
# commas) in a single string. It may go away.
blob_keys = blob_keys.split(",")
if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:
raise BadReaderParamsError("Too many 'blob_keys' for mapper input")
if not blob_keys:
raise BadReaderParamsError("No 'blob_keys' specified for mapper input")
for blob_key in blob_keys:
blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
if not blob_info:
raise BadReaderParamsError("Could not find blobinfo for key %s" %
blob_key)
@classmethod
def split_input(cls, mapper_spec):
"""Returns a list of shard_count input_spec_shards for input_spec.
Args:
mapper_spec: The mapper specification to split from. Must contain
'blob_keys' parameter with one or more blob keys.
Returns:
A list of BlobstoreInputReaders corresponding to the specified shards.
"""
params = _get_params(mapper_spec)
blob_keys = params[cls.BLOB_KEYS_PARAM]
if isinstance(blob_keys, basestring):
# This is a mechanism to allow multiple blob keys (which do not contain
# commas) in a single string. It may go away.
blob_keys = blob_keys.split(",")
blob_sizes = {}
for blob_key in blob_keys:
blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
blob_sizes[blob_key] = blob_info.size
shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
shards_per_blob = shard_count // len(blob_keys)
if shards_per_blob == 0:
shards_per_blob = 1
chunks = []
for blob_key, blob_size in blob_sizes.items():
blob_chunk_size = blob_size // shards_per_blob
for i in xrange(shards_per_blob - 1):
chunks.append(BlobstoreLineInputReader.from_json(
{cls.BLOB_KEY_PARAM: blob_key,
cls.INITIAL_POSITION_PARAM: blob_chunk_size * i,
cls.END_POSITION_PARAM: blob_chunk_size * (i + 1)}))
chunks.append(BlobstoreLineInputReader.from_json(
{cls.BLOB_KEY_PARAM: blob_key,
cls.INITIAL_POSITION_PARAM: blob_chunk_size * (shards_per_blob - 1),
cls.END_POSITION_PARAM: blob_size}))
return chunks
class BlobstoreZipInputReader(InputReader):
  """Input reader for files from a zip archive stored in the Blobstore.

  Each instance of the reader will read the TOC, from the end of the zip file,
  and then only the contained files which it is responsible for.
  """

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  # Mapreduce parameters.
  BLOB_KEY_PARAM = "blob_key"
  START_INDEX_PARAM = "start_index"
  END_INDEX_PARAM = "end_index"

  def __init__(self, blob_key, start_index, end_index,
               _reader=blobstore.BlobReader):
    """Initializes this instance with the given blob key and file range.

    This BlobstoreZipInputReader will read from the file with index start_index
    up to but not including the file with index end_index.

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_index: the index of the first file to read.
      end_index: the index of the first file that will not be read.
      _reader: a callable that returns a file-like object for reading blobs.
          Used for dependency injection.
    """
    self._blob_key = blob_key
    self._start_index = start_index
    self._end_index = end_index
    self._reader = _reader
    # Both are created lazily on the first call to next().
    self._zip = None
    self._entries = None

  def next(self):
    """Returns the next input from this input reader as (ZipInfo, opener) tuple.

    Returns:
      The next input from this input reader, in the form of a 2-tuple.
      The first element of the tuple is a zipfile.ZipInfo object.
      The second element of the tuple is a zero-argument function that, when
      called, returns the complete body of the file.
    """
    if not self._zip:
      self._zip = zipfile.ZipFile(self._reader(self._blob_key))
      # Get a list of entries, reversed so we can pop entries off in order
      self._entries = self._zip.infolist()[self._start_index:self._end_index]
      self._entries.reverse()
    if not self._entries:
      raise StopIteration()
    entry = self._entries.pop()
    # Advance the serialized position so to_json() resumes after this entry.
    self._start_index += 1
    return (entry, lambda: self._read(entry))

  def _read(self, entry):
    """Read entry content.

    Args:
      entry: zip file entry as zipfile.ZipInfo.

    Returns:
      Entry content as string.
    """
    start_time = time.time()
    content = self._zip.read(entry.filename)

    ctx = context.get()
    if ctx:
      # Report bytes read and elapsed read time to the mapreduce counters.
      operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
      operation.counters.Increment(
          COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)

    return content

  @classmethod
  def from_json(cls, json):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    return cls(json[cls.BLOB_KEY_PARAM],
               json[cls.START_INDEX_PARAM],
               json[cls.END_INDEX_PARAM])

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    return {self.BLOB_KEY_PARAM: self._blob_key,
            self.START_INDEX_PARAM: self._start_index,
            self.END_INDEX_PARAM: self._end_index}

  def __str__(self):
    """Returns the string representation of this BlobstoreZipInputReader."""
    return "blobstore.BlobKey(%r):[%d, %d]" % (
        self._blob_key, self._start_index, self._end_index)

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")
    params = _get_params(mapper_spec)
    if cls.BLOB_KEY_PARAM not in params:
      raise BadReaderParamsError("Must specify 'blob_key' for mapper input")
    blob_key = params[cls.BLOB_KEY_PARAM]
    blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
    if not blob_info:
      raise BadReaderParamsError("Could not find blobinfo for key %s" %
                                 blob_key)

  @classmethod
  def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input shard states for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
          'blob_key' parameter with one blob key.
      _reader: a callable that returns a file-like object for reading blobs.
          Used for dependency injection.

    Returns:
      A list of InputReaders spanning files within the zip.
    """
    params = _get_params(mapper_spec)
    blob_key = params[cls.BLOB_KEY_PARAM]
    zip_input = zipfile.ZipFile(_reader(blob_key))
    zfiles = zip_input.infolist()
    total_size = sum(x.file_size for x in zfiles)
    num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT)
    size_per_shard = total_size // num_shards

    # Break the list of files into sublists, each of approximately
    # size_per_shard bytes.
    shard_start_indexes = [0]
    current_shard_size = 0
    for i, fileinfo in enumerate(zfiles):
      current_shard_size += fileinfo.file_size
      if current_shard_size >= size_per_shard:
        shard_start_indexes.append(i + 1)
        current_shard_size = 0

    # Ensure the final boundary covers any trailing files.
    if shard_start_indexes[-1] != len(zfiles):
      shard_start_indexes.append(len(zfiles))

    # Consecutive boundary pairs become [start_index, end_index) shards.
    return [cls(blob_key, start_index, end_index, _reader)
            for start_index, end_index
            in zip(shard_start_indexes, shard_start_indexes[1:])]
class BlobstoreZipLineInputReader(InputReader):
  """Input reader for newline delimited files in zip archives from Blobstore.

  This has the same external interface as the BlobstoreLineInputReader, in that
  it takes a list of blobs as its input and yields lines to the reader.
  However the blobs themselves are expected to be zip archives of line delimited
  files instead of the files themselves.

  This is useful as many line delimited files gain greatly from compression.
  """

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  # Maximum number of blobs to allow.
  # NOTE(review): 246 looks like a typo for 256 -- confirm before changing.
  _MAX_BLOB_KEYS_COUNT = 246

  # Mapreduce parameters.
  BLOB_KEYS_PARAM = "blob_keys"

  # Serialization parameters.
  BLOB_KEY_PARAM = "blob_key"
  START_FILE_INDEX_PARAM = "start_file_index"
  END_FILE_INDEX_PARAM = "end_file_index"
  OFFSET_PARAM = "offset"

  def __init__(self, blob_key, start_file_index, end_file_index, offset,
               _reader=blobstore.BlobReader):
    """Initializes this instance with the given blob key and file range.

    This BlobstoreZipLineInputReader will read from the file with index
    start_file_index up to but not including the file with index end_file_index.
    It will return lines starting at offset within file[start_file_index]

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_file_index: the index of the first file to read within the zip.
      end_file_index: the index of the first file that will not be read.
      offset: the byte offset within blob_key.zip[start_file_index] to start
        reading. The reader will continue to the end of the file.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.
    """
    self._blob_key = blob_key
    self._start_file_index = start_file_index
    self._end_file_index = end_file_index
    self._initial_offset = offset
    self._reader = _reader
    # All three are populated lazily on the first call to next().
    self._zip = None
    self._entries = None
    self._filestream = None

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")
    params = _get_params(mapper_spec)
    if cls.BLOB_KEYS_PARAM not in params:
      raise BadReaderParamsError("Must specify 'blob_keys' for mapper input")
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
      # This is a mechanism to allow multiple blob keys (which do not contain
      # commas) in a single string. It may go away.
      blob_keys = blob_keys.split(",")
    if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:
      raise BadReaderParamsError("Too many 'blob_keys' for mapper input")
    if not blob_keys:
      raise BadReaderParamsError("No 'blob_keys' specified for mapper input")
    for blob_key in blob_keys:
      # Each key must resolve to an existing blob.
      blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
      if not blob_info:
        raise BadReaderParamsError("Could not find blobinfo for key %s" %
                                   blob_key)

  @classmethod
  def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input readers for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
          'blob_keys' parameter with one or more blob keys.
      _reader: a callable that returns a file-like object for reading blobs.
          Used for dependency injection.

    Returns:
      A list of InputReaders spanning the subfiles within the blobs.
      There will be at least one reader per blob, but it will otherwise
      attempt to keep the expanded size even.
    """
    params = _get_params(mapper_spec)
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
      # This is a mechanism to allow multiple blob keys (which do not contain
      # commas) in a single string. It may go away.
      blob_keys = blob_keys.split(",")

    blob_files = {}
    total_size = 0
    for blob_key in blob_keys:
      zip_input = zipfile.ZipFile(_reader(blob_key))
      blob_files[blob_key] = zip_input.infolist()
      total_size += sum(x.file_size for x in blob_files[blob_key])

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)

    # We can break on both blob key and file-within-zip boundaries.
    # A shard will span at minimum a single blob key, but may only
    # handle a few files within a blob.

    size_per_shard = total_size // shard_count

    readers = []
    for blob_key in blob_keys:
      bfiles = blob_files[blob_key]
      current_shard_size = 0
      start_file_index = 0
      next_file_index = 0
      for fileinfo in bfiles:
        next_file_index += 1
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
          readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                             _reader))
          current_shard_size = 0
          start_file_index = next_file_index
      # A final partial shard picks up any remaining files for this blob.
      if current_shard_size != 0:
        readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                           _reader))

    return readers

  def next(self):
    """Returns the next line from this input reader as (lineinfo, line) tuple.

    Returns:
      The next input from this input reader, in the form of a 2-tuple.
      The first element of the tuple describes the source, it is itself
        a tuple (blobkey, filenumber, byteoffset).
      The second element of the tuple is the line found at that offset.
    """
    if not self._filestream:
      if not self._zip:
        self._zip = zipfile.ZipFile(self._reader(self._blob_key))
        # Get a list of entries, reversed so we can pop entries off in order
        self._entries = self._zip.infolist()[self._start_file_index:
                                             self._end_file_index]
        self._entries.reverse()
      if not self._entries:
        raise StopIteration()
      entry = self._entries.pop()
      # Decompress the whole member into memory and iterate it as a stream.
      value = self._zip.read(entry.filename)
      self._filestream = StringIO.StringIO(value)
      if self._initial_offset:
        # A nonzero offset may land mid-line; skip to the next full line.
        self._filestream.seek(self._initial_offset)
        self._filestream.readline()

    start_position = self._filestream.tell()
    line = self._filestream.readline()

    if not line:
      # Done with this file in the zip. Move on to the next file.
      self._filestream.close()
      self._filestream = None
      self._start_file_index += 1
      self._initial_offset = 0
      return self.next()

    return ((self._blob_key, self._start_file_index, start_position),
            line.rstrip("\n"))

  def _next_offset(self):
    """Return the offset of the next line to read."""
    if self._filestream:
      offset = self._filestream.tell()
      if offset:
        # Back up one byte so that, on resume, the skip-to-next-line
        # readline() in next() consumes only the trailing newline of the
        # last line already returned, not the next unread line.
        offset -= 1
    else:
      offset = self._initial_offset

    return offset

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    return {self.BLOB_KEY_PARAM: self._blob_key,
            self.START_FILE_INDEX_PARAM: self._start_file_index,
            self.END_FILE_INDEX_PARAM: self._end_file_index,
            self.OFFSET_PARAM: self._next_offset()}

  @classmethod
  def from_json(cls, json, _reader=blobstore.BlobReader):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.
      _reader: For dependency injection.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    return cls(json[cls.BLOB_KEY_PARAM],
               json[cls.START_FILE_INDEX_PARAM],
               json[cls.END_FILE_INDEX_PARAM],
               json[cls.OFFSET_PARAM],
               _reader)

  def __str__(self):
    """Returns the string representation of this reader.

    Returns:
      string blobkey:[start file num, end file num]:current offset.
    """
    return "blobstore.BlobKey(%r):[%d, %d]:%d" % (
        self._blob_key, self._start_file_index, self._end_file_index,
        self._next_offset())
class RandomStringInputReader(InputReader):
  """RandomStringInputReader generates random strings as output.

  Primary usage is to populate output with testing entries.
  """

  # Total number of entries this reader should generate.
  COUNT = "count"
  # Length of the generated strings.
  STRING_LENGTH = "string_length"

  DEFAULT_STRING_LENGTH = 10

  def __init__(self, count, string_length):
    """Initialize input reader.

    Args:
      count: number of entries this shard should generate.
      string_length: the length of generated random strings.
    """
    self._count = count
    self._string_length = string_length

  def __iter__(self):
    """Yields self._count random lowercase ASCII strings.

    Decrements self._count as it goes so that to_json() reflects the
    remaining work if the shard is serialized mid-iteration.
    """
    ctx = context.get()
    while self._count:
      self._count -= 1
      start_time = time.time()
      content = "".join(random.choice(string.ascii_lowercase)
                        for _ in range(self._string_length))
      if ctx:
        # Report elapsed generation time and bytes produced to the counters.
        operation.counters.Increment(
            COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)
        operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
      yield content

  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of input readers, one per shard.

    The requested count is divided evenly across shard_count readers; any
    remainder is assigned to one extra reader.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Returns:
      A list of RandomStringInputReader instances.
    """
    params = _get_params(mapper_spec)
    count = params[cls.COUNT]
    string_length = cls.DEFAULT_STRING_LENGTH
    if cls.STRING_LENGTH in params:
      string_length = params[cls.STRING_LENGTH]

    shard_count = mapper_spec.shard_count
    count_per_shard = count // shard_count

    mr_input_readers = [
        cls(count_per_shard, string_length) for _ in range(shard_count)]

    left = count - count_per_shard * shard_count
    if left > 0:
      mr_input_readers.append(cls(left, string_length))

    return mr_input_readers

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")

    params = _get_params(mapper_spec)
    if cls.COUNT not in params:
      raise BadReaderParamsError("Must specify %s" % cls.COUNT)
    if not isinstance(params[cls.COUNT], int):
      raise BadReaderParamsError("%s should be an int but is %s" %
                                 (cls.COUNT, type(params[cls.COUNT])))
    if params[cls.COUNT] <= 0:
      # Bug fix: the original raised a bare "%s should be a positive int"
      # format string without applying the "%" argument.
      raise BadReaderParamsError("%s should be a positive int" % cls.COUNT)
    if cls.STRING_LENGTH in params and not (
        isinstance(params[cls.STRING_LENGTH], int) and
        params[cls.STRING_LENGTH] > 0):
      raise BadReaderParamsError("%s should be a positive int but is %s" %
                                 (cls.STRING_LENGTH, params[cls.STRING_LENGTH]))
    if (not isinstance(mapper_spec.shard_count, int) or
        mapper_spec.shard_count <= 0):
      raise BadReaderParamsError(
          "shard_count should be a positive int but is %s" %
          mapper_spec.shard_count)

  @classmethod
  def from_json(cls, json):
    """Restores a reader from its to_json() representation."""
    return cls(json[cls.COUNT], json[cls.STRING_LENGTH])

  def to_json(self):
    """Serializes the remaining count and string length."""
    return {self.COUNT: self._count, self.STRING_LENGTH: self._string_length}
# TODO(user): This reader always produces only one shard, because
# namespace entities use the mix of ids/names, and KeyRange-based splitting
# doesn't work satisfactorily in this case.
# It's possible to implement specific splitting functionality for the reader
# instead of reusing the generic one. Meanwhile 1 shard is enough for our
# applications.
class NamespaceInputReader(InputReader):
  """An input reader to iterate over namespaces.

  This reader yields namespace names as string.
  It will always produce only one shard.
  """

  NAMESPACE_RANGE_PARAM = "namespace_range"
  BATCH_SIZE_PARAM = "batch_size"
  # Default number of namespace keys fetched per datastore query.
  _BATCH_SIZE = 10

  def __init__(self, ns_range, batch_size=_BATCH_SIZE):
    self.ns_range = ns_range
    self._batch_size = batch_size

  def to_json(self):
    """Serializes all the data in this query range into json form.

    Returns:
      all the data in json-compatible map.
    """
    return {self.NAMESPACE_RANGE_PARAM: self.ns_range.to_json_object(),
            self.BATCH_SIZE_PARAM: self._batch_size}

  @classmethod
  def from_json(cls, json):
    """Create new NamespaceInputReader from the json, encoded by to_json.

    Args:
      json: json map representation of NamespaceInputReader.

    Returns:
      an instance of NamespaceInputReader with all data deserialized from json.
    """
    return cls(
        namespace_range.NamespaceRange.from_json_object(
            json[cls.NAMESPACE_RANGE_PARAM]),
        json[cls.BATCH_SIZE_PARAM])

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Input reader class mismatch")
    params = _get_params(mapper_spec)
    if cls.BATCH_SIZE_PARAM in params:
      try:
        batch_size = int(params[cls.BATCH_SIZE_PARAM])
        if batch_size < 1:
          raise BadReaderParamsError("Bad batch size: %s" % batch_size)
      except ValueError, e:
        raise BadReaderParamsError("Bad batch size: %s" % e)

  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of input readers for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Returns:
      A list of InputReaders.
    """
    batch_size = int(_get_params(mapper_spec).get(
        cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))
    shard_count = mapper_spec.shard_count
    namespace_ranges = namespace_range.NamespaceRange.split(shard_count,
                                                            contiguous=True)
    return [NamespaceInputReader(ns_range, batch_size)
            for ns_range in namespace_ranges]

  def __iter__(self):
    while True:
      # Fetch one batch of namespace keys from the current range.
      keys = self.ns_range.make_datastore_query().Get(limit=self._batch_size)
      if not keys:
        break

      for key in keys:
        namespace = metadata.Namespace.key_to_namespace(key)
        # Advance the range past the yielded namespace so that a reader
        # serialized mid-iteration resumes without repeating it.
        self.ns_range = self.ns_range.with_start_after(namespace)
        yield namespace

  def __str__(self):
    return repr(self.ns_range)
class LogInputReader(InputReader):
"""Input reader for a time range of logs via the Logs Reader API.
The number of input shards may be specified by the SHARDS_PARAM mapper
parameter. A starting and ending time (in seconds since the Unix epoch) are
required to generate time ranges over which to shard the input.
"""
# Parameters directly mapping to those available via logservice.fetch().
START_TIME_PARAM = "start_time"
END_TIME_PARAM = "end_time"
MINIMUM_LOG_LEVEL_PARAM = "minimum_log_level"
INCLUDE_INCOMPLETE_PARAM = "include_incomplete"
INCLUDE_APP_LOGS_PARAM = "include_app_logs"
VERSION_IDS_PARAM = "version_ids"
MODULE_VERSIONS_PARAM = "module_versions"
# Semi-hidden parameters used only internally or for privileged applications.
_OFFSET_PARAM = "offset"
_PROTOTYPE_REQUEST_PARAM = "prototype_request"
_PARAMS = frozenset([START_TIME_PARAM, END_TIME_PARAM, _OFFSET_PARAM,
MINIMUM_LOG_LEVEL_PARAM, INCLUDE_INCOMPLETE_PARAM,
INCLUDE_APP_LOGS_PARAM, VERSION_IDS_PARAM,
MODULE_VERSIONS_PARAM, _PROTOTYPE_REQUEST_PARAM])
_KWARGS = frozenset([_OFFSET_PARAM, _PROTOTYPE_REQUEST_PARAM])
def __init__(self,
start_time=None,
end_time=None,
minimum_log_level=None,
include_incomplete=False,
include_app_logs=False,
version_ids=None,
module_versions=None,
**kwargs):
"""Constructor.
Args:
start_time: The earliest request completion or last-update time of logs
that should be mapped over, in seconds since the Unix epoch.
end_time: The latest request completion or last-update time that logs
should be mapped over, in seconds since the Unix epoch.
minimum_log_level: An application log level which serves as a filter on
the requests mapped over--requests with no application log at or above
the specified level will be omitted, even if include_app_logs is False.
include_incomplete: Whether or not to include requests that have started
but not yet finished, as a boolean. Defaults to False.
include_app_logs: Whether or not to include application level logs in the
mapped logs, as a boolean. Defaults to False.
version_ids: A list of version ids whose logs should be read. This can not
be used with module_versions
module_versions: A list of tuples containing a module and version id
whose logs should be read. This can not be used with version_ids
**kwargs: A dictionary of keywords associated with this input reader.
"""
InputReader.__init__(self) # pylint: disable=non-parent-init-called
# The rule for __params is that its contents will always be suitable as
# input to logservice.fetch().
self.__params = dict(kwargs)
if start_time is not None:
self.__params[self.START_TIME_PARAM] = start_time
if end_time is not None:
self.__params[self.END_TIME_PARAM] = end_time
if minimum_log_level is not None:
self.__params[self.MINIMUM_LOG_LEVEL_PARAM] = minimum_log_level
if include_incomplete is not None:
self.__params[self.INCLUDE_INCOMPLETE_PARAM] = include_incomplete
if include_app_logs is not None:
self.__params[self.INCLUDE_APP_LOGS_PARAM] = include_app_logs
if version_ids:
self.__params[self.VERSION_IDS_PARAM] = version_ids
if module_versions:
self.__params[self.MODULE_VERSIONS_PARAM] = module_versions
# Any submitted prototype_request will be in encoded form.
if self._PROTOTYPE_REQUEST_PARAM in self.__params:
prototype_request = log_service_pb.LogReadRequest(
self.__params[self._PROTOTYPE_REQUEST_PARAM])
self.__params[self._PROTOTYPE_REQUEST_PARAM] = prototype_request
def __iter__(self):
"""Iterates over logs in a given range of time.
Yields:
A RequestLog containing all the information for a single request.
"""
for log in logservice.fetch(**self.__params):
self.__params[self._OFFSET_PARAM] = log.offset
yield log
@classmethod
def from_json(cls, json):
"""Creates an instance of the InputReader for the given input shard's state.
Args:
json: The InputReader state as a dict-like object.
Returns:
An instance of the InputReader configured using the given JSON parameters.
"""
# Strip out unrecognized parameters, as introduced by b/5960884.
params = dict((str(k), v) for k, v in json.iteritems()
if k in cls._PARAMS)
# This is not symmetric with to_json() wrt. PROTOTYPE_REQUEST_PARAM because
# the constructor parameters need to be JSON-encodable, so the decoding
# needs to happen there anyways.
if cls._OFFSET_PARAM in params:
params[cls._OFFSET_PARAM] = base64.b64decode(params[cls._OFFSET_PARAM])
return cls(**params)
def to_json(self):
"""Returns an input shard state for the remaining inputs.
Returns:
A JSON serializable version of the remaining input to read.
"""
params = dict(self.__params) # Shallow copy.
if self._PROTOTYPE_REQUEST_PARAM in params:
prototype_request = params[self._PROTOTYPE_REQUEST_PARAM]
params[self._PROTOTYPE_REQUEST_PARAM] = prototype_request.Encode()
if self._OFFSET_PARAM in params:
params[self._OFFSET_PARAM] = base64.b64encode(params[self._OFFSET_PARAM])
return params
@classmethod
def split_input(cls, mapper_spec):
"""Returns a list of input readers for the given input specification.
Args:
mapper_spec: The MapperSpec for this InputReader.
Returns:
A list of InputReaders.
"""
params = _get_params(mapper_spec)
shard_count = mapper_spec.shard_count
# Pick out the overall start and end times and time step per shard.
start_time = params[cls.START_TIME_PARAM]
end_time = params[cls.END_TIME_PARAM]
seconds_per_shard = (end_time - start_time) / shard_count
# Create a LogInputReader for each shard, modulating the params as we go.
shards = []
for _ in xrange(shard_count - 1):
params[cls.END_TIME_PARAM] = (params[cls.START_TIME_PARAM] +
seconds_per_shard)
shards.append(LogInputReader(**params))
params[cls.START_TIME_PARAM] = params[cls.END_TIME_PARAM]
# Create a final shard to complete the time range.
params[cls.END_TIME_PARAM] = end_time
return shards + [LogInputReader(**params)]
@classmethod
def validate(cls, mapper_spec):
"""Validates the mapper's specification and all necessary parameters.
Args:
mapper_spec: The MapperSpec to be used with this InputReader.
Raises:
BadReaderParamsError: If the user fails to specify both a starting time
and an ending time, or if the starting time is later than the ending
time.
"""
if mapper_spec.input_reader_class() != cls:
raise errors.BadReaderParamsError("Input reader class mismatch")
params = _get_params(mapper_spec, allowed_keys=cls._PARAMS)
if (cls.VERSION_IDS_PARAM not in params and
cls.MODULE_VERSIONS_PARAM not in params):
raise errors.BadReaderParamsError("Must specify a list of version ids or "
"module/version ids for mapper input")
if (cls.VERSION_IDS_PARAM in params and
cls.MODULE_VERSIONS_PARAM in params):
raise errors.BadReaderParamsError("Can not supply both version ids or "
"module/version ids. Use only one.")
if (cls.START_TIME_PARAM not in params or
params[cls.START_TIME_PARAM] is None):
raise errors.BadReaderParamsError("Must specify a starting time for "
"mapper input")
if cls.END_TIME_PARAM not in params or params[cls.END_TIME_PARAM] is None:
params[cls.END_TIME_PARAM] = time.time()
if params[cls.START_TIME_PARAM] >= params[cls.END_TIME_PARAM]:
raise errors.BadReaderParamsError("The starting time cannot be later "
"than or the same as the ending time.")
if cls._PROTOTYPE_REQUEST_PARAM in params:
try:
params[cls._PROTOTYPE_REQUEST_PARAM] = log_service_pb.LogReadRequest(
params[cls._PROTOTYPE_REQUEST_PARAM])
except (TypeError, ProtocolBuffer.ProtocolBufferDecodeError):
raise errors.BadReaderParamsError("The prototype request must be "
"parseable as a LogReadRequest.")
# Pass the parameters to logservice.fetch() to verify any underlying
# constraints on types or values. This only constructs an iterator, it
# doesn't trigger any requests for actual log records.
try:
logservice.fetch(**params)
except logservice.InvalidArgumentError, e:
raise errors.BadReaderParamsError("One or more parameters are not valid "
"inputs to logservice.fetch(): %s" % e)
def __str__(self):
"""Returns the string representation of this LogInputReader."""
params = []
for key in sorted(self.__params.keys()):
value = self.__params[key]
if key is self._PROTOTYPE_REQUEST_PARAM:
params.append("%s='%s'" % (key, value))
elif key is self._OFFSET_PARAM:
params.append("%s='%s'" % (key, value))
else:
params.append("%s=%s" % (key, value))
return "LogInputReader(%s)" % ", ".join(params)
# pylint: disable=too-many-instance-attributes
class _GoogleCloudStorageInputReader(InputReader):
"""Input reader from Google Cloud Storage using the cloudstorage library.
This class is expected to be subclassed with a reader that understands
user-level records.
Required configuration in the mapper_spec.input_reader dictionary.
BUCKET_NAME_PARAM: name of the bucket to use (with no extra delimiters or
suffixed such as directories.
OBJECT_NAMES_PARAM: a list of object names or prefixes. All objects must be
in the BUCKET_NAME_PARAM bucket. If the name ends with a * it will be
treated as prefix and all objects with matching names will be read.
Entries should not start with a slash unless that is part of the object's
name. An example list could be:
["my-1st-input-file", "directory/my-2nd-file", "some/other/dir/input-*"]
To retrieve all files "*" will match every object in the bucket. If a file
is listed twice or is covered by multiple prefixes it will be read twice,
there is no deduplication.
Optional configuration in the mapper_sec.input_reader dictionary.
BUFFER_SIZE_PARAM: the size of the read buffer for each file handle.
DELIMITER_PARAM: if specified, turn on the shallow splitting mode.
The delimiter is used as a path separator to designate directory
hierarchy. Matching of prefixes from OBJECT_NAME_PARAM
will stop at the first directory instead of matching
all files under the directory. This allows MR to process bucket with
hundreds of thousands of files.
FAIL_ON_MISSING_INPUT: if specified and True, the MR will fail if any of
the input files are missing. Missing files will be skipped otherwise.
"""
# Supported parameters
BUCKET_NAME_PARAM = "bucket_name"
OBJECT_NAMES_PARAM = "objects"
BUFFER_SIZE_PARAM = "buffer_size"
DELIMITER_PARAM = "delimiter"
FAIL_ON_MISSING_INPUT = "fail_on_missing_input"
# Internal parameters
_ACCOUNT_ID_PARAM = "account_id"
# Other internal configuration constants
_JSON_PICKLE = "pickle"
_JSON_FAIL_ON_MISSING_INPUT = "fail_on_missing_input"
_STRING_MAX_FILES_LISTED = 10 # Max files shown in the str representation
# Input reader can also take in start and end filenames and do
# listbucket. This saves space but has two cons.
# 1. Files to read are less well defined: files can be added or removed over
# the lifetime of the MR job.
# 2. A shard has to process files from a contiguous namespace.
# May introduce staggering shard.
  def __init__(self, filenames, index=0, buffer_size=None, _account_id=None,
               delimiter=None):
    """Initialize a GoogleCloudStorageInputReader instance.

    Args:
      filenames: A list of Google Cloud Storage filenames of the form
        '/bucket/objectname'.
      index: Index of the next filename to read.
      buffer_size: The size of the read buffer, None to use default.
      _account_id: Internal use only. See cloudstorage documentation.
      delimiter: Delimiter used as path separator. See class doc for details.
    """
    self._filenames = filenames
    self._index = index
    self._buffer_size = buffer_size
    self._account_id = _account_id
    self._delimiter = delimiter
    # Lazy listbucket() expansion state used by _next_file() when a filename
    # ends with the delimiter: the listbucket result and its live iterator.
    self._bucket = None
    self._bucket_iter = None

    # True iff we should fail on missing input (see class doc above). Set to
    # None in constructor and overwritten in split_input and from_json.
    # fail_on_missing_input is not parameter of the constructor to avoid
    # breaking classes inheriting from _GoogleCloudStorageInputReader and
    # overriding the constructor.
    self._fail_on_missing_input = None
  def _next_file(self):
    """Find next filename.

    self._filenames may need to be expanded via listbucket.

    Returns:
      None if no more file is left. Filename otherwise.
    """
    while True:
      # Drain any in-progress listbucket() expansion first.
      if self._bucket_iter:
        try:
          return self._bucket_iter.next().filename
        except StopIteration:
          # Expansion exhausted; fall through to the next explicit filename.
          self._bucket_iter = None
          self._bucket = None
      # All explicit filenames consumed: signal exhaustion (returns None).
      if self._index >= len(self._filenames):
        return
      filename = self._filenames[self._index]
      self._index += 1
      # A plain entry is returned directly; an entry ending with the
      # delimiter denotes a "directory" and is expanded via listbucket.
      if self._delimiter is None or not filename.endswith(self._delimiter):
        return filename
      self._bucket = cloudstorage.listbucket(filename,
                                             delimiter=self._delimiter)
      self._bucket_iter = iter(self._bucket)
@classmethod
def get_params(cls, mapper_spec, allowed_keys=None, allow_old=True):
"""Extracts the parameters from the mapper_spec.
Extends the existing get_params
Returns:
Returns a dictionary with all the mapper parameters
"""
params = _get_params(mapper_spec, allowed_keys, allow_old)
# Use the bucket_name defined in mapper_spec params if one was not defined
# specifically in the input_reader params.
if (mapper_spec.params.get(cls.BUCKET_NAME_PARAM) is not None and
params.get(cls.BUCKET_NAME_PARAM) is None):
params[cls.BUCKET_NAME_PARAM] = mapper_spec.params[cls.BUCKET_NAME_PARAM]
return params
@classmethod
def validate(cls, mapper_spec):
"""Validate mapper specification.
Args:
mapper_spec: an instance of model.MapperSpec
Raises:
BadReaderParamsError: if the specification is invalid for any reason such
as missing the bucket name or providing an invalid bucket name.
"""
reader_spec = cls.get_params(mapper_spec, allow_old=False)
# Bucket Name is required
if cls.BUCKET_NAME_PARAM not in reader_spec:
raise errors.BadReaderParamsError(
"%s is required for Google Cloud Storage" %
cls.BUCKET_NAME_PARAM)
try:
cloudstorage.validate_bucket_name(
reader_spec[cls.BUCKET_NAME_PARAM])
except ValueError, error:
raise errors.BadReaderParamsError("Bad bucket name, %s" % (error))
# Object Name(s) are required
if cls.OBJECT_NAMES_PARAM not in reader_spec:
raise errors.BadReaderParamsError(
"%s is required for Google Cloud Storage" %
cls.OBJECT_NAMES_PARAM)
filenames = reader_spec[cls.OBJECT_NAMES_PARAM]
if not isinstance(filenames, list):
raise errors.BadReaderParamsError(
"Object name list is not a list but a %s" %
filenames.__class__.__name__)
for filename in filenames:
if not isinstance(filename, basestring):
raise errors.BadReaderParamsError(
"Object name is not a string but a %s" %
filename.__class__.__name__)
if cls.DELIMITER_PARAM in reader_spec:
delimiter = reader_spec[cls.DELIMITER_PARAM]
if not isinstance(delimiter, basestring):
raise errors.BadReaderParamsError(
"%s is not a string but a %s" %
(cls.DELIMITER_PARAM, type(delimiter)))
#pylint: disable=too-many-locals
@classmethod
def split_input(cls, mapper_spec):
"""Returns a list of input readers.
An equal number of input files are assigned to each shard (+/- 1). If there
are fewer files than shards, fewer than the requested number of shards will
be used. Input files are currently never split (although for some formats
could be and may be split in a future implementation).
Args:
mapper_spec: an instance of model.MapperSpec.
Returns:
A list of InputReaders. None when no input data can be found.
"""
reader_spec = cls.get_params(mapper_spec, allow_old=False)
bucket = reader_spec[cls.BUCKET_NAME_PARAM]
filenames = reader_spec[cls.OBJECT_NAMES_PARAM]
delimiter = reader_spec.get(cls.DELIMITER_PARAM)
account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM)
buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM)
fail_on_missing_input = reader_spec.get(cls.FAIL_ON_MISSING_INPUT)
# Gather the complete list of files (expanding wildcards)
all_filenames = []
for filename in filenames:
if filename.endswith("*"):
all_filenames.extend(
[file_stat.filename for file_stat in cloudstorage.listbucket(
"/" + bucket + "/" + filename[:-1], delimiter=delimiter,
_account_id=account_id)])
else:
all_filenames.append("/%s/%s" % (bucket, filename))
# Split into shards
readers = []
for shard in range(0, mapper_spec.shard_count):
shard_filenames = all_filenames[shard::mapper_spec.shard_count]
if shard_filenames:
reader = cls(
shard_filenames, buffer_size=buffer_size, _account_id=account_id,
delimiter=delimiter)
reader._fail_on_missing_input = fail_on_missing_input
readers.append(reader)
return readers
@classmethod
def from_json(cls, state):
obj = pickle.loads(state[cls._JSON_PICKLE])
# fail_on_missing_input might not be set - default to False.
obj._fail_on_missing_input = state.get(
cls._JSON_FAIL_ON_MISSING_INPUT, False)
if obj._bucket:
obj._bucket_iter = iter(obj._bucket)
return obj
def to_json(self):
before_iter = self._bucket_iter
self._bucket_iter = None
try:
return {
self._JSON_PICKLE: pickle.dumps(self),
# self._fail_on_missing_input gets pickled but we save it separately
# and override it in from_json to deal with version flipping.
self._JSON_FAIL_ON_MISSING_INPUT:
getattr(self, "_fail_on_missing_input", False)
}
return {self._JSON_PICKLE: pickle.dumps(self)}
finally:
self._bucket_itr = before_iter
def next(self):
"""Returns the next input from this input reader, a block of bytes.
Non existent files will be logged and skipped. The file might have been
removed after input splitting.
Returns:
The next input from this input reader in the form of a cloudstorage
ReadBuffer that supports a File-like interface (read, readline, seek,
tell, and close). An error may be raised if the file can not be opened.
Raises:
StopIteration: The list of files has been exhausted.
"""
options = {}
if self._buffer_size:
options["read_buffer_size"] = self._buffer_size
if self._account_id:
options["_account_id"] = self._account_id
while True:
filename = self._next_file()
if filename is None:
raise StopIteration()
try:
start_time = time.time()
handle = cloudstorage.open(filename, **options)
ctx = context.get()
if ctx:
operation.counters.Increment(
COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)
return handle
except cloudstorage.NotFoundError:
# Fail the job if we're strict on missing input.
if getattr(self, "_fail_on_missing_input", False):
raise errors.FailJobError(
"File missing in GCS, aborting: %s" % filename)
# Move on otherwise.
logging.warning("File %s may have been removed. Skipping file.",
filename)
def __str__(self):
# Only show a limited number of files individually for readability
num_files = len(self._filenames)
if num_files > self._STRING_MAX_FILES_LISTED:
names = "%s...%s + %d not shown" % (
",".join(self._filenames[0:self._STRING_MAX_FILES_LISTED - 1]),
self._filenames[-1],
num_files - self._STRING_MAX_FILES_LISTED)
else:
names = ",".join(self._filenames)
if self._index > num_files:
status = "EOF"
else:
status = "Next %s (%d of %d)" % (
self._filenames[self._index],
self._index + 1, # +1 for human 1-indexing
num_files)
return "CloudStorage [%s, %s]" % (status, names)
# Public alias for the private implementation class.
GoogleCloudStorageInputReader = _GoogleCloudStorageInputReader
class _GoogleCloudStorageRecordInputReader(_GoogleCloudStorageInputReader):
  """Read data from a Google Cloud Storage file using LevelDB format.

  See the _GoogleCloudStorageOutputWriter for additional configuration options.
  """

  def __getstate__(self):
    # Exclude the record reader from the pickled state; it is rebuilt
    # lazily in next() after deserialization.
    result = self.__dict__.copy()
    # record reader may not exist if reader has not been used
    if "_record_reader" in result:
      # RecordsReader has no buffering, it can safely be reconstructed after
      # deserialization
      result.pop("_record_reader")
    return result

  def next(self):
    """Returns the next input from this input reader, a record.

    Returns:
      The next input from this input reader in the form of a record read from
      an LevelDB file.

    Raises:
      StopIteration: The ordered set records has been exhausted.
    """
    while True:
      # Lazily open the next GCS file handle and its record reader. The
      # hasattr checks cover instances unpickled from older states.
      if not hasattr(self, "_cur_handle") or self._cur_handle is None:
        # If there are no more files, StopIteration is raised here
        self._cur_handle = super(_GoogleCloudStorageRecordInputReader,
                                 self).next()
      if not hasattr(self, "_record_reader") or self._record_reader is None:
        self._record_reader = records.RecordsReader(self._cur_handle)

      try:
        start_time = time.time()
        content = self._record_reader.read()

        ctx = context.get()
        if ctx:
          operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
          operation.counters.Increment(
              COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)
        return content

      except EOFError:
        # Current file is exhausted; clear both handles so the loop opens
        # the next file on the following iteration.
        self._cur_handle = None
        self._record_reader = None
# Public alias for the private implementation class.
GoogleCloudStorageRecordInputReader = _GoogleCloudStorageRecordInputReader
class _ReducerReader(_GoogleCloudStorageRecordInputReader):
  """Reader to read KeyValues records from GCS."""

  expand_parameters = True

  def __init__(self, filenames, index=0, buffer_size=None, _account_id=None,
               delimiter=None):
    super(_ReducerReader, self).__init__(filenames, index, buffer_size,
                                         _account_id, delimiter)
    # Key currently being accumulated and the values collected for it so far;
    # carried across records (and across serialization, see to_json).
    self.current_key = None
    self.current_values = None

  def __iter__(self):
    """Yields (key, values) tuples interleaved with ALLOW_CHECKPOINT markers.

    Values for the same key are accumulated across consecutive records; a
    (key, values) pair is yielded once a record with a different key is seen.
    When a combiner_spec is configured on the mapper, the combiner is applied
    to each record's values as they arrive.
    """
    ctx = context.get()
    combiner = None

    if ctx:
      combiner_spec = ctx.mapreduce_spec.mapper.params.get("combiner_spec")
      if combiner_spec:
        combiner = util.handler_for_name(combiner_spec)

    try:
      while True:
        binary_record = super(_ReducerReader, self).next()
        proto = kv_pb.KeyValues()
        proto.ParseFromString(binary_record)

        # Key change: emit the finished (key, values) pair further below.
        to_yield = None
        if self.current_key is not None and self.current_key != proto.key():
          to_yield = (self.current_key, self.current_values)
          self.current_key = None
          self.current_values = None

        if self.current_key is None:
          self.current_key = proto.key()
          self.current_values = []

        if combiner:
          combiner_result = combiner(
              self.current_key, proto.value_list(), self.current_values)

          if not util.is_generator(combiner_result):
            raise errors.BadCombinerOutputError(
                "Combiner %s should yield values instead of returning them "
                "(%s)" % (combiner, combiner_result))

          self.current_values = []
          for value in combiner_result:
            if isinstance(value, operation.Operation):
              value(ctx)
            else:
              # With combiner the current values always come from the combiner.
              self.current_values.append(value)

          # Check-point after each combiner call is run only when there's
          # nothing that needs to be yielded below. Otherwise allowing a
          # check-point here would cause the current to_yield data to be lost.
          if not to_yield:
            yield ALLOW_CHECKPOINT
        else:
          # Without combiner we just accumulate values.
          self.current_values.extend(proto.value_list())

        if to_yield:
          yield to_yield
          # Check-point after each key is yielded.
          yield ALLOW_CHECKPOINT
    except StopIteration:
      pass

    # There may be some accumulated values left at the end of an input file
    # so be sure to yield those too.
    if self.current_key is not None:
      to_yield = (self.current_key, self.current_values)
      self.current_key = None
      self.current_values = None
      yield to_yield

  @staticmethod
  def encode_data(data):
    """Encodes the given data, which may have include raw bytes.

    Works around limitations in JSON encoding, which cannot handle raw bytes.

    Args:
      data: the data to encode.

    Returns:
      The data encoded.
    """
    return base64.b64encode(pickle.dumps(data))

  @staticmethod
  def decode_data(data):
    """Decodes data encoded with the encode_data function."""
    return pickle.loads(base64.b64decode(data))

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    result = super(_ReducerReader, self).to_json()
    # Persist the partially-accumulated key/values so iteration can resume
    # mid-key after a checkpoint.
    result["current_key"] = self.encode_data(self.current_key)
    result["current_values"] = self.encode_data(self.current_values)
    return result

  @classmethod
  def from_json(cls, json):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    result = super(_ReducerReader, cls).from_json(json)
    result.current_key = _ReducerReader.decode_data(json["current_key"])
    result.current_values = _ReducerReader.decode_data(json["current_values"])
    return result
# pylint: disable=too-many-instance-attributes
class GoogleCloudStorageLineInputReader(InputReader):
  """Input reader for a newline delimited file in Google Cloud Storage.

  Required configuration in the mapper_spec.input_reader dictionary.
    bucket_name : name of the bucket to use (with no extra delimiters or
      suffixes such as directories).
    objects : a list of object names or prefixes. All objects must be
      in the bucket_name. If the name ends with a * it will be
      treated as prefix and all objects with matching names will be read.
      Entries should not start with a slash unless that is part of the object's
      name. An example list could be:
      ['my-1st-input-file', 'directory/my-2nd-file', 'some/other/dir/input-*']
      To retrieve all files '*' will match every object in the bucket. If a file
      is listed twice or is covered by multiple prefixes it will be read twice,
      there is no deduplication.

  Optional configuration in the mapper_spec.input_reader dictionary.
    buffer_size : the size of the read buffer for each file handle.
    delimiter : if specified, turn on the shallow splitting mode.
      The delimiter is used as a path separator to designate directory
      hierarchy. Matching of prefixes from objects
      will stop at the first directory instead of matching
      all files under the directory. This allows MR to process bucket with
      hundreds of thousands of files.

  Outputs:
    A tuple containing an other tuple and the Line
    ((File name, start position), line)
      File name : Name of the file the data came from
      start position : Files index position for the start of the data
      line : The data read till a '\n' was reached
  """
  # Supported parameters
  BUCKET_NAME_PARAM = 'bucket_name'
  OBJECT_NAMES_PARAM = 'objects'
  BUFFER_SIZE_PARAM = 'buffer_size'
  DELIMITER_PARAM = 'delimiter'

  # Internal parameters
  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256
  _ACCOUNT_ID_PARAM = 'account_id'

  # Serialization parameters.
  INITIAL_POSITION_PARAM = 'initial_position'
  END_POSITION_PARAM = 'end_position'

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    # Reads the parameters sent to the mapper
    reader_spec = _get_params(mapper_spec, allow_old=False)

    # Bucket Name is required
    if cls.BUCKET_NAME_PARAM not in reader_spec:
      raise errors.BadReaderParamsError(
          '%s is required for Google Cloud Storage' %
          cls.BUCKET_NAME_PARAM)
    try:
      cloudstorage.validate_bucket_name(
          reader_spec[cls.BUCKET_NAME_PARAM])
    except ValueError as error:
      raise errors.BadReaderParamsError('Bad bucket name, %s' % (error))

    # Object Name(s) are required
    if cls.OBJECT_NAMES_PARAM not in reader_spec:
      raise errors.BadReaderParamsError(
          '%s is required for Google Cloud Storage' %
          cls.OBJECT_NAMES_PARAM)
    file_names = reader_spec[cls.OBJECT_NAMES_PARAM]
    if not isinstance(file_names, list):
      raise errors.BadReaderParamsError(
          'Object name list is not a list but a %s' %
          file_names.__class__.__name__)
    for file_name in file_names:
      if not isinstance(file_name, basestring):
        raise errors.BadReaderParamsError(
            'Object name is not a string but a %s' %
            file_name.__class__.__name__)

    # The delimiter, when given, must be a string.
    if cls.DELIMITER_PARAM in reader_spec:
      delimiter = reader_spec[cls.DELIMITER_PARAM]
      if not isinstance(delimiter, basestring):
        raise errors.BadReaderParamsError(
            '%s is not a string but a %s' %
            (cls.DELIMITER_PARAM, type(delimiter)))

  # pylint: disable=too-many-locals
  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of shard_count input_spec_shards for input_spec.

    Args:
      mapper_spec: The mapper specification to split from. Must contain
        'objects' parameter with one or more file_names.

    Returns:
      A list of GCSInputReaders corresponding to the specified shards.
    """
    reader_spec = _get_params(mapper_spec, allow_old=False)
    bucket = reader_spec[cls.BUCKET_NAME_PARAM]
    file_names = reader_spec[cls.OBJECT_NAMES_PARAM]
    delimiter = reader_spec.get(cls.DELIMITER_PARAM)
    account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM)
    buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM)

    # Gather the complete list of files (expanding wildcards)
    all_file_names = []
    for file_name in file_names:
      if file_name.endswith('*'):
        all_file_names.extend(
            [file_stat for file_stat in cloudstorage.listbucket(
                '/' + bucket + '/' + file_name[:-1], delimiter=delimiter,
                _account_id=account_id)])
      else:
        try:
          all_file_names.append(cloudstorage
                                .stat(('/%s/%s') % (bucket, file_name)))
        except cloudstorage.NotFoundError:
          logging.warning('File /%s/%s may have been removed. Skipping file.',
                          bucket, file_name)

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
    # Spread shards evenly over the files, at least one shard per file.
    # all_file_names may be empty when every requested file was missing.
    if all_file_names:
      shards_per_file = max(1, shard_count // len(all_file_names))
    else:
      shards_per_file = 1

    chunks = []
    for file_stats in all_file_names:
      file_name = file_stats.filename
      file_size = file_stats.st_size
      file_chunk_size = file_size // shards_per_file
      # All chunks but the last cover exactly file_chunk_size bytes; the
      # last chunk absorbs the division remainder so the file is covered.
      for i in xrange(shards_per_file - 1):
        chunks.append(GoogleCloudStorageLineInputReader.from_json(
            {cls.OBJECT_NAMES_PARAM: file_name,
             cls.INITIAL_POSITION_PARAM: file_chunk_size * i,
             cls.END_POSITION_PARAM: file_chunk_size * (i + 1),
             cls.BUFFER_SIZE_PARAM: buffer_size,
             cls.DELIMITER_PARAM: delimiter,
             cls._ACCOUNT_ID_PARAM: account_id}))
      chunks.append(GoogleCloudStorageLineInputReader.from_json(
          {cls.OBJECT_NAMES_PARAM: file_name,
           cls.INITIAL_POSITION_PARAM: file_chunk_size * (shards_per_file - 1),
           cls.END_POSITION_PARAM: file_size,
           cls.BUFFER_SIZE_PARAM: buffer_size,
           cls.DELIMITER_PARAM: delimiter,
           cls._ACCOUNT_ID_PARAM: account_id}))
    return chunks

  def to_json(self):
    """Returns an json-compatible input shard spec for remaining inputs."""
    new_pos = self._file_reader.tell()
    if self._has_iterated:
      # Step back over the trailing newline of the last-read line so a
      # resumed shard re-anchors on it (see _read_before_start).
      new_pos -= 1
    return {self.OBJECT_NAMES_PARAM: self._file_name,
            self.INITIAL_POSITION_PARAM: new_pos,
            self.END_POSITION_PARAM: self._end_position,
            # Bug fix: persist the optional reader settings so a resumed
            # shard keeps using them (they were previously dropped).
            self.BUFFER_SIZE_PARAM: self._buffer_size,
            self.DELIMITER_PARAM: self._delimiter,
            self._ACCOUNT_ID_PARAM: self._account_id}

  def __str__(self):
    """Returns the string representation of this LineInputReader."""
    return 'File Name(%r):[%d, %d]' % (
        self._file_name, self._file_reader.tell(), self._end_position)

  @classmethod
  def from_json(cls, json):
    """Instantiates an instance of this InputReader for the given shard spec."""
    # Bug fix: buffer_size, delimiter and account_id were previously ignored
    # even though split_input supplies them. .get tolerates states that were
    # serialized before these keys were persisted.
    return cls(json[cls.OBJECT_NAMES_PARAM],
               json[cls.INITIAL_POSITION_PARAM],
               json[cls.END_POSITION_PARAM],
               buffer_size=json.get(cls.BUFFER_SIZE_PARAM),
               delimiter=json.get(cls.DELIMITER_PARAM),
               account_id=json.get(cls._ACCOUNT_ID_PARAM))

  # pylint: disable=too-many-arguments
  def __init__(self, file_name, start_position, end_position,
               buffer_size=None, delimiter=None, account_id=None):
    """Initializes this instance with the given file name and character range.

    This GoogleCloudStorageLineInputReader will read from the first record
    starting strictly after start_position until the first record ending at or
    after end_position (exclusive). As an exception, if start_position is 0,
    then this InputReader starts reading at the first record.

    Args:
      file_name: the file name that this input reader is processing.
      start_position: the position to start reading at.
      end_position: a position in the last record to read.
      buffer_size: Used by the GCS reader to read data.
      delimiter: The delimiter is used as a path separator to designate
        directory hierarchy.
      account_id: internal use

    Raises:
      StopIteration: if the file no longer exists in GCS.
    """
    self._file_name = file_name
    self._buffer_size = buffer_size
    self._account_id = account_id
    self._delimiter = delimiter
    self._start_position = start_position

    options = {}
    if self._buffer_size:
      options['read_buffer_size'] = self._buffer_size
    if self._account_id:
      options['_account_id'] = self._account_id
    try:
      # pylint: disable=star-args
      self._file_reader = cloudstorage.open(file_name, **options)
      self._file_reader.seek(start_position)
    except cloudstorage.NotFoundError:
      logging.warning('File %s may have been removed. Skipping file.',
                      file_name)
      raise StopIteration()

    self._end_position = end_position
    self._has_iterated = False
    # A mid-file shard starts by discarding the (partial) line its
    # predecessor already consumed.
    self._read_before_start = bool(start_position)

  def next(self):
    """Returns the next input from as an (( file_name, offset), line) tuple."""
    self._has_iterated = True

    if self._read_before_start:
      self._file_reader.readline()
      self._read_before_start = False
    start_position = self._file_reader.tell()

    if start_position > self._end_position:
      raise StopIteration()

    line = self._file_reader.readline()

    if not line:
      raise StopIteration()

    return (self._file_name, start_position), line.rstrip('\n')
class GoogleCloudStorageZipInputReader(InputReader):
  """Input reader for files from a zip archive stored in the GCS.

  Each instance of the reader will read the TOC, from the end of the zip file,
  and then only the contained files which it is responsible for.

  Required configuration in the mapper_spec.input_reader dictionary.
    bucket_name : name of the bucket to use (with no extra delimiters or
      suffixes such as directories).
    objects : a list of object names or prefixes. All objects must be
      in the bucket_name. They all must be zip files. If the name ends with
      a * it will be
      treated as prefix and all objects with matching names will be read.
      Entries should not start with a slash unless that is part of the object's
      name. An example list could be:
      ['my-1st-input-file', 'directory/my-2nd-file', 'some/other/dir/input-*']
      To retrieve all files '*' will match every object in the bucket. If a file
      is listed twice or is covered by multiple prefixes it will be read twice,
      there is no deduplication.

  Optional configuration in the mapper_spec.input_reader dictionary.
    buffer_size : the size of the read buffer for each file handle.
    delimiter : if specified, turn on the shallow splitting mode.
      The delimiter is used as a path separator to designate directory
      hierarchy. Matching of prefixes from objects
      will stop at the first directory instead of matching
      all files under the directory. This allows MR to process bucket with
      hundreds of thousands of files.

  Outputs:
    A tuple containing an other tuple and the file contents
    ((Zip file name, text file), file data)
      Zip file name : Name of the zip file being processed
      text file : Current file being output
      data : contents of the file
  """
  # Mapreduce parameters.
  OBJECT_NAMES_PARAM = 'objects'
  START_INDEX_PARAM = 'start_index'
  END_INDEX_PARAM = 'end_index'
  BUFFER_SIZE_PARAM = 'buffer_size'
  DELIMITER_PARAM = 'delimiter'
  BUCKET_NAME_PARAM = 'bucket_name'
  _ACCOUNT_ID_PARAM = 'account_id'

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    reader_spec = _get_params(mapper_spec, allow_old=False)

    # Bucket Name is required
    if cls.BUCKET_NAME_PARAM not in reader_spec:
      raise errors.BadReaderParamsError(
          '%s is required for Google Cloud Storage' %
          cls.BUCKET_NAME_PARAM)
    try:
      cloudstorage.validate_bucket_name(
          reader_spec[cls.BUCKET_NAME_PARAM])
    except ValueError as error:
      raise errors.BadReaderParamsError('Bad bucket name, %s' % (error))

    # Object Name(s) are required
    if cls.OBJECT_NAMES_PARAM not in reader_spec:
      raise errors.BadReaderParamsError(
          '%s is required for Google Cloud Storage' %
          cls.OBJECT_NAMES_PARAM)
    file_names = reader_spec[cls.OBJECT_NAMES_PARAM]
    if not isinstance(file_names, list):
      raise errors.BadReaderParamsError(
          'Object name list is not a list but a %s' %
          file_names.__class__.__name__)
    for file_name in file_names:
      if not isinstance(file_name, basestring):
        raise errors.BadReaderParamsError(
            'Object name is not a string but a %s' %
            file_name.__class__.__name__)

    # The delimiter, when given, must be a string.
    if cls.DELIMITER_PARAM in reader_spec:
      delimiter = reader_spec[cls.DELIMITER_PARAM]
      if not isinstance(delimiter, basestring):
        raise errors.BadReaderParamsError(
            '%s is not a string but a %s' %
            (cls.DELIMITER_PARAM, type(delimiter)))

  @classmethod
  def from_json(cls, json):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    # Bug fix: restore the optional reader settings (.get tolerates older
    # serialized states lacking these keys).
    return cls(json[cls.OBJECT_NAMES_PARAM],
               json[cls.START_INDEX_PARAM],
               json[cls.END_INDEX_PARAM],
               buffer_size=json.get(cls.BUFFER_SIZE_PARAM),
               delimiter=json.get(cls.DELIMITER_PARAM),
               account_id=json.get(cls._ACCOUNT_ID_PARAM))

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    return {self.OBJECT_NAMES_PARAM: self._file_name,
            self.START_INDEX_PARAM: self._start_index,
            self.END_INDEX_PARAM: self._end_index,
            # Bug fix: persist the optional reader settings so a resumed
            # shard keeps using them (they were previously dropped).
            self.BUFFER_SIZE_PARAM: self._buffer_size,
            self.DELIMITER_PARAM: self._delimiter,
            self._ACCOUNT_ID_PARAM: self._account_id}

  def __str__(self):
    """Returns the string representation of this GCSZipInputReader."""
    return 'File Name(%r):[%d, %d]' % (
        self._file_name, self._start_index, self._end_index)

  # pylint: disable=too-many-locals
  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of shard_count input_spec_shards for input_spec.

    Args:
      mapper_spec: The mapper specification to split from. Must contain
        'objects' parameter with one or more file_names.

    Returns:
      A list of GCSInputReaders corresponding to the specified shards.
    """
    reader_spec = _get_params(mapper_spec, allow_old=False)
    bucket = reader_spec[cls.BUCKET_NAME_PARAM]
    file_names = reader_spec[cls.OBJECT_NAMES_PARAM]
    delimiter = reader_spec.get(cls.DELIMITER_PARAM)
    account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM)
    buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM)

    # Gather the complete list of zip files (expanding wildcards).
    all_file_names = []
    for file_name in file_names:
      if file_name.endswith('*'):
        all_file_names.extend(
            [file_stat for file_stat in cloudstorage.listbucket(
                '/' + bucket + '/' + file_name[:-1], delimiter=delimiter,
                _account_id=account_id)])
      else:
        try:
          all_file_names.append(cloudstorage
                                .stat(('/%s/%s') % (bucket, file_name)))
        except cloudstorage.NotFoundError:
          logging.warning('File /%s/%s may have been removed. Skipping file.',
                          bucket, file_name)

    # Read each archive's table of contents to compute the total payload
    # size, which drives the size-balanced sharding below.
    # (The original also computed an unused shards_per_file value and
    # recomputed shard_count twice; that dead code has been removed.)
    sub_files = {}
    total_size = 0
    for file_name in all_file_names:
      logging.info(file_name.filename)
      zip_input = zipfile.ZipFile(cloudstorage.open(file_name.filename))
      sub_files[file_name] = zip_input.infolist()
      total_size += sum(x.file_size for x in sub_files[file_name])

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
    size_per_shard = total_size // shard_count

    # Build shards by accumulating archive members until each shard holds
    # roughly size_per_shard bytes of uncompressed data.
    readers = []
    for file_name in all_file_names:
      bfiles = sub_files[file_name]
      current_shard_size = 0
      start_file_index = 0
      next_file_index = 0
      for fileinfo in bfiles:
        next_file_index += 1
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
          readers.append(cls(file_name.filename, start_file_index,
                             next_file_index, buffer_size=buffer_size,
                             delimiter=delimiter, account_id=account_id))
          current_shard_size = 0
          start_file_index = next_file_index
      if current_shard_size != 0:
        readers.append(cls(file_name.filename, start_file_index,
                           next_file_index, buffer_size=buffer_size,
                           delimiter=delimiter, account_id=account_id))

    return readers

  # pylint: disable=too-many-arguments
  def __init__(self, file_name, start_index, end_index,
               buffer_size=None, delimiter=None, account_id=None):
    """Initializes this instance with the given file and file range.

    This GCSZipInputReader will read from the file with index start_index
    up to but not including the file with index end_index.

    Args:
      file_name: the file name that this input reader is processing.
      start_index: the index of the first file to read.
      end_index: the index of the first file that will not be read.
      buffer_size: Used by the GCS reader to read data.
      delimiter: The delimiter is used as a path separator to designate
        directory hierarchy.
      account_id: internal use

    Raises:
      StopIteration: if the zip file no longer exists in GCS.
    """
    self._file_name = file_name
    self._start_index = start_index
    self._end_index = end_index
    self._buffer_size = buffer_size
    self._account_id = account_id
    self._delimiter = delimiter

    options = {}
    if self._buffer_size:
      options['read_buffer_size'] = self._buffer_size
    if self._account_id:
      options['_account_id'] = self._account_id
    try:
      # pylint: disable=star-args
      self._reader = cloudstorage.open(file_name, **options)
    except cloudstorage.NotFoundError:
      logging.warning('File /%s may have been removed. Skipping file.',
                      file_name)
      # Bug fix: the original fell through without setting self._reader,
      # which made next() crash with AttributeError. End the shard instead,
      # mirroring GoogleCloudStorageLineInputReader.
      raise StopIteration()

    self._zip = None
    self._entries = None

  def next(self):
    """Returns the next input from this input reader as
    ((ZipInfo, Current file name), full file contents) tuple.

    Returns:
      The next input from this input reader, in the form of a 2-tuple.
      The first element of the tuple is another tuple (Zip file name,
      text file name).
      The second element of the tuple complete body of the file.
    """
    if not self._zip:
      self._zip = zipfile.ZipFile(self._reader)
      # Get a list of entries, reversed so we can pop entries off in order
      self._entries = self._zip.infolist()[self._start_index:self._end_index]
      self._entries.reverse()
    if not self._entries:
      raise StopIteration()
    entry = self._entries.pop()
    self._start_index += 1
    # entry is infolist()[self._start_index - 1]; use it directly instead of
    # re-reading the archive's full table of contents as the original did.
    return ((self._file_name, entry.filename), self._read(entry))

  def _read(self, entry):
    """Read entry content.

    Args:
      entry: zip file entry as zipfile.ZipInfo.

    Returns:
      Entry content as string.
    """
    start_time = time.time()
    content = self._zip.read(entry.filename)

    ctx = context.get()
    if ctx:
      operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
      operation.counters.Increment(
          COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)

    return content
class GoogleCloudStorageZipLineInputReader(InputReader):
"""Input reader for newline delimited files in zip archives from GCS.
This has the same external interface as the GoogleCloudStorageZipInputReader,
in that it takes a list of files as its input and yields lines to the reader.
However the files themselves are expected to be zip archives of line delimited
files instead of the files themselves.
This is useful as many line delimited files gain greatly from compression.
Required configuration in the mapper_spec.input_reader dictionary.
bucket_name : name of the bucket to use (with no extra delimiters or
suffixes such as directories).
objects : a list of object names or prefixes. All objects must be
in the bucket_name. They all must be zip files. If the name ends with
a * it will be treated as prefix and all objects with matching names will
be read. Entries should not start with a slash unless that is part of the
object's name. An example list could be:
['my-1st-input-file', 'directory/my-2nd-file', 'some/other/dir/input-*']
To retrieve all files '*' will match every object in the bucket. If a file
is listed twice or is covered by multiple prefixes it will be read twice,
there is no deduplication.
Optional configuration in the mapper_spec.input_reader dictionary.
buffer_size : the size of the read buffer for each file handle.
delimiter : if specified, turn on the shallow splitting mode.
The delimiter is used as a path separator to designate directory
hierarchy. Matching of prefixes from objects
will stop at the first directory instead of matching
all files under the directory. This allows MR to process bucket with
hundreds of thousands of files.
Outputs:
A tuple containing an other tuple and the file contents
((Zip file name, text file, start position), line)
Zip file name : Name of the zip file being processed
text file : Current file being output
start position : Files index position for the start of the data
line : The data read till a '\n' was reached
"""
# Mapreduce parameters.
OBJECT_NAMES_PARAM = 'objects'
BUFFER_SIZE_PARAM = 'buffer_size'
DELIMITER_PARAM = 'delimiter'
BUCKET_NAME_PARAM = 'bucket_name'
_ACCOUNT_ID_PARAM = 'account_id'
# Maximum number of shards to allow.
_MAX_SHARD_COUNT = 256
# Serialization parameters.
START_FILE_INDEX_PARAM = 'start_file_index'
END_FILE_INDEX_PARAM = 'end_file_index'
OFFSET_PARAM = 'offset'
@classmethod
def validate(cls, mapper_spec):
  """Validates mapper spec and all mapper parameters.

  Checks that a bucket name and a list of string object names are
  present, and that the optional delimiter, when given, is a string.

  Args:
    mapper_spec: The MapperSpec for this InputReader.

  Raises:
    BadReaderParamsError: required parameters are missing or invalid.
  """
  reader_spec = _get_params(mapper_spec, allow_old=False)

  # Bucket Name is required.
  if cls.BUCKET_NAME_PARAM not in reader_spec:
    raise errors.BadReaderParamsError(
        '%s is required for Google Cloud Storage' %
        cls.BUCKET_NAME_PARAM)
  try:
    cloudstorage.validate_bucket_name(
        reader_spec[cls.BUCKET_NAME_PARAM])
  except ValueError as error:
    # 'except ... as' works on Python 2.6+ and 3.x; the original
    # 'except ValueError, error' comma form is Python-2-only syntax.
    raise errors.BadReaderParamsError('Bad bucket name, %s' % (error))

  # Object Name(s) are required.
  if cls.OBJECT_NAMES_PARAM not in reader_spec:
    raise errors.BadReaderParamsError(
        '%s is required for Google Cloud Storage' %
        cls.OBJECT_NAMES_PARAM)
  file_names = reader_spec[cls.OBJECT_NAMES_PARAM]
  if not isinstance(file_names, list):
    raise errors.BadReaderParamsError(
        'Object name list is not a list but a %s' %
        file_names.__class__.__name__)
  for file_name in file_names:
    if not isinstance(file_name, basestring):
      raise errors.BadReaderParamsError(
          'Object name is not a string but a %s' %
          file_name.__class__.__name__)

  # Delimiter is optional, but must be a string when supplied.
  if cls.DELIMITER_PARAM in reader_spec:
    delimiter = reader_spec[cls.DELIMITER_PARAM]
    if not isinstance(delimiter, basestring):
      raise errors.BadReaderParamsError(
          '%s is not a string but a %s' %
          (cls.DELIMITER_PARAM, type(delimiter)))
def to_json(self):
  """Serializes the remaining input state of this reader.

  Returns:
    A json-serializable dict holding the zip file name, the member
    index range still to be processed, and the resume byte offset.
  """
  state = {}
  state[self.OBJECT_NAMES_PARAM] = self._file_name
  state[self.START_FILE_INDEX_PARAM] = self._start_file_index
  state[self.END_FILE_INDEX_PARAM] = self._end_file_index
  state[self.OFFSET_PARAM] = self._next_offset()
  return state
@classmethod
def from_json(cls, json):
  """Creates an instance of the InputReader for the given input shard state.

  Args:
    json: The InputReader state as a dict-like object, as produced by
      to_json().

  Returns:
    An instance of the InputReader configured using the values of json.
  """
  file_name = json[cls.OBJECT_NAMES_PARAM]
  start_index = json[cls.START_FILE_INDEX_PARAM]
  end_index = json[cls.END_FILE_INDEX_PARAM]
  offset = json[cls.OFFSET_PARAM]
  return cls(file_name, start_index, end_index, offset)
def __str__(self):
  """Returns the string representation of this reader.

  Returns:
    string file_name:[start file num, end file num]:current offset.
  """
  return 'file_name({!r}):[{:d}, {:d}]:{:d}'.format(
      self._file_name, self._start_file_index, self._end_file_index,
      self._next_offset())
# pylint: disable=too-many-locals
@classmethod
def split_input(cls, mapper_spec):
  """Returns a list of shard_count input_spec_shards for input_spec.

  Expands trailing-'*' prefixes into concrete object names, reads each
  zip archive's table of contents, and partitions the members into
  shards of roughly equal accumulated (uncompressed) size.

  Args:
    mapper_spec: The mapper specification to split from. Must contain
      'objects' parameter with one or more file_names.

  Returns:
    A list of GCSInputReaders corresponding to the specified shards.
  """
  reader_spec = _get_params(mapper_spec, allow_old=False)
  bucket = reader_spec[cls.BUCKET_NAME_PARAM]
  file_names = reader_spec[cls.OBJECT_NAMES_PARAM]
  delimiter = reader_spec.get(cls.DELIMITER_PARAM)
  account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM)
  buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM)

  # Expand names/prefixes into a flat list of file stat objects. A name
  # ending in '*' is treated as a prefix; missing files are skipped.
  all_file_names = []
  for file_name in file_names:
    if file_name.endswith('*'):
      all_file_names.extend(cloudstorage.listbucket(
          '/' + bucket + '/' + file_name[:-1], delimiter=delimiter,
          _account_id=account_id))
    else:
      try:
        all_file_names.append(
            cloudstorage.stat('/%s/%s' % (bucket, file_name)))
      except cloudstorage.NotFoundError:
        logging.warning('File /%s/%s may have been removed. Skipping file.',
                        bucket, file_name)

  # NOTE(review): the original computed this twice (and also computed an
  # unused shards_per_file value); both redundancies removed.
  shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)

  # Read each archive's member list once, totalling uncompressed sizes
  # so the shards can be balanced by content size. The cloudstorage
  # handles opened here are left to be reclaimed by GC (as before).
  sub_files = {}
  total_size = 0
  for file_name in all_file_names:
    zip_input = zipfile.ZipFile(cloudstorage.open(file_name.filename))
    sub_files[file_name] = zip_input.infolist()
    total_size += sum(x.file_size for x in sub_files[file_name])

  size_per_shard = total_size // shard_count

  # Emit a reader whenever the accumulated member size reaches
  # size_per_shard; a trailing partial shard collects the remainder.
  readers = []
  for file_name in all_file_names:
    bfiles = sub_files[file_name]
    current_shard_size = 0
    start_file_index = 0
    next_file_index = 0
    for fileinfo in bfiles:
      next_file_index += 1
      current_shard_size += fileinfo.file_size
      if current_shard_size >= size_per_shard:
        readers.append(cls(file_name.filename, start_file_index,
                           next_file_index, buffer_size=buffer_size,
                           delimiter=delimiter, account_id=account_id))
        current_shard_size = 0
        start_file_index = next_file_index
    if current_shard_size != 0:
      readers.append(cls(file_name.filename, start_file_index,
                         next_file_index, buffer_size=buffer_size,
                         delimiter=delimiter, account_id=account_id))
  return readers
def next(self):
  """Returns the next line from this input reader as
  ((ZipInfo, file_name, Start Position), line) tuple.

  Returns:
    The next input from this input reader, in the form of a 2-tuple.
    The first element of the tuple describes the source, it is itself
    a tuple (Zip file name, Text file name, byteoffset).
    The second element of the tuple is the line found at that offset.
  """
  # Lazily open the zip and snapshot the member slice this shard owns,
  # [start_file_index, end_file_index).
  if not self._filestream:
    if not self._zip:
      self._zip = zipfile.ZipFile(self._reader)
      # Get a list of entries, reversed so we can pop entries off in order
      self._entries = self._zip.infolist()[self._start_file_index:
                                           self._end_file_index]
      self._entries.reverse()
    # No members left and none in flight: this shard is exhausted.
    if not self._entries and not self._entry:
      raise StopIteration()
  # Advance to the next member only when the previous one finished
  # (self._entry is cleared below when a member reaches EOF).
  if not self._entry:
    self._entry = self._entries.pop()
  file_name = self._entry.filename
  # NOTE(review): the current member is decompressed in full on each
  # call and the stream re-seeked to the saved offset -- presumably to
  # keep the reader trivially serializable; confirm this is intended,
  # as it is O(member size) per line read.
  value = self._zip.read(file_name)
  self._filestream = StringIO.StringIO(value)
  if self._initial_offset:
    self._filestream.seek(self._initial_offset)
  start_position = self._filestream.tell()
  line = self._filestream.readline()
  # Remember where the next line starts so serialization can resume here.
  self._initial_offset = self._filestream.tell()
  if not line:
    # Done with this file in the zip. Move on to the next file.
    self._filestream.close()
    self._filestream = None
    self._start_file_index += 1
    self._initial_offset = 0
    self._entry = None
    return self.next()
  return((self._file_name, file_name, start_position), line.rstrip('\n'))
def _next_offset(self):
"""Return the offset of the next line to read."""
if self._filestream:
offset = self._filestream.tell()
if offset:
offset -= 1
else:
offset = self._initial_offset
return offset
# pylint: disable=too-many-arguments
def __init__(self, file_name, start_file_index, end_file_index, offset=0,
             buffer_size=None, delimiter=None, account_id=None):
  """Initializes this instance with the given file name and file range.

  This GoogleCloudStorageZipInputReader will read from the file with index
  start_file_index up to but not including the file with index end_file_index.
  It will return lines starting at offset within file[start_file_index].

  Args:
    file_name: the file name that this input reader is processing.
    start_file_index: the index of the first file to read within the zip.
    end_file_index: the index of the first file that will not be read.
    offset: the byte offset within blob_key.zip[start_file_index] to start
      reading. The reader will continue to the end of the file.
    buffer_size: read buffer size for the cloudstorage handle, or None
      for the library default.
    delimiter: path separator for shallow splitting, or None to disable.
    account_id: account id used for cloudstorage access, or None for the
      default.
  """
  self._file_name = file_name
  self._start_file_index = start_file_index
  self._end_file_index = end_file_index
  self._initial_offset = offset
  self._buffer_size = buffer_size
  self._account_id = account_id
  self._delimiter = delimiter
  # Default to None so a missing file leaves a well-defined attribute
  # (previously _reader was never set on NotFoundError, and any later
  # access raised AttributeError).
  self._reader = None

  options = {}
  if self._buffer_size:
    options['read_buffer_size'] = self._buffer_size
  if self._account_id:
    options['_account_id'] = self._account_id
  try:
    # pylint: disable=star-args
    self._reader = cloudstorage.open(file_name, **options)
  except cloudstorage.NotFoundError:
    # file_name already carries its leading '/bucket/' prefix (see
    # split_input), so don't prepend another slash to the message.
    logging.warning('File %s may have been removed. Skipping file.',
                    file_name)

  self._zip = None
  self._entries = None
  self._entry = None
  self._filestream = None
| rbruyere/appengine-mapreduce | python/test/mapreduce gcs/mapreduce/input_readers.py | Python | apache-2.0 | 128,812 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ecr/model/BatchCheckLayerAvailabilityResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::ECR::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
// Default constructor: starts with empty layer and failure collections.
BatchCheckLayerAvailabilityResult::BatchCheckLayerAvailabilityResult()
{
}
// Constructs the result from a raw service response by delegating to the
// JSON-deserializing assignment operator below.
BatchCheckLayerAvailabilityResult::BatchCheckLayerAvailabilityResult(const AmazonWebServiceResult<JsonValue>& result)
{
  *this = result;
}
// Deserializes the service response payload into this result object.
// Appends entries from the optional "layers" and "failures" JSON arrays;
// keys that are absent leave the corresponding member untouched.
BatchCheckLayerAvailabilityResult& BatchCheckLayerAvailabilityResult::operator =(const AmazonWebServiceResult<JsonValue>& result)
{
  const JsonValue& payload = result.GetPayload();

  if(payload.ValueExists("layers"))
  {
    Array<JsonValue> layerItems = payload.GetArray("layers");
    for(unsigned i = 0; i < layerItems.GetLength(); ++i)
    {
      m_layers.push_back(layerItems[i].AsObject());
    }
  }

  if(payload.ValueExists("failures"))
  {
    Array<JsonValue> failureItems = payload.GetArray("failures");
    for(unsigned i = 0; i < failureItems.GetLength(); ++i)
    {
      m_failures.push_back(failureItems[i].AsObject());
    }
  }

  return *this;
}
| chiaming0914/awe-cpp-sdk | aws-cpp-sdk-ecr/source/model/BatchCheckLayerAvailabilityResult.cpp | C++ | apache-2.0 | 1,963 |