text
stringlengths 7
1.01M
|
|---|
package io.mincong.tomcat;
import java.io.IOException;
import java.util.logging.Logger;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebFilter;
/**
 * Servlet filter mapped to the root path that traces the filter lifecycle and
 * request processing via java.util.logging.
 */
@WebFilter("/")
public class MyFilter implements Filter {

    private static final Logger LOGGER = Logger.getLogger(MyFilter.class.getName());

    /** Logs container initialization; the supplied configuration is not used. */
    @Override
    public void init(FilterConfig filterConfig) {
        LOGGER.info("Initialized.");
    }

    /**
     * Logs one line before delegating to the rest of the chain and one line
     * after the downstream filters/servlet have finished.
     */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        LOGGER.info("Before...");
        chain.doFilter(request, response);
        LOGGER.info("After...");
    }

    /** Logs teardown when the container destroys this filter instance. */
    @Override
    public void destroy() {
        LOGGER.info("Destroyed.");
    }
}
|
/*
* Copyright 2005-2018 Dozer Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dozermapper.core.vo.deepindex;
/**
 * Simple holder wrapping a single {@link HeadOfHouseHolds} instance, used by
 * the deep-index mapping scenarios.
 */
public class HeadOfHouseHoldsContainer {

    private HeadOfHouseHolds headOfHouseHolds;

    /** @return the wrapped value, or {@code null} when none has been set */
    public HeadOfHouseHolds getHeadOfHouseHolds() {
        return headOfHouseHolds;
    }

    /** @param headOfHouseHolds the value to wrap; may be {@code null} */
    public void setHeadOfHouseHolds(HeadOfHouseHolds headOfHouseHolds) {
        this.headOfHouseHolds = headOfHouseHolds;
    }
}
|
package com.nukkitx.protocol.bedrock.v388.serializer;
import com.nukkitx.network.VarInts;
import com.nukkitx.protocol.bedrock.packet.SpawnParticleEffectPacket;
import com.nukkitx.protocol.bedrock.v388.BedrockUtils;
import com.nukkitx.protocol.serializer.PacketSerializer;
import io.netty.buffer.ByteBuf;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class SpawnParticleEffectSerializer_v388 implements PacketSerializer<SpawnParticleEffectPacket> {
    // Stateless serializer shared as a singleton; instantiation is blocked by the
    // Lombok-generated private constructor.
    public static final SpawnParticleEffectSerializer_v388 INSTANCE = new SpawnParticleEffectSerializer_v388();

    /**
     * Encodes the packet in wire order: dimension id (single byte), unique entity
     * id (variable-length encoded long), position (vector of 3 floats), then the
     * particle identifier string. The order must remain the exact mirror of
     * {@link #deserialize}.
     */
    @Override
    public void serialize(ByteBuf buffer, SpawnParticleEffectPacket packet) {
        buffer.writeByte(packet.getDimensionId());
        VarInts.writeLong(buffer, packet.getUniqueEntityId());
        BedrockUtils.writeVector3f(buffer, packet.getPosition());
        BedrockUtils.writeString(buffer, packet.getIdentifier());
    }

    /**
     * Decodes the packet fields in the same order they were written by
     * {@link #serialize}. The dimension id is read unsigned so the raw byte is
     * not sign-extended.
     */
    @Override
    public void deserialize(ByteBuf buffer, SpawnParticleEffectPacket packet) {
        packet.setDimensionId(buffer.readUnsignedByte());
        packet.setUniqueEntityId(VarInts.readLong(buffer));
        packet.setPosition(BedrockUtils.readVector3f(buffer));
        packet.setIdentifier(BedrockUtils.readString(buffer));
    }
}
|
/*
* Copyright (c) 2015 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.heroic.servlet;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.spotify.heroic.ws.ErrorMessage;
import com.spotify.heroic.ws.MandatoryClientIdErrorMessage;
import java.io.IOException;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.Response.Status;
/**
 * Rejects anonymous requests. That is, requests to any API endpoint that are
 * missing a non-empty X-Client-Id HTTP header.<p></p>
 * *Note* that this @SuppressWarnings has to go here even though it's just for the doFilter
 * method, else the JavaDoc for it doesn't render. Weird.
 */
@SuppressWarnings("checkstyle:LineLength")
public class MandatoryClientIdFilter extends SimpleFilter {

    public static final String X_CLIENT_ID_HEADER_NAME = "X-Client-Id";
    public static final String MISSING_X_CLIENT_ID = "missing-client-id";
    public static final String ERROR_MESSAGE_TEXT =
        "This anonymous request has been rejected. Please add a 'x-client-id' " +
        "HTTP header to your request.";

    public MandatoryClientIdFilter(ObjectMapper mapper) {
        super(mapper);
    }

    /**
     * Reject (with a 400) the request, if the X-Client-Id HTTP header is not present
     * or is null/empty.<p>
     * Calling {@link javax.servlet.FilterChain#doFilter}
     * effectively "passes" this filter and the next
     * filter gets a stab at it. <p>
     * Conversely, not calling doFilter halts "happy path" processing altogether
     * and that's the mechanism with which we stop anonymous requests.<p>
     * Finally, using
     * {@link javax.servlet.http.HttpServletResponse#sendError(int, java.lang.String)} to
     * return a status message didn't work and instead sent text of "internal error" back
     * to the client.
     */
    @Override
    public ErrorMessage doFilterImpl(
        HttpServletRequest request, HttpServletResponse response, FilterChain chain
    ) throws IOException, ServletException {
        // NOTE(review): chain.doFilter is deliberately not invoked here; presumably
        // SimpleFilter calls doFilterImpl only when passesFilter() returned false and
        // handles the pass-through case itself — confirm against SimpleFilter.
        final var info =
            new MandatoryClientIdErrorMessage("Anonymous requests are not permitted");
        response.setStatus(Status.BAD_REQUEST.getStatusCode());
        return info;
    }

    /**
     * Returns true if the HTTP header X-Client-Id is present and non-null and not empty.
     *
     * @param request request to pluck X-Client-Id's value from
     * @return see above
     */
    @Override
    public boolean passesFilter(ServletRequest request) {
        // Plain cast is the idiomatic form when the target type is statically known.
        final var req = (HttpServletRequest) request;
        return !Strings.isNullOrEmpty(req.getHeader(X_CLIENT_ID_HEADER_NAME));
    }
}
|
package org.kilocraft.essentials.api.util;
import net.minecraft.entity.EntityType;
import net.minecraft.predicate.entity.EntityPredicates;
import net.minecraft.server.network.ServerPlayerEntity;
import net.minecraft.server.world.ServerWorld;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Box;
import net.minecraft.util.math.ChunkPos;
import net.minecraft.util.registry.Registry;
import net.minecraft.world.GameMode;
import net.minecraft.world.WorldAccess;
import org.kilocraft.essentials.api.KiloEssentials;
import org.kilocraft.essentials.api.ModConstants;
import org.kilocraft.essentials.util.settings.ServerSettings;
public class TickManager {

    // Number of tick samples retained: 72000 ticks = one hour at the nominal
    // 20 TPS, i.e. exactly the largest averaging window below.
    private final static int STORED_TICKS = 72000;
    // Ring buffer of raw tick durations in nanoseconds (see the 50000000 clamp
    // and /1000000 conversion in writeTpsAndMspt), used for mspt/tps calculations.
    private static final long[] TICK_TIMES = new long[STORED_TICKS];
    /*
     * Arrays that store the tps & mspt of the last x seconds
     * index - time
     * 0 - 5s
     * 1 - 15s
     * 2 - 1m
     * 3 - 5m
     * 4 - 15m
     * 5 - 1h
     * */
    private final static int[] TICK_STORAGE_SIZES = {100, 300, 1200, 6000, 18000, 72000};
    // Rolling averages, one slot per window in TICK_STORAGE_SIZES.
    public static double[] tps = new double[TICK_STORAGE_SIZES.length];
    public static double[] mspt = new double[TICK_STORAGE_SIZES.length];
    private static int currentTick = 0;

    // Static utility holder; never instantiated.
    private TickManager() {
    }

    /**
     * Called once per server tick: records the duration of the previous tick in
     * the ring buffer, refreshes the rolling tps/mspt averages and, when the
     * automated mode is enabled, periodically runs the tuning routines.
     */
    public static void onTick() {
        currentTick = KiloEssentials.getMinecraftServer().getTicks();
        // Get the tick length of the previous tick from the vanilla sample array
        // (index wraps; +length keeps the modulo argument non-negative).
        long[] lastTickLengths = KiloEssentials.getMinecraftServer().lastTickLengths;
        long lastTickLength = lastTickLengths[(currentTick + lastTickLengths.length - 1) % lastTickLengths.length];
        // Make sure the value was initialized (0 = slot never written yet).
        if (lastTickLength != 0) {
            TICK_TIMES[currentTick % STORED_TICKS] = lastTickLength;
        }
        calculateTps();
        if (currentTick % ServerSettings.tick_utils_update_rate == 0 && ServerSettings.tick_utils_automated)
            automatedTickUtils();
    }

    // Recomputes each averaging window: index 0 on every tick, larger windows only
    // every (index * 5) ticks to spread the averaging work out.
    private static void calculateTps() {
        for (int i = 0; i < TICK_STORAGE_SIZES.length; i++) {
            if (i == 0 || currentTick % (i * 5) == 0) writeTpsAndMspt(i);
        }
    }

    /**
     * Computes the average TPS and MSPT over the window at {@code index}, walking
     * the ring buffer backwards from the current tick, and stores the results in
     * {@link #tps} / {@link #mspt}. Slots still holding 0 (never written) are skipped.
     */
    private static void writeTpsAndMspt(int index) {
        // Time used for calculating ticks per second (each tick is at least 50ms long)
        double totalTickTime = 0;
        // Time used for calculating average ms per tick
        double actualTotalTickTime = 0;
        int validTicks = 0;
        int length = Math.min(TICK_STORAGE_SIZES[index], STORED_TICKS);
        for (int i = 0; i < length; i++) {
            long tickTime = TICK_TIMES[(currentTick - i + STORED_TICKS) % STORED_TICKS];
            if (tickTime != 0) {
                // Calculate tick length (has to be at least 50ms, because that is how
                // long the server will wait if it finished quicker).
                totalTickTime += Math.max(tickTime, 50000000);
                actualTotalTickTime += tickTime;
                validTicks++;
            }
        }
        if (validTicks > 0) {
            double averageTickLength = actualTotalTickTime / validTicks;
            // 1e9 ns per second divided by the clamped average tick length in ns,
            // so TPS caps at 20 (= 1e9 / 5e7).
            double averageTPS = 1000000000 / (totalTickTime / validTicks);
            tps[index] = averageTPS;
            mspt[index] = averageTickLength / 1000000;
        }
    }

    // Runs every automated adjustment against the 5-second average mspt (index 0).
    private static void automatedTickUtils() {
        checkViewDistance(mspt[0]);
        checkMobcaps(mspt[0]);
        checkTickDistance(mspt[0]);
    }

    // Shrinks the view distance when overloaded (>45 mspt) and the global mobcap has
    // already been driven to its minimum; grows it again under low load (<35 mspt).
    private static void checkViewDistance(double mspt) {
        int viewDistance = ServerSettings.getViewDistance();
        if (mspt > 45 && ServerSettings.tick_utils_global_mobcap <= ServerSettings.tick_utils_min_mobcap && ServerSettings.getViewDistance() > ServerSettings.tick_utils_min_view_distance) {
            ServerSettings.setViewDistance(viewDistance - 1);
        } else if (mspt < 35 && viewDistance < ServerSettings.tick_utils_max_view_distance) {
            ServerSettings.setViewDistance(viewDistance + 1);
        }
    }

    // Shrinks the tick distance when overloaded (>40 mspt); grows it under low load
    // (<30 mspt) once the global mobcap has been restored to its maximum.
    private static void checkTickDistance(double mspt) {
        int tickDistance = ServerSettings.tick_utils_tick_distance;
        if (mspt > 40 && tickDistance > ServerSettings.tick_utils_min_tick_distance) {
            ServerSettings.setInt("tick_utils.tick_distance", ServerSettings.tick_utils_tick_distance - 1);
        } else if (mspt < 30 && tickDistance < ServerSettings.tick_utils_max_tick_distance && ServerSettings.tick_utils_global_mobcap >= ServerSettings.tick_utils_max_mobcap) {
            ServerSettings.setInt("tick_utils.tick_distance", ServerSettings.tick_utils_tick_distance + 1);
        }
    }

    // Lowers the global mobcap by 0.1 when overloaded (>45 mspt) and the tick distance
    // is already minimal; raises it under low load (<35 mspt) once the view distance
    // has been restored to its maximum.
    private static void checkMobcaps(double mspt) {
        if (mspt > 45 && ServerSettings.tick_utils_tick_distance == ServerSettings.tick_utils_min_tick_distance && ServerSettings.tick_utils_global_mobcap > ServerSettings.tick_utils_min_mobcap) {
            ServerSettings.setFloat("tick_utils.global_mobcap", ServerSettings.tick_utils_global_mobcap - 0.1F);
        } else if (mspt < 35 && ServerSettings.tick_utils_global_mobcap < ServerSettings.tick_utils_max_mobcap && ServerSettings.getViewDistance() == ServerSettings.tick_utils_max_view_distance) {
            ServerSettings.setFloat("tick_utils.global_mobcap", ServerSettings.tick_utils_global_mobcap + 0.1F);
        }
    }

    /**
     * Whether the chunk at {@code pos} should be ticked: always when tick distance
     * is disabled (-1) or not smaller than the view distance; otherwise only when
     * some non-spectator player's chunk is within tick distance (Chebyshev metric).
     */
    public static boolean shouldTick(ChunkPos pos, ServerWorld world) {
        if (ServerSettings.tick_utils_tick_distance == -1 || ServerSettings.tick_utils_tick_distance >= ServerSettings.getViewDistance()) {
            return true;
        }
        for (ServerPlayerEntity player : world.getPlayers()) {
            if (player.interactionManager.getGameMode() != GameMode.SPECTATOR && player.getChunkPos().getChebyshevDistance(pos) <= ServerSettings.tick_utils_tick_distance) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks whether the configured per-type entity limit around {@code pos} has
     * been reached. Range and limit are looked up from the settings of the FIRST
     * given type; entities of ALL given types inside the cubic box are counted
     * (spectators excluded).
     *
     * @return {@code true} when the limit is reached — or when no types were given
     */
    public static boolean isEntityLimitReached(WorldAccess world, BlockPos pos, EntityType<?>... entityType) {
        if (entityType == null || entityType.length == 0) return true;
        int range = ServerSettings.getInt("entity_limit." + Registry.ENTITY_TYPE.getId(entityType[0]).getPath() + ".range");
        int limit = ServerSettings.getInt("entity_limit." + Registry.ENTITY_TYPE.getId(entityType[0]).getPath() + ".limit");
        // Ignore negative values
        if (range > 0 && limit > 0) {
            // Count mobs from all given types
            int entityCount = 0;
            for (EntityType<?> type : entityType) {
                entityCount += world.getEntitiesByType(type, new Box(pos.mutableCopy().add(range, range, range), pos.mutableCopy().add(-range, -range, -range)), EntityPredicates.EXCEPT_SPECTATOR).size();
            }
            if (limit <= entityCount) {
                return true;
            }
        }
        return false;
    }

    /** Formats the 5-second average mspt using the mod's shared decimal format. */
    public static String getFormattedMSPT() {
        return ModConstants.DECIMAL_FORMAT.format(mspt[0]);
    }
}
|
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.exec.store.hive.metadata;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.JobConf;
import com.dremio.common.map.CaseInsensitiveMap;
import com.dremio.exec.store.hive.HiveUtilities;
import com.dremio.exec.store.hive.exec.HiveReaderProtoUtil;
import com.dremio.exec.store.parquet.ManagedSchema;
import com.dremio.exec.store.parquet.ManagedSchemaField;
import com.dremio.hive.proto.HiveReaderProto;
import com.google.common.base.Splitter;
/**
 * Captures the Hive table schema as a case-insensitive map of field name to
 * {@link ManagedSchemaField}, built from the table's "columns" and
 * "columns.types" properties.
 */
public class ManagedHiveSchema implements ManagedSchema {

    private final Map<String, ManagedSchemaField> fieldInfo;

    public ManagedHiveSchema(final JobConf jobConf, final HiveReaderProto.HiveTableXattr tableXattr) {
        final java.util.Properties tableProperties = new java.util.Properties();
        HiveUtilities.addProperties(jobConf, tableProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

        final String namesProperty = Optional.ofNullable(tableProperties.getProperty("columns")).orElse("");
        final String typesProperty = Optional.ofNullable(tableProperties.getProperty("columns.types")).orElse("");
        final boolean varcharWidthEnforced = tableXattr.getEnforceVarcharWidth();

        final Iterator<String> nameIt = Splitter.on(",").trimResults().split(namesProperty).iterator();
        final Iterator<TypeInfo> typeIt = TypeInfoUtils.getTypeInfosFromTypeString(typesProperty).iterator();

        // Pair names with types positionally; extra entries on either side are dropped.
        final Map<String, ManagedSchemaField> fields = new HashMap<>();
        while (nameIt.hasNext() && typeIt.hasNext()) {
            final String name = nameIt.next();
            fields.put(name, toSchemaField(name, typeIt.next(), varcharWidthEnforced));
        }
        fieldInfo = CaseInsensitiveMap.newImmutableMap(fields);
    }

    /**
     * Maps one Hive type to a ManagedSchemaField. Decimals keep precision/scale;
     * char/varchar keep their declared length only when width enforcement is on.
     * Everything else is unbounded: no length/scale means max values are stored.
     * Extend ManagedSchemaField.java in case granular information has to be stored.
     */
    private static ManagedSchemaField toSchemaField(
            final String name, final TypeInfo type, final boolean enforceVarcharWidth) {
        if (type instanceof DecimalTypeInfo) {
            final DecimalTypeInfo decimal = (DecimalTypeInfo) type;
            return ManagedSchemaField.newFixedLenField(
                name, type.getTypeName(), decimal.getPrecision(), decimal.getScale());
        }
        if (type instanceof BaseCharTypeInfo && enforceVarcharWidth) {
            return ManagedSchemaField.newFixedLenField(
                name, type.getTypeName(), ((BaseCharTypeInfo) type).getLength(), 0);
        }
        return ManagedSchemaField.newUnboundedLenField(name, type.getTypeName());
    }

    @Override
    public Optional<ManagedSchemaField> getField(final String fieldName) {
        return Optional.ofNullable(fieldInfo.get(fieldName));
    }

    public Map<String, ManagedSchemaField> getAllFields() {
        return this.fieldInfo;
    }

    @Override
    public String toString() {
        return "HiveSchema{" + "fieldInfo=" + fieldInfo + '}';
    }
}
|
package io.happylrd.demo3.servlet;
import io.happylrd.demo3.controller.InputProductController;
import io.happylrd.demo3.controller.SaveProductController;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Front controller: routes "*.action" requests to the matching controller and
 * forwards to the view path the controller returns.
 */
public class DispatcherServlet extends HttpServlet {
    // Fix: the field was misspelled "serialVersionUUID", a name the Java
    // serialization machinery ignores; the recognized field is serialVersionUID.
    private static final long serialVersionUID = 748495L;

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        process(req, resp);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        process(req, resp);
    }

    /**
     * Extracts the action name from the last path segment of the request URI,
     * dispatches to the matching controller, then forwards to the view the
     * controller returned. Unrecognized actions produce no forward.
     */
    private void process(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String uri = request.getRequestURI();
        String action = uri.substring(uri.lastIndexOf('/') + 1);

        // Forward target; stays null for unrecognized actions.
        String dispatchUrl = null;
        if ("product_input.action".equals(action)) {
            dispatchUrl = new InputProductController().handleRequest(request, response);
        } else if ("product_save.action".equals(action)) {
            dispatchUrl = new SaveProductController().handleRequest(request, response);
        }
        if (dispatchUrl != null) {
            RequestDispatcher requestDispatcher = request.getRequestDispatcher(dispatchUrl);
            requestDispatcher.forward(request, response);
        }
    }
}
|
/*******************************************************************************
* Copyright 2019 Fabrizio Pastore, Leonardo Mariani
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
// This probe monitor all calls, also the one within components boundaries
/* probekit /BCT_new/probes/bctComponentLogger.probe
*/
// "imports" specifications for probes (if any):
import probes.LoggerProbe;
// Probekit-generated logger probe that forwards method entry/exit events
// (including calls within component boundaries) to probes.LoggerProbe.
// Generated code: class/method names and signatures are matched by the
// instrumentation engine and must stay as emitted.
class bctLPAll91 {
    // Class for probe unnamed_probe
    public static class Probe_0 {
        // Fragment at class scope (emitted verbatim by probekit; note this instance
        // field is never read by the static callbacks below).
        int id = 1;

        /**
         * Invoked after an instrumented call returns; forwards the exit event to
         * the logger. Any logger failure is caught and reported so that the
         * instrumentation never breaks the monitored application.
         */
        public static void _afterCall (
            Object /*returnedObject*/ returnedObject,
            String /*className*/ className,
            String /*methodName*/ methodName,
            String /*methodSig*/ methodSig,
            Object[] /*args*/ args ) {
            // Internal signature for this method: (Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
            // ------------------ begin user-written fragment code ----------------
            try{
                LoggerProbe.exit(className,methodName,methodSig,args,returnedObject);
            } catch ( Throwable e ){
                System.err.println("BCT COMPONENT LOGGER ERROR/EXCEPTION: "+e.getMessage());
                e.printStackTrace();
            }
            // ------------------- end user-written fragment code -----------------
        }

        /**
         * Invoked before an instrumented call; forwards the entry event to the
         * logger, swallowing (but reporting) any logger failure.
         */
        public static void _beforeCall (
            String /*className*/ className,
            String /*methodName*/ methodName,
            String /*methodSig*/ methodSig,
            Object[] /*args*/ args ) {
            // Internal signature for this method: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
            try{
                //System.out.println("#"+Thread.currentThread().getId()+"#"+"CALLENTER"+bctLP91.class);
                LoggerProbe.enter(className,methodName,methodSig,args);
            } catch ( Throwable e ){
                System.err.println("BCT COMPONENT LOGGER ERROR/EXCEPTION: "+e.getMessage());
                e.printStackTrace();
            }
        }
    }

    // Class for probe unnamed_probe — same logging callbacks, but attached to
    // method entry/exit (rather than call sites) by the instrumentation engine.
    public static class Probe_1 {
        // Fragment at class scope (emitted verbatim by probekit; unused by the
        // static callbacks below).
        int id = 0;

        /**
         * Invoked when an instrumented method exits; forwards the exit event to
         * the logger, swallowing (but reporting) any logger failure.
         */
        public static void _exit (
            Object /*returnedObject*/ returnedObject,
            String /*className*/ className,
            String /*methodName*/ methodName,
            String /*methodSig*/ methodSig,
            Object[] /*args*/ args ) {
            // Internal signature for this method: (Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
            try{
                LoggerProbe.exit(className,methodName,methodSig,args,returnedObject);
            } catch ( Throwable e ){
                System.err.println("BCT COMPONENT LOGGER ERROR/EXCEPTION: "+e.getMessage());
                e.printStackTrace();
            }
        }

        /**
         * Invoked when an instrumented method is entered; forwards the entry
         * event to the logger, swallowing (but reporting) any logger failure.
         */
        public static void _entry (
            String /*className*/ className,
            String /*methodName*/ methodName,
            String /*methodSig*/ methodSig,
            Object[] /*args*/ args ) {
            // Internal signature for this method: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;[Ljava/lang/Object;)V
            try{
                LoggerProbe.enter(className,methodName,methodSig,args);
            } catch ( Throwable e ){
                System.err.println("BCT COMPONENT LOGGER ERROR/EXCEPTION: "+e.getMessage());
                e.printStackTrace();
            }
        }
    }
}
|
package com.warden.common.biz;
import io.micrometer.core.instrument.util.StringUtils;
import org.beetl.sql.core.SQLManager;
import org.beetl.sql.core.db.KeyHolder;
import org.beetl.sql.core.engine.PageQuery;
import org.beetl.sql.core.mapper.BaseMapper;
import org.beetl.sql.core.query.Query;
import org.springframework.beans.factory.annotation.Autowired;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Calendar;
import java.util.List;
import static io.lettuce.core.MigrateArgs.Builder.key;
/**
 * Generic business-layer base class that delegates persistence operations to a
 * BeetlSQL {@link BaseMapper}.
 *
 * @param <M> mapper type bound to the entity
 * @param <E> entity type
 * @author YangJiaYing
 * @date 2019/05/19
 */
public abstract class BaseBiz<M extends BaseMapper<E>, E> {

    @Autowired
    protected M mapper;

    public void setMapper(M mapper) {
        this.mapper = mapper;
    }

    /**
     * Generic insert: persists the entity with every column participating,
     * unless a column is excluded via the ColumnIgnore annotation.
     *
     * @param entity entity to insert; a {@code null} entity is ignored
     */
    public void insert(E entity) {
        if (entity != null) {
            this.mapper.insert(entity);
        }
    }

    /**
     * Save-or-update: updates (non-null columns only) when the entity already
     * carries a primary-key value, otherwise inserts it.
     *
     * @param entity entity to persist; a {@code null} entity is ignored
     * @param autDbAssignKey whether to read back the auto-generated key on insert
     */
    public void save(E entity, boolean autDbAssignKey) {
        if (entity != null) {
            Object key = key(entity);
            if (key != null) {
                this.updateTemplateById(entity);
            } else {
                this.insert(entity, autDbAssignKey);
            }
        }
    }

    /**
     * Reads the entity's primary-key value through its {@code getId()} accessor.
     * <p>
     * Fix: the call {@code key(entity)} in {@link #save} previously resolved to the
     * statically imported {@code io.lettuce.core.MigrateArgs.Builder.key}, which never
     * returns {@code null}, so {@code save} always took the update path. Declaring
     * this member method shadows that static import and restores the intended
     * behavior (the original intent is visible in the previously commented-out
     * {@code ReflectionUtils.invokeGetter(entity, "id")} implementation).
     *
     * @param entity entity to inspect; assumed to expose a public {@code getId()}
     *               getter — entities without one are treated as keyless (insert path)
     * @return the id value, or {@code null} when absent or unreadable
     */
    protected Object key(E entity) {
        try {
            return entity.getClass().getMethod("getId").invoke(entity);
        } catch (ReflectiveOperationException ignored) {
            // No usable getId(): treat the entity as having no assigned key.
            return null;
        }
    }

    /**
     * Insert for tables with an auto-increment primary key; when
     * {@code autDbAssignKey} is {@code true} the generated key is read back
     * into the entity.
     *
     * @param entity entity to insert; a {@code null} entity is ignored
     * @param autDbAssignKey whether to fetch the auto-generated key
     */
    public void insert(E entity, boolean autDbAssignKey) {
        if (entity != null) {
            this.mapper.insert(entity, autDbAssignKey);
        }
    }

    /**
     * Inserts the entity, skipping {@code null}-valued properties.
     *
     * @param entity entity to insert; a {@code null} entity is ignored
     */
    public void insertTemplate(E entity) {
        if (entity != null) {
            this.mapper.insertTemplate(entity);
        }
    }

    /**
     * Inserts the entity, skipping {@code null}-valued properties; when
     * {@code autDbAssignKey} is {@code true} the auto-generated key is read back.
     *
     * @param entity entity to insert; a {@code null} entity is ignored
     * @param autDbAssignKey whether to fetch the auto-generated key
     */
    public void insertTemplate(E entity, boolean autDbAssignKey) {
        if (entity != null) {
            this.mapper.insertTemplate(entity, autDbAssignKey);
        }
    }

    /**
     * Batch insert. Auto-generated keys are NOT read back; if you need them,
     * insert entities one by one via {@code insert(entity, true)}.
     *
     * @param list entities to insert
     */
    public void insertBatch(List<E> list) {
        this.mapper.insertBatch(list);
    }

    /**
     * Insert for tables with an auto-increment primary key; the generated key is
     * returned inside a {@link KeyHolder}.
     *
     * @param entity entity to insert
     * @return the key holder, or {@code null} when the entity is {@code null}
     */
    public KeyHolder insertReturnKey(E entity) {
        if (entity != null) {
            return this.mapper.insertReturnKey(entity);
        }
        return null;
    }

    /**
     * Updates by primary key with all properties participating; columns can be
     * excluded with the ColumnIgnore annotation.
     *
     * @param entity entity to update
     * @return number of rows updated (0 for a {@code null} entity)
     */
    public int updateById(E entity) {
        if (entity != null) {
            return this.mapper.updateById(entity);
        }
        return 0;
    }

    /**
     * Updates by primary key; only non-{@code null} properties participate.
     *
     * @param entity entity to update
     * @return number of rows updated (0 for a {@code null} entity)
     */
    public int updateTemplateById(E entity) {
        if (entity != null) {
            return this.mapper.updateTemplateById(entity);
        }
        return 0;
    }

    /**
     * Deletes by primary key. For composite keys, pass the entity itself.
     *
     * @param key primary-key value (or entity for composite keys)
     * @return number of rows deleted
     */
    public int deleteById(Object key) {
        return this.mapper.deleteById(key);
    }

    /**
     * Fetches by primary key; throws a runtime exception when no row exists.
     *
     * @param key primary-key value
     * @return the entity
     */
    public E unique(Object key) {
        return this.mapper.unique(key);
    }

    /**
     * Fetches by primary key; returns {@code null} when no row exists.
     *
     * @param key primary-key value
     * @return the entity, or {@code null}
     */
    public E single(Object key) {
        return this.mapper.single(key);
    }

    /**
     * Fetches by primary key, acquiring a database row lock when executed inside
     * a transaction (select * from table where id = ? for update); returns
     * {@code null} when no row exists.
     *
     * @param key primary-key value
     * @return the entity, or {@code null}
     */
    public E lock(Object key) {
        return this.mapper.lock(key);
    }

    /** @return all rows mapped to the entity */
    public List<E> all() {
        return this.mapper.all();
    }

    /**
     * Returns a range of rows mapped to the entity.
     *
     * @param start range start
     * @param size maximum number of rows
     * @return the rows in the range
     */
    public List<E> all(int start, int size) {
        return this.mapper.all(start, size);
    }

    /** @return the total row count for the entity's table */
    public long allCount() {
        return this.mapper.allCount();
    }

    /**
     * Template query: BeetlSQL matches every non-{@code null} property of the
     * example entity (date properties excluded) and returns all exact matches.
     *
     * @param entity example entity
     * @return matching rows
     */
    public List<E> template(E entity) {
        return this.mapper.template(entity);
    }

    /**
     * Template query returning one result, or {@code null} when none matches.
     * <p>
     * Fix: this method previously declared its own type parameter {@code <E>},
     * silently shadowing the class-level {@code E}; the spurious parameter has
     * been removed (same erasure, callers unaffected).
     *
     * @param entity example entity
     * @return the single match, or {@code null}
     */
    public E templateOne(E entity) {
        return this.mapper.templateOne(entity);
    }

    /** Template query restricted to a row range. */
    public List<E> template(E entity, int start, int size) {
        return this.mapper.template(entity, start, size);
    }

    /** Paged template query; results are written back into the query object. */
    public void templatePage(PageQuery<E> query) {
        this.mapper.templatePage(query);
    }

    /**
     * Counts the rows matching the template entity.
     *
     * @param entity example entity
     * @return number of matches
     */
    public long templateCount(E entity) {
        return this.mapper.templateCount(entity);
    }

    /**
     * Executes a raw JDBC SQL query.
     *
     * @param sql SQL text with placeholders
     * @param args placeholder arguments
     * @return mapped result rows
     */
    public List<E> execute(String sql, Object... args) {
        return this.mapper.execute(sql, args);
    }

    /**
     * Executes a raw JDBC update statement.
     *
     * @param sql SQL text with placeholders
     * @param args placeholder arguments
     * @return number of affected rows
     */
    public int executeUpdate(String sql, Object... args) {
        return this.mapper.executeUpdate(sql, args);
    }

    public SQLManager getSQLManager() {
        return this.mapper.getSQLManager();
    }

    /**
     * Resolves the concrete entity class bound to this biz's {@code E} type
     * parameter from the generic superclass declaration.
     *
     * @return the entity class
     */
    @SuppressWarnings("unchecked")
    public Class<E> getEntityClazz() {
        Type type = getClass().getGenericSuperclass();
        Type[] generics = ((ParameterizedType) type).getActualTypeArguments();
        // E is the SECOND type argument of BaseBiz<M, E>.
        return (Class<E>) generics[1];
    }
}
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.text.TextUtils;
import androidx.test.filters.LargeTest;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.UrlUtils;
import org.chromium.content.browser.webcontents.WebContentsImpl;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.content_shell_apk.ContentShellActivityTestRule;
import org.chromium.content_shell_apk.ContentShellActivityTestRule.RerunWithUpdatedContainerView;
import java.util.concurrent.Callable;
/**
* Tests rich text clipboard functionality.
*/
@RunWith(BaseJUnit4ClassRunner.class)
public class ClipboardTest {
@Rule
public ContentShellActivityTestRule mActivityTestRule = new ContentShellActivityTestRule();
private static final String TEST_PAGE_DATA_URL = UrlUtils.encodeHtmlDataUri(
"<html><body>Hello, <a href=\"http://www.example.com/\">world</a>, how <b> "
+ "Chromium</b> doing today?</body></html>");
private static final String EXPECTED_TEXT_RESULT = "Hello, world, how Chromium doing today?";
// String to search for in the HTML representation on the clipboard.
private static final String EXPECTED_HTML_NEEDLE = "http://www.example.com/";
@Before
public void setUp() {
mActivityTestRule.launchContentShellWithUrl(TEST_PAGE_DATA_URL);
mActivityTestRule.waitForActiveShellToBeDoneLoading();
}
/**
* Tests that copying document fragments will put at least a plain-text representation
* of the contents on the clipboard. For Android JellyBean and higher, we also expect
* the HTML representation of the fragment to be available.
*/
@Test
@LargeTest
@Feature({"Clipboard", "TextInput"})
@RerunWithUpdatedContainerView
@DisabledTest(message = "https://crbug.com/791021")
public void testCopyDocumentFragment() {
ClipboardManager clipboardManager =
TestThreadUtils.runOnUiThreadBlockingNoException(new Callable<ClipboardManager>() {
@Override
public ClipboardManager call() {
return (ClipboardManager) mActivityTestRule.getActivity().getSystemService(
Context.CLIPBOARD_SERVICE);
}
});
Assert.assertNotNull(clipboardManager);
// Clear the clipboard to make sure we start with a clean state.
clipboardManager.setPrimaryClip(ClipData.newPlainText(null, ""));
Assert.assertFalse(hasPrimaryClip(clipboardManager));
final WebContentsImpl webContents = (WebContentsImpl) mActivityTestRule.getWebContents();
selectAll(webContents);
copy(webContents);
// Waits until data has been made available on the Android clipboard.
CriteriaHelper.pollUiThread(() -> hasPrimaryClip(clipboardManager));
// Verify that the data on the clipboard is what we expect it to be. For Android JB MR2
// and higher we expect HTML content, for other versions the plain-text representation.
TestThreadUtils.runOnUiThreadBlocking(() -> {
final ClipData clip = clipboardManager.getPrimaryClip();
Assert.assertEquals(EXPECTED_TEXT_RESULT,
clip.getItemAt(0).coerceToText(mActivityTestRule.getActivity()));
String htmlText = clip.getItemAt(0).getHtmlText();
Assert.assertNotNull(htmlText);
Assert.assertTrue(htmlText.contains(EXPECTED_HTML_NEEDLE));
});
}
/** Invokes {@code copy()} on the given WebContents, blocking on the UI thread. */
private void copy(final WebContentsImpl webContents) {
    TestThreadUtils.runOnUiThreadBlocking(webContents::copy);
}
/** Invokes {@code selectAll()} on the given WebContents, blocking on the UI thread. */
private void selectAll(final WebContentsImpl webContents) {
    TestThreadUtils.runOnUiThreadBlocking(webContents::selectAll);
}
// Tells whether the current clipboard holds a primary clip with non-empty text content.
private Boolean hasPrimaryClip(ClipboardManager clipboardManager) {
    final ClipData clip = clipboardManager.getPrimaryClip();
    return clip != null
            && clip.getItemCount() > 0
            && !TextUtils.isEmpty(clip.getItemAt(0).getText());
}
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Other licenses:
* -----------------------------------------------------------------------------
* Commercial licenses for this work are available. These replace the above
* ASL 2.0 and offer limited warranties, support, maintenance, and commercial
* database integrations.
*
* For more information, please visit: http://www.jooq.org/licenses
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package org.jooq;
// ...
// ...
import static org.jooq.SQLDialect.FIREBIRD;
import static org.jooq.SQLDialect.H2;
import static org.jooq.SQLDialect.HSQLDB;
import static org.jooq.SQLDialect.MARIADB;
// ...
import static org.jooq.SQLDialect.MYSQL;
// ...
import static org.jooq.SQLDialect.POSTGRES;
// ...
import static org.jooq.SQLDialect.SQLITE;
// ...
// ...
// ...
// ...
/**
 * This type is part of the jOOQ DSL to create {@link Select}, {@link Insert},
 * {@link Update}, {@link Delete}, {@link Merge} statements prefixed with a
 * <code>WITH</code> clause and with {@link CommonTableExpression}s.
 * <p>
 * Example:
 * <code><pre>
 * DSL.with("table", "col1", "col2")
 *    .as(
 *        select(one(), two())
 *    )
 *    .select()
 *    .from("table")
 * </pre></code>
 *
 * @author Lukas Eder
 */
public interface WithAsStep17 {

    /**
     * Associate a subselect with a common table expression's table and column names.
     * <p>
     * The subselect must project exactly 17 columns, matching the column names
     * previously declared for this common table expression in the
     * <code>WITH</code> clause.
     *
     * @param select the subselect providing the contents of the common table
     *            expression
     * @return the next step of the <code>WITH</code> clause construction DSL
     */
    @Support({ FIREBIRD, H2, HSQLDB, MARIADB, MYSQL, POSTGRES, SQLITE })
    WithStep as(Select<? extends Record17<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>> select);
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.sstable.format.big;
import java.io.IOException;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.db.compaction.OperationType;
import org.apache.cassandra.db.lifecycle.LifecycleNewTracker;
import org.apache.cassandra.io.util.File;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cache.ChunkCache;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.transform.Transformation;
import org.apache.cassandra.io.FSWriteError;
import org.apache.cassandra.io.compress.CompressedSequentialWriter;
import org.apache.cassandra.io.compress.ICompressor;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.io.sstable.format.SSTableFlushObserver;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.sstable.format.SSTableWriter;
import org.apache.cassandra.io.sstable.metadata.MetadataCollector;
import org.apache.cassandra.io.sstable.metadata.MetadataComponent;
import org.apache.cassandra.io.sstable.metadata.MetadataType;
import org.apache.cassandra.io.sstable.metadata.StatsMetadata;
import org.apache.cassandra.io.util.*;
import org.apache.cassandra.schema.CompressionParams;
import org.apache.cassandra.schema.TableMetadataRef;
import org.apache.cassandra.utils.*;
import org.apache.cassandra.utils.concurrent.Transactional;
import static org.apache.cassandra.utils.Clock.Global.currentTimeMillis;
/**
 * {@link SSTableWriter} for the "big" sstable format. Writes the data file and,
 * via its nested {@link IndexWriter}, the primary index, bloom filter and index
 * summary components; supports opening partially written sstables "early" so
 * that flushed data can be read before the write completes.
 */
public class BigTableWriter extends SSTableWriter
{
    private static final Logger logger = LoggerFactory.getLogger(BigTableWriter.class);

    // Reusable per-partition column index builder; reset() before each appended partition.
    private final ColumnIndex columnIndexWriter;
    private final IndexWriter iwriter;
    // Builder for FileHandles onto the data file, used by the early/final open paths.
    private final FileHandle.Builder dbuilder;
    protected final SequentialWriter dataFile;
    // Last partition key appended; partitions must arrive in strictly increasing key order.
    private DecoratedKey lastWrittenKey;
    private DataPosition dataMark;
    // Data file length at the previous early open; used to invalidate stale chunk-cache entries.
    private long lastEarlyOpenLength = 0;
    private final Optional<ChunkCache> chunkCache = Optional.ofNullable(ChunkCache.instance);

    private final SequentialWriterOption writerOption = SequentialWriterOption.newBuilder()
                                                                              .trickleFsync(DatabaseDescriptor.getTrickleFsync())
                                                                              .trickleFsyncByteInterval(DatabaseDescriptor.getTrickleFsyncIntervalInKb() * 1024)
                                                                              .build();

    public BigTableWriter(Descriptor descriptor,
                          long keyCount,
                          long repairedAt,
                          UUID pendingRepair,
                          boolean isTransient,
                          TableMetadataRef metadata,
                          MetadataCollector metadataCollector,
                          SerializationHeader header,
                          Collection<SSTableFlushObserver> observers,
                          LifecycleNewTracker lifecycleNewTracker)
    {
        super(descriptor, keyCount, repairedAt, pendingRepair, isTransient, metadata, metadataCollector, header, observers);
        lifecycleNewTracker.trackNew(this); // must track before any files are created

        if (compression)
        {
            final CompressionParams compressionParams = compressionFor(lifecycleNewTracker.opType());

            dataFile = new CompressedSequentialWriter(new File(getFilename()),
                                                      descriptor.filenameFor(Component.COMPRESSION_INFO),
                                                      new File(descriptor.filenameFor(Component.DIGEST)),
                                                      writerOption,
                                                      compressionParams,
                                                      metadataCollector);
        }
        else
        {
            dataFile = new ChecksummedSequentialWriter(new File(getFilename()),
                                                       new File(descriptor.filenameFor(Component.CRC)),
                                                       new File(descriptor.filenameFor(Component.DIGEST)),
                                                       writerOption);
        }
        dbuilder = new FileHandle.Builder(descriptor.filenameFor(Component.DATA)).compressed(compression)
                                                                                 .mmapped(DatabaseDescriptor.getDiskAccessMode() == Config.DiskAccessMode.mmap);
        chunkCache.ifPresent(dbuilder::withChunkCache);
        iwriter = new IndexWriter(keyCount);

        columnIndexWriter = new ColumnIndex(this.header, dataFile, descriptor.version, this.observers, getRowIndexEntrySerializer().indexInfoSerializer());
    }

    /**
     * Given an OpType, determine the correct Compression Parameters
     * @param opType
     * @return {@link org.apache.cassandra.schema.CompressionParams}
     */
    private CompressionParams compressionFor(final OperationType opType)
    {
        CompressionParams compressionParams = metadata.getLocal().params.compression;
        final ICompressor compressor = compressionParams.getSstableCompressor();

        if (null != compressor && opType == OperationType.FLUSH)
        {
            // When we are flushing out of the memtable throughput of the compressor is critical as flushes,
            // especially of large tables, can queue up and potentially block writes.
            // This optimization allows us to fall back to a faster compressor if a particular
            // compression algorithm indicates we should. See CASSANDRA-15379 for more details.
            switch (DatabaseDescriptor.getFlushCompression())
            {
                // It is relatively easier to insert a Noop compressor than to disable compressed writing
                // entirely as the "compression" member field is provided outside the scope of this class.
                // It may make sense in the future to refactor the ownership of the compression flag so that
                // We can bypass the CompressedSequentialWriter in this case entirely.
                case none:
                    compressionParams = CompressionParams.NOOP;
                    break;
                case fast:
                    if (!compressor.recommendedUses().contains(ICompressor.Uses.FAST_COMPRESSION))
                    {
                        // The default compressor is generally fast (LZ4 with 16KiB block size)
                        compressionParams = CompressionParams.DEFAULT;
                        break;
                    }
                    // fall through: the table's configured compressor already recommends itself for fast compression
                case table:
                default:
            }
        }
        return compressionParams;
    }

    /** Records the current data- and index-file positions so a failed append can be rolled back. */
    public void mark()
    {
        dataMark = dataFile.mark();
        iwriter.mark();
    }

    /** Rewinds both the data and index files to the positions captured by {@link #mark()}. */
    public void resetAndTruncate()
    {
        dataFile.resetAndTruncate(dataMark);
        iwriter.resetAndTruncate();
    }

    /**
     * Perform sanity checks on @param decoratedKey and @return the position in the data file before any data is written
     */
    protected long beforeAppend(DecoratedKey decoratedKey)
    {
        assert decoratedKey != null : "Keys must not be null"; // empty keys ARE allowed b/c of indexed column values
        if (lastWrittenKey != null && lastWrittenKey.compareTo(decoratedKey) >= 0)
            throw new RuntimeException("Last written key " + lastWrittenKey + " >= current key " + decoratedKey + " writing into " + getFilename());
        return (lastWrittenKey == null) ? 0 : dataFile.position();
    }

    /**
     * Post-append bookkeeping: records the key in the metadata collector, updates the
     * first/last key bounds, and writes the partition's entry to the primary index.
     */
    private void afterAppend(DecoratedKey decoratedKey, long dataEnd, RowIndexEntry index, ByteBuffer indexInfo) throws IOException
    {
        metadataCollector.addKey(decoratedKey.getKey());
        lastWrittenKey = decoratedKey;
        last = lastWrittenKey;
        if (first == null)
            first = lastWrittenKey;

        if (logger.isTraceEnabled())
            logger.trace("wrote {} at {}", decoratedKey, dataEnd);
        iwriter.append(decoratedKey, index, dataEnd, indexInfo);
    }

    /**
     * Appends partition data to this writer.
     *
     * @param iterator the partition to write
     * @return the created index entry if something was written, that is if {@code iterator}
     * wasn't empty, {@code null} otherwise.
     *
     * @throws FSWriteError if a write to the dataFile fails
     */
    public RowIndexEntry append(UnfilteredRowIterator iterator)
    {
        DecoratedKey key = iterator.partitionKey();

        // Keys are length-prefixed with an unsigned short on disk, so larger keys cannot be written.
        if (key.getKey().remaining() > FBUtilities.MAX_UNSIGNED_SHORT)
        {
            logger.error("Key size {} exceeds maximum of {}, skipping row", key.getKey().remaining(), FBUtilities.MAX_UNSIGNED_SHORT);
            return null;
        }

        if (iterator.isEmpty())
            return null;

        long startPosition = beforeAppend(key);
        observers.forEach((o) -> o.startPartition(key, iwriter.indexFile.position()));

        //Reuse the writer for each row
        columnIndexWriter.reset();

        try (UnfilteredRowIterator collecting = Transformation.apply(iterator, new StatsCollector(metadataCollector)))
        {
            columnIndexWriter.buildRowIndex(collecting);

            // afterAppend() writes the partition key before the first RowIndexEntry - so we have to add it's
            // serialized size to the index-writer position
            long indexFilePosition = ByteBufferUtil.serializedSizeWithShortLength(key.getKey()) + iwriter.indexFile.position();

            RowIndexEntry entry = RowIndexEntry.create(startPosition, indexFilePosition,
                                                       collecting.partitionLevelDeletion(),
                                                       columnIndexWriter.headerLength,
                                                       columnIndexWriter.columnIndexCount,
                                                       columnIndexWriter.indexInfoSerializedSize(),
                                                       columnIndexWriter.indexSamples(),
                                                       columnIndexWriter.offsets(),
                                                       getRowIndexEntrySerializer().indexInfoSerializer());

            long endPosition = dataFile.position();
            long rowSize = endPosition - startPosition;
            maybeLogLargePartitionWarning(key, rowSize);
            // NOTE(review): totalTombstones appears to be a running total on the collector rather
            // than a per-partition count — confirm against MetadataCollector before relying on it.
            maybeLogManyTombstonesWarning(key, metadataCollector.totalTombstones);
            metadataCollector.addPartitionSizeInBytes(rowSize);
            afterAppend(key, endPosition, entry, columnIndexWriter.buffer());
            return entry;
        }
        catch (BufferOverflowException boe)
        {
            throw new PartitionSerializationException(iterator, boe);
        }
        catch (IOException e)
        {
            throw new FSWriteError(e, dataFile.getPath());
        }
    }

    // The inherited rowIndexEntrySerializer is presumed to be the big-format IndexInfo
    // serializer here; the unchecked cast narrows it for local use.
    private RowIndexEntry.IndexSerializer<IndexInfo> getRowIndexEntrySerializer()
    {
        return (RowIndexEntry.IndexSerializer<IndexInfo>) rowIndexEntrySerializer;
    }

    /** Logs a warning when a partition exceeds the configured large-partition threshold. */
    private void maybeLogLargePartitionWarning(DecoratedKey key, long rowSize)
    {
        if (rowSize > DatabaseDescriptor.getCompactionLargePartitionWarningThreshold())
        {
            String keyString = metadata().partitionKeyType.getString(key.getKey());
            logger.warn("Writing large partition {}/{}:{} ({}) to sstable {}", metadata.keyspace, metadata.name, keyString, FBUtilities.prettyPrintMemory(rowSize), getFilename());
        }
    }

    /** Logs a warning when the tombstone count exceeds the configured warning threshold. */
    private void maybeLogManyTombstonesWarning(DecoratedKey key, int tombstoneCount)
    {
        if (tombstoneCount > DatabaseDescriptor.getCompactionTombstoneWarningThreshold())
        {
            String keyString = metadata().partitionKeyType.getString(key.getKey());
            logger.warn("Writing {} tombstones to {}/{}:{} in sstable {}", tombstoneCount, metadata.keyspace, metadata.name, keyString, getFilename());
        }
    }

    /**
     * Transformation that feeds every row, range tombstone marker and deletion of a
     * partition through the {@link MetadataCollector} while the partition is written.
     */
    private static class StatsCollector extends Transformation
    {
        private final MetadataCollector collector;
        // Number of cells seen in the current partition; reported on partition close.
        private int cellCount;

        StatsCollector(MetadataCollector collector)
        {
            this.collector = collector;
        }

        @Override
        public Row applyToStatic(Row row)
        {
            if (!row.isEmpty())
                cellCount += Rows.collectStats(row, collector);
            return row;
        }

        @Override
        public Row applyToRow(Row row)
        {
            collector.updateClusteringValues(row.clustering());
            cellCount += Rows.collectStats(row, collector);
            return row;
        }

        @Override
        public RangeTombstoneMarker applyToMarker(RangeTombstoneMarker marker)
        {
            collector.updateClusteringValues(marker.clustering());
            if (marker.isBoundary())
            {
                // A boundary marker carries both the closing and the opening deletion time.
                RangeTombstoneBoundaryMarker bm = (RangeTombstoneBoundaryMarker)marker;
                collector.update(bm.endDeletionTime());
                collector.update(bm.startDeletionTime());
            }
            else
            {
                collector.update(((RangeTombstoneBoundMarker)marker).deletionTime());
            }
            return marker;
        }

        @Override
        public void onPartitionClose()
        {
            collector.addCellPerPartitionCount(cellCount);
        }

        @Override
        public DeletionTime applyToDeletion(DeletionTime deletionTime)
        {
            collector.update(deletionTime);
            return deletionTime;
        }
    }

    /**
     * Opens a reader over the flushed prefix of this (still in-progress) sstable,
     * or returns {@code null} if nothing is readable yet.
     */
    @SuppressWarnings("resource")
    public SSTableReader openEarly()
    {
        // find the max (exclusive) readable key
        IndexSummaryBuilder.ReadableBoundary boundary = iwriter.getMaxReadable();
        if (boundary == null)
            return null;

        StatsMetadata stats = statsMetadata();
        assert boundary.indexLength > 0 && boundary.dataLength > 0;
        // open the reader early
        IndexSummary indexSummary = iwriter.summary.build(metadata().partitioner, boundary);
        long indexFileLength = new File(descriptor.filenameFor(Component.PRIMARY_INDEX)).length();
        int indexBufferSize = optimizationStrategy.bufferSize(indexFileLength / indexSummary.size());
        FileHandle ifile = iwriter.builder.bufferSize(indexBufferSize).complete(boundary.indexLength);
        if (compression)
            dbuilder.withCompressionMetadata(((CompressedSequentialWriter) dataFile).open(boundary.dataLength));
        int dataBufferSize = optimizationStrategy.bufferSize(stats.estimatedPartitionSize.percentile(DatabaseDescriptor.getDiskOptimizationEstimatePercentile()));
        FileHandle dfile = dbuilder.bufferSize(dataBufferSize).complete(boundary.dataLength);
        invalidateCacheAtBoundary(dfile);
        SSTableReader sstable = SSTableReader.internalOpen(descriptor,
                                                           components, metadata,
                                                           ifile, dfile,
                                                           indexSummary,
                                                           iwriter.bf.sharedCopy(),
                                                           maxDataAge,
                                                           stats,
                                                           SSTableReader.OpenReason.EARLY,
                                                           header);

        // now it's open, find the ACTUAL last readable key (i.e. for which the data file has also been flushed)
        sstable.first = getMinimalKey(first);
        sstable.last = getMinimalKey(boundary.lastKey);
        return sstable;
    }

    // Drops chunk-cache entries for the region written since the previous early open,
    // so readers never see stale cached data for re-opened file lengths.
    void invalidateCacheAtBoundary(FileHandle dfile)
    {
        chunkCache.ifPresent(cache -> {
            if (lastEarlyOpenLength != 0 && dfile.dataLength() > lastEarlyOpenLength)
                cache.invalidatePosition(dfile, lastEarlyOpenLength);
        });
        lastEarlyOpenLength = dfile.dataLength();
    }

    /** Syncs data and index files, then opens a final reader with reason EARLY. */
    public SSTableReader openFinalEarly()
    {
        // we must ensure the data is completely flushed to disk
        dataFile.sync();
        iwriter.indexFile.sync();

        return openFinal(SSTableReader.OpenReason.EARLY);
    }

    @SuppressWarnings("resource")
    private SSTableReader openFinal(SSTableReader.OpenReason openReason)
    {
        if (maxDataAge < 0)
            maxDataAge = currentTimeMillis();

        StatsMetadata stats = statsMetadata();
        // finalize in-memory state for the reader
        IndexSummary indexSummary = iwriter.summary.build(metadata().partitioner);
        long indexFileLength = new File(descriptor.filenameFor(Component.PRIMARY_INDEX)).length();
        int dataBufferSize = optimizationStrategy.bufferSize(stats.estimatedPartitionSize.percentile(DatabaseDescriptor.getDiskOptimizationEstimatePercentile()));
        int indexBufferSize = optimizationStrategy.bufferSize(indexFileLength / indexSummary.size());
        FileHandle ifile = iwriter.builder.bufferSize(indexBufferSize).complete();
        if (compression)
            dbuilder.withCompressionMetadata(((CompressedSequentialWriter) dataFile).open(0));
        FileHandle dfile = dbuilder.bufferSize(dataBufferSize).complete();
        invalidateCacheAtBoundary(dfile);
        SSTableReader sstable = SSTableReader.internalOpen(descriptor,
                                                           components,
                                                           metadata,
                                                           ifile,
                                                           dfile,
                                                           indexSummary,
                                                           iwriter.bf.sharedCopy(),
                                                           maxDataAge,
                                                           stats,
                                                           openReason,
                                                           header);
        sstable.first = getMinimalKey(first);
        sstable.last = getMinimalKey(last);
        return sstable;
    }

    protected SSTableWriter.TransactionalProxy txnProxy()
    {
        return new TransactionalProxy();
    }

    /** Commit/abort lifecycle for this writer's on-disk components. */
    class TransactionalProxy extends SSTableWriter.TransactionalProxy
    {
        // finalise our state on disk, including renaming
        protected void doPrepare()
        {
            iwriter.prepareToCommit();

            // write sstable statistics
            dataFile.prepareToCommit();
            writeMetadata(descriptor, finalizeMetadata());

            // save the table of components
            SSTable.appendTOC(descriptor, components);

            if (openResult)
                finalReader = openFinal(SSTableReader.OpenReason.NORMAL);
        }

        protected Throwable doCommit(Throwable accumulate)
        {
            accumulate = dataFile.commit(accumulate);
            accumulate = iwriter.commit(accumulate);
            return accumulate;
        }

        @Override
        protected Throwable doPostCleanup(Throwable accumulate)
        {
            accumulate = dbuilder.close(accumulate);
            return accumulate;
        }

        protected Throwable doAbort(Throwable accumulate)
        {
            accumulate = iwriter.abort(accumulate);
            accumulate = dataFile.abort(accumulate);
            return accumulate;
        }
    }

    /** Serializes the given metadata components into the sstable's STATS file. */
    private void writeMetadata(Descriptor desc, Map<MetadataType, MetadataComponent> components)
    {
        File file = new File(desc.filenameFor(Component.STATS));
        try (SequentialWriter out = new SequentialWriter(file, writerOption))
        {
            desc.getMetadataSerializer().serialize(components, out, desc.version);
            out.finish();
        }
        catch (IOException e)
        {
            throw new FSWriteError(e, file.path());
        }
    }

    /** @return the current (uncompressed) write position in the data file */
    public long getFilePointer()
    {
        return dataFile.position();
    }

    /** @return the current on-disk (possibly compressed) position of the data file */
    public long getOnDiskFilePointer()
    {
        return dataFile.getOnDiskFilePointer();
    }

    /** @return an estimate of the bytes written to disk so far for the data file */
    public long getEstimatedOnDiskBytesWritten()
    {
        return dataFile.getEstimatedOnDiskBytesWritten();
    }

    /**
     * Encapsulates writing the index and filter for an SSTable. The state of this object is not valid until it has been closed.
     */
    class IndexWriter extends AbstractTransactional implements Transactional
    {
        private final SequentialWriter indexFile;
        public final FileHandle.Builder builder;
        public final IndexSummaryBuilder summary;
        public final IFilter bf;
        // Index-file position captured by mark(), restored by resetAndTruncate().
        private DataPosition mark;

        IndexWriter(long keyCount)
        {
            indexFile = new SequentialWriter(new File(descriptor.filenameFor(Component.PRIMARY_INDEX)), writerOption);
            builder = new FileHandle.Builder(descriptor.filenameFor(Component.PRIMARY_INDEX)).mmapped(DatabaseDescriptor.getIndexAccessMode() == Config.DiskAccessMode.mmap);
            chunkCache.ifPresent(builder::withChunkCache);
            summary = new IndexSummaryBuilder(keyCount, metadata().params.minIndexInterval, Downsampling.BASE_SAMPLING_LEVEL);
            bf = FilterFactory.getFilter(keyCount, metadata().params.bloomFilterFpChance);
            // register listeners to be alerted when the data files are flushed
            indexFile.setPostFlushListener(() -> summary.markIndexSynced(indexFile.getLastFlushOffset()));
            dataFile.setPostFlushListener(() -> summary.markDataSynced(dataFile.getLastFlushOffset()));
        }

        // finds the last (-offset) decorated key that can be guaranteed to occur fully in the flushed portion of the index file
        IndexSummaryBuilder.ReadableBoundary getMaxReadable()
        {
            return summary.getLastReadableBoundary();
        }

        /** Adds the key to the bloom filter and writes its index entry to the primary index. */
        public void append(DecoratedKey key, RowIndexEntry indexEntry, long dataEnd, ByteBuffer indexInfo) throws IOException
        {
            bf.add(key);
            long indexStart = indexFile.position();
            try
            {
                ByteBufferUtil.writeWithShortLength(key.getKey(), indexFile);
                rowIndexEntrySerializer.serialize(indexEntry, indexFile, indexInfo);
            }
            catch (IOException e)
            {
                throw new FSWriteError(e, indexFile.getPath());
            }
            long indexEnd = indexFile.position();

            if (logger.isTraceEnabled())
                logger.trace("wrote index entry: {} at {}", indexEntry, indexStart);

            summary.maybeAddEntry(key, indexStart, indexEnd, dataEnd);
        }

        /**
         * Serializes the bloom filter to its FILTER component (if one is configured)
         * and syncs it to disk.
         */
        void flushBf()
        {
            if (components.contains(Component.FILTER))
            {
                String path = descriptor.filenameFor(Component.FILTER);
                try (FileOutputStreamPlus stream = new FileOutputStreamPlus(path))
                {
                    // bloom filter
                    BloomFilterSerializer.serialize((BloomFilter) bf, stream);
                    stream.flush();
                    stream.sync();
                }
                catch (IOException e)
                {
                    throw new FSWriteError(e, path);
                }
            }
        }

        public void mark()
        {
            mark = indexFile.mark();
        }

        public void resetAndTruncate()
        {
            // we can't un-set the bloom filter addition, but extra keys in there are harmless.
            // we can't reset dbuilder either, but that is the last thing called in afterappend so
            // we assume that if that worked then we won't be trying to reset.
            indexFile.resetAndTruncate(mark);
        }

        protected void doPrepare()
        {
            flushBf();

            // truncate index file
            long position = indexFile.position();
            indexFile.prepareToCommit();
            FileUtils.truncate(indexFile.getPath(), position);

            // save summary
            summary.prepareToCommit();
            try (IndexSummary indexSummary = summary.build(getPartitioner()))
            {
                SSTableReader.saveSummary(descriptor, first, last, indexSummary);
            }
        }

        protected Throwable doCommit(Throwable accumulate)
        {
            return indexFile.commit(accumulate);
        }

        protected Throwable doAbort(Throwable accumulate)
        {
            return indexFile.abort(accumulate);
        }

        @Override
        protected Throwable doPostCleanup(Throwable accumulate)
        {
            accumulate = summary.close(accumulate);
            accumulate = bf.close(accumulate);
            accumulate = builder.close(accumulate);
            return accumulate;
        }
    }
}
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.action.search;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.AbstractSearchAsyncAction;
import org.elasticsearch.action.search.InitialSearchPhase;
import org.elasticsearch.action.search.SearchActionListener;
import org.elasticsearch.action.search.SearchPhase;
import org.elasticsearch.action.search.SearchPhaseContext;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchShardIterator;
import org.elasticsearch.action.search.SearchTransportService;
import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.AliasFilter;
import org.elasticsearch.testframework.ESTestCase;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
public class SearchAsyncActionTests extends ESTestCase {
/**
 * Verifies that shard iterators marked skipped (via {@code resetAndSkip()}) never receive a
 * request, while the final {@link SearchResponse} still counts them as skipped and successful.
 */
public void testSkipSearchShards() throws InterruptedException {
    SearchRequest request = new SearchRequest();
    request.allowPartialSearchResults(true);
    // Counted down by the final search phase so the test can await completion.
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<TestSearchResponse> response = new AtomicReference<>();
    ActionListener<SearchResponse> responseListener = new ActionListener<SearchResponse>() {
        @Override
        public void onResponse(SearchResponse searchResponse) {
            response.set((TestSearchResponse) searchResponse);
        }

        @Override
        public void onFailure(Exception e) {
            logger.warn("test failed", e);
            fail(e.getMessage());
        }
    };
    DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT);
    DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT);

    AtomicInteger contextIdGenerator = new AtomicInteger(0);
    GroupShardsIterator<SearchShardIterator> shardsIter = getShardsIter("idx",
            new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()),
            10, randomBoolean(), primaryNode, replicaNode);
    // Mark every shard with an even id as skipped; these must not be queried at all.
    int numSkipped = 0;
    for (SearchShardIterator iter : shardsIter) {
        if (iter.shardId().id() % 2 == 0) {
            iter.resetAndSkip();
            numSkipped++;
        }
    }

    SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, null, null);
    Map<String, Transport.Connection> lookup = new HashMap<>();
    // Tracks which shard ids were contacted so each shard is only counted once across replicas.
    Map<ShardId, Boolean> seenShard = new ConcurrentHashMap<>();
    lookup.put(primaryNode.getId(), new MockConnection(primaryNode));
    lookup.put(replicaNode.getId(), new MockConnection(replicaNode));
    Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
    AtomicInteger numRequests = new AtomicInteger(0);
    AbstractSearchAsyncAction asyncAction =
        new AbstractSearchAsyncAction<TestSearchPhaseResult>(
            "test",
            logger,
            transportService,
            (cluster, node) -> {
                assert cluster == null : "cluster was not null: " + cluster;
                return lookup.get(node); },
            aliasFilters,
            Collections.emptyMap(),
            null,
            request,
            responseListener,
            shardsIter,
            new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0),
            0,
            null,
            new InitialSearchPhase.ArraySearchPhaseResults<>(shardsIter.size()),
            request.getMaxConcurrentShardRequests(),
            SearchResponse.Clusters.EMPTY) {

            @Override
            protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard,
                                               SearchActionListener<TestSearchPhaseResult> listener) {
                seenShard.computeIfAbsent(shard.shardId(), (i) -> {
                    numRequests.incrementAndGet(); // only count this once per replica
                    return Boolean.TRUE;
                });
                // Respond asynchronously, as a real transport would.
                new Thread(() -> {
                    Transport.Connection connection = getConnection(null, shard.currentNodeId());
                    TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(),
                        connection.getNode());
                    listener.onResponse(testSearchPhaseResult);
                }).start();
            }

            @Override
            protected SearchPhase getNextPhase(SearchPhaseResults<TestSearchPhaseResult> results, SearchPhaseContext context) {
                return new SearchPhase("test") {
                    @Override
                    public void run() throws IOException {
                        latch.countDown();
                    }
                };
            }
        };
    asyncAction.start();
    latch.await();
    SearchResponse searchResponse = asyncAction.buildSearchResponse(null, null);
    // Skipped shards are not queried, but still count towards the successful total.
    assertEquals(shardsIter.size()-numSkipped, numRequests.get());
    assertEquals(0, searchResponse.getFailedShards());
    assertEquals(numSkipped, searchResponse.getSkippedShards());
    assertEquals(shardsIter.size(), searchResponse.getSuccessfulShards());
}
/**
 * Verifies that the async action dispatches at most {@code maxConcurrentShardRequests}
 * shard requests initially, and that all 10 shards are eventually queried once the
 * blocked responses are released.
 */
public void testLimitConcurrentShardRequests() throws InterruptedException {
    SearchRequest request = new SearchRequest();
    request.allowPartialSearchResults(true);
    int numConcurrent = randomIntBetween(1, 5);
    request.setMaxConcurrentShardRequests(numConcurrent);
    // Counted down by the final search phase so the test can await completion.
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<TestSearchResponse> response = new AtomicReference<>();
    ActionListener<SearchResponse> responseListener = new ActionListener<SearchResponse>() {
        @Override
        public void onResponse(SearchResponse searchResponse) {
            response.set((TestSearchResponse) searchResponse);
        }

        @Override
        public void onFailure(Exception e) {
            logger.warn("test failed", e);
            fail(e.getMessage());
        }
    };
    DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT);
    DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT);

    AtomicInteger contextIdGenerator = new AtomicInteger(0);
    GroupShardsIterator<SearchShardIterator> shardsIter = getShardsIter("idx",
            new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()),
            10, randomBoolean(), primaryNode, replicaNode);
    SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, null, null);
    Map<String, Transport.Connection> lookup = new HashMap<>();
    // Tracks which shard ids were contacted so each shard is only counted once across replicas.
    Map<ShardId, Boolean> seenShard = new ConcurrentHashMap<>();
    lookup.put(primaryNode.getId(), new MockConnection(primaryNode));
    lookup.put(replicaNode.getId(), new MockConnection(replicaNode));
    Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
    // Holds back every shard response until the initial request count has been asserted.
    CountDownLatch awaitInitialRequests = new CountDownLatch(1);
    AtomicInteger numRequests = new AtomicInteger(0);
    AtomicInteger numResponses = new AtomicInteger(0);
    AbstractSearchAsyncAction asyncAction =
        new AbstractSearchAsyncAction<TestSearchPhaseResult>(
            "test",
            logger,
            transportService,
            (cluster, node) -> {
                assert cluster == null : "cluster was not null: " + cluster;
                return lookup.get(node); },
            aliasFilters,
            Collections.emptyMap(),
            null,
            request,
            responseListener,
            shardsIter,
            new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0),
            0,
            null,
            new InitialSearchPhase.ArraySearchPhaseResults<>(shardsIter.size()),
            request.getMaxConcurrentShardRequests(),
            SearchResponse.Clusters.EMPTY) {

            @Override
            protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard,
                                               SearchActionListener<TestSearchPhaseResult> listener) {
                seenShard.computeIfAbsent(shard.shardId(), (i) -> {
                    numRequests.incrementAndGet(); // only count this once per replica
                    return Boolean.TRUE;
                });
                new Thread(() -> {
                    try {
                        // Block until the test has verified the initial request count.
                        awaitInitialRequests.await();
                    } catch (InterruptedException e) {
                        throw new AssertionError(e);
                    }
                    Transport.Connection connection = getConnection(null, shard.currentNodeId());
                    TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(),
                        connection.getNode());
                    if (numResponses.getAndIncrement() > 0 && randomBoolean()) { // at least one response otherwise the entire
                                                                                 // request fails
                        listener.onFailure(new RuntimeException());
                    } else {
                        listener.onResponse(testSearchPhaseResult);
                    }
                }).start();
            }

            @Override
            protected SearchPhase getNextPhase(SearchPhaseResults<TestSearchPhaseResult> results, SearchPhaseContext context) {
                return new SearchPhase("test") {
                    @Override
                    public void run() throws IOException {
                        latch.countDown();
                    }
                };
            }
        };
    asyncAction.start();
    // Only the configured number of shard requests may be in flight before any responses arrive.
    assertEquals(numConcurrent, numRequests.get());
    awaitInitialRequests.countDown();
    latch.await();
    // Once responses flow, every one of the 10 shards must have been queried.
    assertEquals(10, numRequests.get());
}
/**
 * Verifies that the async search action fans requests out to every shard exactly once,
 * collects one result per shard, and afterwards frees every search context it opened
 * (via {@code sendFreeContext}) on whichever node copy answered.
 *
 * NOTE(review): responses are delivered either inline or from a freshly spawned thread at
 * random, so this also exercises the thread-safety of the collect phase.
 */
public void testFanOutAndCollect() throws InterruptedException {
    SearchRequest request = new SearchRequest();
    request.allowPartialSearchResults(true);
    request.setMaxConcurrentShardRequests(randomIntBetween(1, 100));
    CountDownLatch latch = new CountDownLatch(1);
    AtomicReference<TestSearchResponse> response = new AtomicReference<>();
    ActionListener<SearchResponse> responseListener = new ActionListener<SearchResponse>() {
        @Override
        public void onResponse(SearchResponse searchResponse) {
            response.set((TestSearchResponse) searchResponse);
        }

        @Override
        public void onFailure(Exception e) {
            logger.warn("test failed", e);
            fail(e.getMessage());
        }
    };
    DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT);
    DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT);
    // node -> ids of search contexts opened on that node; entries are removed as contexts are freed
    Map<DiscoveryNode, Set<Long>> nodeToContextMap = new HashMap<>();
    AtomicInteger contextIdGenerator = new AtomicInteger(0);
    GroupShardsIterator<SearchShardIterator> shardsIter = getShardsIter("idx",
        new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()),
        randomIntBetween(1, 10), randomBoolean(), primaryNode, replicaNode);
    AtomicInteger numFreedContext = new AtomicInteger();
    // Transport stub that only records context-free requests instead of hitting the wire
    SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, null, null) {
        @Override
        public void sendFreeContext(Transport.Connection connection, long contextId, OriginalIndices originalIndices) {
            numFreedContext.incrementAndGet();
            assertTrue(nodeToContextMap.containsKey(connection.getNode()));
            assertTrue(nodeToContextMap.get(connection.getNode()).remove(contextId));
        }
    };
    Map<String, Transport.Connection> lookup = new HashMap<>();
    lookup.put(primaryNode.getId(), new MockConnection(primaryNode));
    lookup.put(replicaNode.getId(), new MockConnection(replicaNode));
    Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
    final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors()));
    AbstractSearchAsyncAction asyncAction =
        new AbstractSearchAsyncAction<TestSearchPhaseResult>(
            "test",
            logger,
            transportService,
            (cluster, node) -> {
                assert cluster == null : "cluster was not null: " + cluster;
                return lookup.get(node); },
            aliasFilters,
            Collections.emptyMap(),
            executor,
            request,
            responseListener,
            shardsIter,
            new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0),
            0,
            null,
            new InitialSearchPhase.ArraySearchPhaseResults<>(shardsIter.size()),
            request.getMaxConcurrentShardRequests(),
            SearchResponse.Clusters.EMPTY) {
            TestSearchResponse response = new TestSearchResponse();

            @Override
            protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard, SearchActionListener<TestSearchPhaseResult>
                listener) {
                // each shard id must be queried exactly once across all copies
                assertTrue("shard: " + shard.shardId() + " has been queried twice", response.queried.add(shard.shardId()));
                Transport.Connection connection = getConnection(null, shard.currentNodeId());
                TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(),
                    connection.getNode());
                // record the context id so the sendFreeContext stub above can check it off later
                Set<Long> ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> new HashSet<>());
                ids.add(testSearchPhaseResult.getRequestId());
                if (randomBoolean()) {
                    listener.onResponse(testSearchPhaseResult);
                } else {
                    new Thread(() -> listener.onResponse(testSearchPhaseResult)).start();
                }
            }

            @Override
            protected SearchPhase getNextPhase(SearchPhaseResults<TestSearchPhaseResult> results, SearchPhaseContext context) {
                return new SearchPhase("test") {
                    @Override
                    public void run() throws IOException {
                        for (int i = 0; i < results.getNumShards(); i++) {
                            TestSearchPhaseResult result = results.getAtomicArray().get(i);
                            assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId());
                            sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node), OriginalIndices.NONE);
                        }
                        responseListener.onResponse(response);
                        latch.countDown();
                    }
                };
            }
        };
    asyncAction.start();
    latch.await();
    assertNotNull(response.get());
    assertFalse(nodeToContextMap.isEmpty());
    assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode));
    // every opened context must have been freed exactly once
    assertEquals(shardsIter.size(), numFreedContext.get());
    if (nodeToContextMap.containsKey(primaryNode)) {
        assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty());
    } else {
        assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty());
    }
    executor.shutdown();
}
/**
 * Builds a shard-iterator group for {@code numShards} shards of {@code index}: each shard gets
 * a started primary on {@code primaryNode} and, when {@code doReplicas} is set, a replica on
 * {@code replicaNode} that is randomly either started or still initializing. Started copies are
 * shuffled so the target order is random; initializing copies are appended after them.
 *
 * NOTE(review): the sequence of random()/randomBoolean() calls is part of the seeded-test
 * contract — do not reorder these calls.
 */
static GroupShardsIterator<SearchShardIterator> getShardsIter(String index, OriginalIndices originalIndices, int numShards,
                                                              boolean doReplicas, DiscoveryNode primaryNode, DiscoveryNode replicaNode) {
    ArrayList<SearchShardIterator> list = new ArrayList<>();
    for (int i = 0; i < numShards; i++) {
        ArrayList<ShardRouting> started = new ArrayList<>();
        ArrayList<ShardRouting> initializing = new ArrayList<>();
        ArrayList<ShardRouting> unassigned = new ArrayList<>();
        // primary: always assigned to primaryNode and started
        ShardRouting routing = ShardRouting.newUnassigned(new ShardId(new Index(index, "_na_"), i), true,
            RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"));
        routing = routing.initialize(primaryNode.getId(), i + "p", 0);
        routing.started();
        started.add(routing);
        if (doReplicas) {
            routing = ShardRouting.newUnassigned(new ShardId(new Index(index, "_na_"), i), false,
                RecoverySource.PeerRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"));
            if (replicaNode != null) {
                routing = routing.initialize(replicaNode.getId(), i + "r", 0);
                if (randomBoolean()) {
                    routing.started();
                    started.add(routing);
                } else {
                    initializing.add(routing);
                }
            } else {
                unassigned.add(routing); // unused yet
            }
        }
        // randomize target order but keep initializing copies last (they are less preferred)
        Collections.shuffle(started, random());
        started.addAll(initializing);
        list.add(new SearchShardIterator(null, new ShardId(new Index(index, "_na_"), i), started, originalIndices));
    }
    return new GroupShardsIterator<>(list);
}
/**
 * {@link SearchResponse} that records every shard id queried, letting tests assert that each
 * shard is hit exactly once. The set is accessed from multiple response threads only through
 * the assertion in executePhaseOnShard.
 */
public static class TestSearchResponse extends SearchResponse {
    public final Set<ShardId> queried = new HashSet<>();
}
/**
 * Phase result that remembers which node produced it so tests can verify routing;
 * wire serialization is intentionally a no-op (results never cross a real transport).
 */
public static class TestSearchPhaseResult extends SearchPhaseResult {
    final DiscoveryNode node;

    public TestSearchPhaseResult(long id, DiscoveryNode node) {
        this.requestId = id;
        this.node = node;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // no-op: never deserialized in these tests
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // no-op: never serialized in these tests
    }
}
/**
 * Minimal {@link Transport.Connection} stub that only exposes its node. Any attempt to
 * actually send a request or close the connection fails loudly, proving the tests never
 * touch the wire.
 */
public static final class MockConnection implements Transport.Connection {

    private final DiscoveryNode node;

    public MockConnection(DiscoveryNode node) {
        this.node = node;
    }

    @Override
    public DiscoveryNode getNode() {
        return node;
    }

    @Override
    public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
        throws IOException, TransportException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void close() throws IOException {
        throw new UnsupportedOperationException();
    }
}
}
|
package com.jfspecial.modules.admin.friendlylink;
import com.jfspecial.component.base.BaseProjectModel;
import com.jfspecial.jfinal.component.annotation.ModelBind;
/**
 * JFinal ActiveRecord model for table {@code tb_friendlylink} (site friendly links).
 *
 * Column-name constants below mirror the table schema; all accessors delegate to the
 * inherited attribute map of {@code BaseProjectModel} via {@code get}/{@code set}.
 */
@ModelBind(table = "tb_friendlylink")
public class TbFriendlylink extends BaseProjectModel<TbFriendlylink> {

    private static final long serialVersionUID = 1L;

    /** Shared DAO instance for queries, per JFinal convention. */
    public static final TbFriendlylink dao = new TbFriendlylink();

    // columns START
    // FIX: column names are immutable class-level metadata; they were previously
    // mutable instance fields re-allocated on every model instance.
    private static final String ID = "id"; // primary key
    private static final String NAME = "name"; // link name
    private static final String URL = "url"; // URL
    private static final String SORT = "sort"; // sort order
    private static final String STATE = "state"; // visibility (radio: 1 = shown, 2 = hidden)
    private static final String TYPE = "type"; // type (select: see data dictionary)
    private static final String REMARK = "remark"; // remark (textarea)
    private static final String CREATE_TIME = "create_time"; // creation time
    private static final String CREATE_ID = "create_id"; // creator id

    /** Sets the primary key; returns {@code this} for chaining. */
    public TbFriendlylink setId(Integer value) {
        set(ID, value);
        return this;
    }

    public Integer getId() {
        return get(ID);
    }

    public TbFriendlylink setName(String value) {
        set(NAME, value);
        return this;
    }

    public String getName() {
        return get(NAME);
    }

    public TbFriendlylink setUrl(String value) {
        set(URL, value);
        return this;
    }

    public String getUrl() {
        return get(URL);
    }

    public TbFriendlylink setSort(Integer value) {
        set(SORT, value);
        return this;
    }

    public Integer getSort() {
        return get(SORT);
    }

    public TbFriendlylink setState(Integer value) {
        set(STATE, value);
        return this;
    }

    public Integer getState() {
        return get(STATE);
    }

    public TbFriendlylink setType(Integer value) {
        set(TYPE, value);
        return this;
    }

    public Integer getType() {
        return get(TYPE);
    }

    public TbFriendlylink setRemark(String value) {
        set(REMARK, value);
        return this;
    }

    public String getRemark() {
        return get(REMARK);
    }

    public TbFriendlylink setCreateTime(String value) {
        set(CREATE_TIME, value);
        return this;
    }

    public String getCreateTime() {
        return get(CREATE_TIME);
    }

    public TbFriendlylink setCreateId(Integer value) {
        set(CREATE_ID, value);
        return this;
    }

    public Integer getCreateId() {
        return get(CREATE_ID);
    }
}
|
package se.jiderhamn.classloader.leak.prevention.cleanup;
import java.lang.reflect.Field;
import java.util.*;
import java.util.logging.Level;
import se.jiderhamn.classloader.leak.prevention.ClassLoaderLeakPreventor;
import se.jiderhamn.classloader.leak.prevention.ClassLoaderPreMortemCleanUp;
/**
 * Cleanup for removing custom {@link java.util.logging.Level}s loaded within the protected class loader.
 *
 * {@code java.util.logging.Level$KnownLevel} keeps static maps of every Level ever constructed
 * (read below as {@code nameToLevels} / {@code intToLevels}); a Level subclass loaded by the
 * protected class loader would therefore keep that loader reachable forever. This clean-up
 * reflectively removes such entries from both maps.
 *
 * @author Mattias Jiderhamn
 */
public class JavaUtilLoggingLevelCleanUp implements ClassLoaderPreMortemCleanUp {

    @Override
    public void cleanUp(ClassLoaderLeakPreventor preventor) {
        // KnownLevel is a JDK-internal class; may be absent depending on JDK version
        final Class<?> knownLevelClass = preventor.findClass("java.util.logging.Level$KnownLevel");
        if(knownLevelClass != null) {
            final Field levelObjectField = preventor.findField(knownLevelClass, "levelObject");
            if(levelObjectField != null) {
                //noinspection SynchronizationOnLocalVariableOrMethodParameter
                // NOTE(review): locking on the KnownLevel class — presumably mirrors the JDK's own
                // internal locking for these maps; confirm against the JDK source if upgrading.
                synchronized (knownLevelClass) {
                    final Map<?, List/*<KnownLevel>*/> nameToLevels = preventor.getStaticFieldValue(knownLevelClass, "nameToLevels");
                    final Map<?, List/*<KnownLevel>*/> intToLevels = preventor.getStaticFieldValue(knownLevelClass, "intToLevels");
                    if(nameToLevels != null) {
                        final Set/*<KnownLevel>*/ removed = process(preventor, knownLevelClass, levelObjectField, nameToLevels);
                        if(intToLevels != null) {
                            // the same KnownLevel instances appear in both maps; purge the ones just removed
                            for(List/*<KnownLevel>*/ knownLevels : intToLevels.values()) {
                                knownLevels.removeAll(removed);
                            }
                        }
                    }
                    else if(intToLevels != null) { // Use intToLevels as fallback; both should contain same values
                        process(preventor, knownLevelClass, levelObjectField, intToLevels);
                    }
                }
            }
            else
                preventor.warn("Found " + knownLevelClass + " but not levelObject field");
        }
    }

    /**
     * Walks every KnownLevel list in {@code levelsMaps} and removes entries whose wrapped
     * {@code levelObject} was loaded by the protected class loader.
     *
     * @return the removed KnownLevel entries, so the caller can purge them from the sibling map
     */
    private Set/*<KnownLevel>*/ process(ClassLoaderLeakPreventor preventor, Class<?> knownLevelClass,
                                       Field levelObjectField, Map<?, List/*<KnownLevel>*/> levelsMaps) {
        final Set/*<KnownLevel>*/ output = new HashSet<Object>();
        for(List/*<KnownLevel>*/ knownLevels : levelsMaps.values()) {
            for(Iterator/*<KnownLevel>*/ iter = knownLevels.listIterator(); iter.hasNext(); ) {
                final Object /* KnownLevel */ knownLevel = iter.next();
                final Level levelObject = preventor.getFieldValue(levelObjectField, knownLevel);
                if(preventor.isLoadedInClassLoader(levelObject)) {
                    preventor.warn(Level.class.getName() + " subclass loaded by protected ClassLoader: " +
                        levelObject.getClass() + "; removing from " + knownLevelClass);
                    iter.remove(); // safe structural removal during iteration
                    output.add(knownLevel);
                }
            }
        }
        return output;
    }
}
|
package listClasses;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
/**
 * A simple generic singly linked list with head and tail references.
 *
 * Fixes over the previous revision:
 * - {@link #remove} no longer throws NullPointerException on an empty list, and clears
 *   {@code tail} when every element is removed.
 * - {@link #retrieveLastElement} now detaches the removed node, so iteration no longer
 *   yields an element that was already retrieved.
 * - {@link #getReverseList} / {@link #getReverseArrayList} return an empty result for an
 *   empty list instead of throwing NullPointerException.
 */
public class BasicLinkedList<T> implements Iterable<T> {

    /** First node, or null when the list is empty. */
    protected Node head;
    /** Last node, or null when the list is empty. */
    protected Node tail;
    /** Number of elements currently stored. */
    protected int size;

    public BasicLinkedList() {
        size = 0;
    }

    /** Singly linked node holding one element. */
    protected class Node {
        T data;
        Node next;

        Node(T d) {
            data = d;
            next = null;
        }
    }

    /** @return the number of elements in the list */
    public int getSize() {
        return size;
    }

    /** @return the first element, or null if the list is empty */
    public T getFirst() {
        return (size == 0) ? null : head.data;
    }

    /** @return the last element, or null if the list is empty */
    public T getLast() {
        return (size == 0) ? null : tail.data;
    }

    /**
     * Prepends {@code data} to the list.
     *
     * @return this list, for chaining
     */
    public BasicLinkedList<T> addToFront(T data) {
        Node n = new Node(data);
        n.next = this.head;
        this.head = n;
        this.size++;
        if (this.size == 1) {
            this.tail = this.head; // single element: head and tail coincide
        }
        return this;
    }

    /**
     * Appends {@code data} to the list.
     *
     * @return this list, for chaining
     */
    public BasicLinkedList<T> addToEnd(T data) {
        Node n = new Node(data);
        if (this.size == 0) {
            this.head = n;
            this.tail = n;
        } else {
            this.tail.next = n;
            this.tail = n;
        }
        this.size++;
        return this;
    }

    /**
     * Removes and returns the first element.
     *
     * @return the removed element, or null if the list is empty
     */
    public T retrieveFirstElement() {
        if (size == 0) {
            return null;
        }
        T answer = this.head.data;
        this.head = this.head.next;
        if (this.head == null) {
            this.tail = null; // list became empty
        }
        this.size--;
        return answer;
    }

    /**
     * Removes and returns the last element (O(n): must walk to the second-to-last node).
     *
     * @return the removed element, or null if the list is empty
     */
    public T retrieveLastElement() {
        if (size == 0) {
            return null;
        }
        T answer = this.tail.data;
        if (this.size == 1) {
            this.head = null;
            this.tail = null;
        } else {
            Node current = this.head;
            while (current.next.next != null) {
                current = current.next;
            }
            // FIX: detach the removed node; previously tail.next still pointed at it,
            // so iteration kept yielding the retrieved element.
            current.next = null;
            this.tail = current;
        }
        this.size--;
        return answer;
    }

    /**
     * @return an iterator over the elements in head-to-tail order; {@code next()} returns
     *     null once exhausted (historic behavior kept for compatibility), and
     *     {@code remove()} is unsupported
     */
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            Node current = head; // cursor over the snapshot starting at the current head

            @Override
            public boolean hasNext() {
                return current != null;
            }

            @Override
            public T next() {
                if (!hasNext()) {
                    return null;
                }
                T data = current.data;
                current = current.next;
                return data;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException("Remove not implemented.");
            }
        };
    }

    /**
     * Removes every element that compares equal to {@code targetData}.
     *
     * @param targetData value to remove
     * @param comparator equality oracle (elements with compare == 0 are removed)
     * @return this list, for chaining
     */
    public BasicLinkedList<T> remove(T targetData, Comparator<T> comparator) {
        if (size == 0) {
            return this; // FIX: previously dereferenced null head and threw NPE
        }
        int removed = 0;
        // strip matching nodes off the front until head is a keeper (or the list is empty)
        while (this.head != null && comparator.compare(this.head.data, targetData) == 0) {
            this.head = this.head.next;
            removed++;
        }
        if (this.head == null) {
            // every element matched
            this.tail = null; // FIX: previously left a stale tail reference
            this.size = 0;
            return this;
        }
        // head is now a keeper; relink the remainder, skipping matches
        Node behind = this.head;
        Node ahead = behind.next;
        while (ahead != null) {
            if (comparator.compare(ahead.data, targetData) == 0) {
                removed++;
            } else {
                behind.next = ahead;
                behind = ahead;
            }
            ahead = ahead.next;
        }
        behind.next = null; // terminate the list after the last keeper
        this.tail = behind;
        this.size -= removed;
        return this;
    }

    /**
     * @return the elements in reverse order as an ArrayList; empty for an empty list
     */
    public ArrayList<T> getReverseArrayList() {
        ArrayList<T> reversedArrayList = new ArrayList<T>();
        for (T element : getReverseList()) {
            reversedArrayList.add(element);
        }
        return reversedArrayList;
    }

    /**
     * @return a new list with the elements in reverse order; this list is unchanged
     */
    public BasicLinkedList<T> getReverseList() {
        BasicLinkedList<T> reversed = new BasicLinkedList<T>();
        if (size == 0) {
            return reversed; // FIX: previously threw NPE on an empty list
        }
        // reverse a clone so this list's node chain is left untouched
        reversed.head = recursiveReverser(cloneList().head);
        Node current = reversed.head;
        while (current.next != null) {
            current = current.next;
        }
        reversed.tail = current;
        reversed.size = this.size;
        return reversed;
    }

    /**
     * Reverses the chain starting at {@code current} in place.
     *
     * @return the new head (the former last node)
     */
    private Node recursiveReverser(Node current) {
        if (current == null || current.next == null) {
            return current;
        }
        Node rev = recursiveReverser(current.next);
        current.next.next = current;
        current.next = null;
        return rev;
    }

    /**
     * @return a structural copy of this list (fresh nodes, same element references);
     *     head/tail/size are all populated consistently
     */
    private BasicLinkedList<T> cloneList() {
        BasicLinkedList<T> clone = new BasicLinkedList<T>();
        Node currentO = this.head;
        Node currentC = null;
        while (currentO != null) {
            Node copy = new Node(currentO.data);
            if (currentC == null) {
                clone.head = copy;
            } else {
                currentC.next = copy;
            }
            currentC = copy;
            currentO = currentO.next;
        }
        clone.tail = currentC;
        clone.size = this.size;
        return clone;
    }
}
|
/*
* Copyright 2014-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.codecentric.boot.admin.server.config;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.reactivestreams.Publisher;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.ConditionalOnSingleCandidate;
import org.springframework.boot.autoconfigure.condition.NoneNestedConditions;
import org.springframework.boot.autoconfigure.mail.MailSenderAutoConfiguration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.context.annotation.Primary;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.mail.MailSender;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.web.client.RestTemplate;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.spring5.SpringTemplateEngine;
import org.thymeleaf.spring5.templateresolver.SpringResourceTemplateResolver;
import org.thymeleaf.templatemode.TemplateMode;
import de.codecentric.boot.admin.server.domain.entities.InstanceRepository;
import de.codecentric.boot.admin.server.domain.events.InstanceEvent;
import de.codecentric.boot.admin.server.notify.CompositeNotifier;
import de.codecentric.boot.admin.server.notify.DiscordNotifier;
import de.codecentric.boot.admin.server.notify.HipchatNotifier;
import de.codecentric.boot.admin.server.notify.LetsChatNotifier;
import de.codecentric.boot.admin.server.notify.MailNotifier;
import de.codecentric.boot.admin.server.notify.MicrosoftTeamsNotifier;
import de.codecentric.boot.admin.server.notify.NotificationTrigger;
import de.codecentric.boot.admin.server.notify.Notifier;
import de.codecentric.boot.admin.server.notify.NotifierProxyProperties;
import de.codecentric.boot.admin.server.notify.OpsGenieNotifier;
import de.codecentric.boot.admin.server.notify.PagerdutyNotifier;
import de.codecentric.boot.admin.server.notify.SlackNotifier;
import de.codecentric.boot.admin.server.notify.TelegramNotifier;
import de.codecentric.boot.admin.server.notify.filter.FilteringNotifier;
import de.codecentric.boot.admin.server.notify.filter.web.NotificationFilterController;
/**
 * Auto-configuration for Spring Boot Admin notifiers.
 *
 * Each nested configuration registers one notifier bean, guarded by a property that marks the
 * notifier as configured (webhook URL, API key, ...). All REST-based notifiers share
 * {@link #createNotifierRestTemplate}, which applies the optional outbound HTTP proxy settings
 * from {@link NotifierProxyProperties}. Runs after mail auto-configuration so a configured
 * {@code MailSender} is available to {@link MailNotifierConfiguration}.
 */
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(NotifierProxyProperties.class)
@AutoConfigureAfter({ MailSenderAutoConfiguration.class })
public class AdminServerNotifierAutoConfiguration {

    /**
     * Builds the RestTemplate used by the REST-based notifiers, configured with the
     * proxy host/port and optional basic-auth credentials from {@code proxyProperties}.
     * Without a proxy host the default request factory is kept.
     */
    private static RestTemplate createNotifierRestTemplate(NotifierProxyProperties proxyProperties) {
        RestTemplate restTemplate = new RestTemplate();
        if (proxyProperties.getHost() != null) {
            HttpClientBuilder builder = HttpClientBuilder.create();
            builder.setProxy(new HttpHost(proxyProperties.getHost(), proxyProperties.getPort()));
            if (proxyProperties.getUsername() != null && proxyProperties.getPassword() != null) {
                CredentialsProvider credsProvider = new BasicCredentialsProvider();
                credsProvider.setCredentials(new AuthScope(proxyProperties.getHost(), proxyProperties.getPort()),
                    new UsernamePasswordCredentials(proxyProperties.getUsername(), proxyProperties.getPassword()));
                builder.setDefaultCredentialsProvider(credsProvider);
            }
            restTemplate.setRequestFactory(new HttpComponentsClientHttpRequestFactory(builder.build()));
        }
        return restTemplate;
    }

    /** Wires the notifier(s) to the instance-event stream; started/stopped with the context. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnBean(Notifier.class)
    @Lazy(false)
    public static class NotifierTriggerConfiguration {

        @Bean(initMethod = "start", destroyMethod = "stop")
        @ConditionalOnMissingBean(NotificationTrigger.class)
        public NotificationTrigger notificationTrigger(Notifier notifier, Publisher<InstanceEvent> events) {
            return new NotificationTrigger(notifier, events);
        }
    }

    /** Combines multiple notifier beans into one primary CompositeNotifier. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnBean(Notifier.class)
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class })
    @Lazy(false)
    public static class CompositeNotifierConfiguration {

        @Bean
        @Primary
        @Conditional(NoSingleNotifierCandidateCondition.class)
        public CompositeNotifier compositeNotifier(List<Notifier> notifiers) {
            return new CompositeNotifier(notifiers);
        }

        /** Matches only when there is NOT exactly one unambiguous Notifier candidate. */
        static class NoSingleNotifierCandidateCondition extends NoneNestedConditions {

            NoSingleNotifierCandidateCondition() {
                super(ConfigurationPhase.REGISTER_BEAN);
            }

            @ConditionalOnSingleCandidate(Notifier.class)
            static class HasSingleNotifierInstance {

            }
        }
    }

    /** Exposes the REST endpoint for managing notification filters of a FilteringNotifier. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnSingleCandidate(FilteringNotifier.class)
    @Lazy(false)
    public static class FilteringNotifierWebConfiguration {

        private final FilteringNotifier filteringNotifier;

        public FilteringNotifierWebConfiguration(FilteringNotifier filteringNotifier) {
            this.filteringNotifier = filteringNotifier;
        }

        @Bean
        public NotificationFilterController notificationFilterController() {
            return new NotificationFilterController(this.filteringNotifier);
        }
    }

    /** Mail notifier, active when a MailSender bean is present; templates rendered via Thymeleaf. */
    @Configuration(proxyBeanMethods = false)
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @ConditionalOnBean(MailSender.class)
    @Lazy(false)
    public static class MailNotifierConfiguration {

        private final ApplicationContext applicationContext;

        public MailNotifierConfiguration(ApplicationContext applicationContext) {
            this.applicationContext = applicationContext;
        }

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.mail")
        public MailNotifier mailNotifier(JavaMailSender mailSender, InstanceRepository repository) {
            return new MailNotifier(mailSender, repository, mailNotifierTemplateEngine());
        }

        /** HTML template engine for notification mails (UTF-8, Spring resource resolution). */
        @Bean
        public TemplateEngine mailNotifierTemplateEngine() {
            SpringResourceTemplateResolver resolver = new SpringResourceTemplateResolver();
            resolver.setApplicationContext(this.applicationContext);
            resolver.setTemplateMode(TemplateMode.HTML);
            resolver.setCharacterEncoding(StandardCharsets.UTF_8.name());
            SpringTemplateEngine templateEngine = new SpringTemplateEngine();
            templateEngine.addTemplateResolver(resolver);
            return templateEngine;
        }
    }

    /** Hipchat notifier, active when its URL property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.hipchat", name = "url")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class HipchatNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.hipchat")
        public HipchatNotifier hipchatNotifier(InstanceRepository repository, NotifierProxyProperties proxyProperties) {
            return new HipchatNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** Slack notifier, active when its webhook URL property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.slack", name = "webhook-url")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class SlackNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.slack")
        public SlackNotifier slackNotifier(InstanceRepository repository, NotifierProxyProperties proxyProperties) {
            return new SlackNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** Let's Chat notifier, active when its URL property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.letschat", name = "url")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class LetsChatNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.letschat")
        public LetsChatNotifier letsChatNotifier(InstanceRepository repository,
                NotifierProxyProperties proxyProperties) {
            return new LetsChatNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** PagerDuty notifier, active when its service key property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.pagerduty", name = "service-key")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class PagerdutyNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.pagerduty")
        public PagerdutyNotifier pagerdutyNotifier(InstanceRepository repository,
                NotifierProxyProperties proxyProperties) {
            return new PagerdutyNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** OpsGenie notifier, active when its API key property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.opsgenie", name = "api-key")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class OpsGenieNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.opsgenie")
        public OpsGenieNotifier opsgenieNotifier(InstanceRepository repository,
                NotifierProxyProperties proxyProperties) {
            return new OpsGenieNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** Microsoft Teams notifier, active when its webhook URL property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.ms-teams", name = "webhook-url")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class MicrosoftTeamsNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.ms-teams")
        public MicrosoftTeamsNotifier microsoftTeamsNotifier(InstanceRepository repository,
                NotifierProxyProperties proxyProperties) {
            return new MicrosoftTeamsNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** Telegram notifier, active when its auth token property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.telegram", name = "auth-token")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class TelegramNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.telegram")
        public TelegramNotifier telegramNotifier(InstanceRepository repository,
                NotifierProxyProperties proxyProperties) {
            return new TelegramNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }

    /** Discord notifier, active when its webhook URL property is set. */
    @Configuration(proxyBeanMethods = false)
    @ConditionalOnProperty(prefix = "spring.boot.admin.notify.discord", name = "webhook-url")
    @AutoConfigureBefore({ NotifierTriggerConfiguration.class, CompositeNotifierConfiguration.class })
    @Lazy(false)
    public static class DiscordNotifierConfiguration {

        @Bean
        @ConditionalOnMissingBean
        @ConfigurationProperties("spring.boot.admin.notify.discord")
        public DiscordNotifier discordNotifier(InstanceRepository repository, NotifierProxyProperties proxyProperties) {
            return new DiscordNotifier(repository, createNotifierRestTemplate(proxyProperties));
        }
    }
}
|
package com.annimon.stream.operator;
import com.annimon.stream.function.IntPredicate;
import com.annimon.stream.iterator.PrimitiveIterator;
/**
 * Lazily filters an {@code int} iterator, yielding only values accepted by the predicate.
 *
 * FIX: the previous implementation advanced the underlying iterator on every call to
 * {@link #hasNext()}, so calling {@code hasNext()} twice in a row (legal per the Iterator
 * contract, which requires {@code hasNext} to be side-effect-free/idempotent) silently
 * discarded matching elements. The lookahead is now cached until consumed by
 * {@link #nextInt()}.
 */
public class IntFilter extends PrimitiveIterator.OfInt {

    private final PrimitiveIterator.OfInt iterator;
    private final IntPredicate predicate;

    // cached lookahead; valid only while hasNextEvaluated && hasNext
    private int next;
    // whether `next` holds an unconsumed matching value
    private boolean hasNext;
    // whether the source has been probed since the last consume
    private boolean hasNextEvaluated;

    public IntFilter(PrimitiveIterator.OfInt iterator, IntPredicate predicate) {
        this.iterator = iterator;
        this.predicate = predicate;
    }

    @Override
    public boolean hasNext() {
        if (!hasNextEvaluated) {
            hasNext = false;
            while (iterator.hasNext()) {
                next = iterator.nextInt();
                if (predicate.test(next)) {
                    hasNext = true;
                    break;
                }
            }
            hasNextEvaluated = true;
        }
        return hasNext;
    }

    @Override
    public int nextInt() {
        // ensure the lookahead is populated even if the caller skipped hasNext()
        if (!hasNextEvaluated) {
            hasNext();
        }
        hasNextEvaluated = false; // consume the cached value
        return next;
    }
}
|
package io.github.smallintro.springboot.employeeservice.exception;
/**
 * Checked exception thrown when a requested employee record does not exist.
 */
public class RecordNotFoundException extends Exception {

    private static final long serialVersionUID = 1L;

    /**
     * @param msg human-readable description of the missing record
     */
    public RecordNotFoundException(String msg) {
        super(msg);
    }

    /**
     * Backward-compatible addition: preserves the underlying failure for exception chaining.
     *
     * @param msg human-readable description of the missing record
     * @param cause underlying failure that led to the lookup miss
     */
    public RecordNotFoundException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
|
package org.firstinspires.ftc.teamcode.teamcode.Subsystems.ForMainRobot;
import com.qualcomm.robotcore.hardware.Servo;
import system.config.ConfigParam;
import system.config.TeleopConfig;
import system.robot.Robot;
import system.robot.SubSystem;
import util.control.Button;
import util.control.CustomizableGamepad;
import util.control.Toggle;
/**
 * Subsystem controlling the "plopper" mechanism: an arm servo and a claw servo, each driven
 * by its own flip-toggle gamepad button configured through the config system.
 */
public class PlopperSubsystem extends SubSystem {

    /** Gamepad bindings pulled from the config system in start(). */
    CustomizableGamepad inputs;
    public Servo armServo;
    public Servo clawServo;

    // Config keys — must match the ConfigParam names registered in teleopConfig().
    static final String ARMBUTTON = "ArmButton";
    static final String CLAWBUTTON = "ClawButton";

    Toggle armToggle;
    Toggle clawToggle;

    /**
     * @param r owning robot (supplies the hardware map and config controls)
     * @param armServo hardware-map name of the arm servo
     * @param clawServo hardware-map name of the claw servo
     */
    public PlopperSubsystem(Robot r, String armServo, String clawServo) {
        super(r);
        this.armServo = robot.hardwareMap.servo.get(armServo);
        this.clawServo = robot.hardwareMap.servo.get(clawServo);
        armToggle = new Toggle(Toggle.ToggleTypes.flipToggle, false);
        clawToggle = new Toggle(Toggle.ToggleTypes.flipToggle, false);
        usesConfig = true;
    }

    @Override
    public void init() {
    }

    @Override
    public void init_loop() {
    }

    @Override
    public void start() {
        inputs = robot.pullControls(this);
    }

    @Override
    public void handle() {
        // FIX: both toggles previously polled the literal key "armToggleButton", which
        // (a) drove the claw from the arm button and (b) did not match the
        // "ArmButton"/"ClawButton" names registered in teleopConfig().
        armToggle.updateToggle(inputs.getInput(ARMBUTTON));
        clawToggle.updateToggle(inputs.getInput(CLAWBUTTON));
        if (armToggle.getCurrentState()) {
            armServo.setPosition(1);
        } else {
            // FIX: servo positions are defined on [0, 1]; the previous -1 was out of range
            // (the SDK clips it to 0, so observable behavior is unchanged).
            armServo.setPosition(0);
        }
        if (clawToggle.getCurrentState()) {
            clawServo.setPosition(1);
        } else {
            clawServo.setPosition(0);
        }
    }

    @Override
    public void stop() {
    }

    /** Registers the two toggle buttons (defaults: gamepad 2, B = arm, X = claw). */
    @TeleopConfig
    public static ConfigParam[] teleopConfig() {
        return new ConfigParam[]{
            new ConfigParam(ARMBUTTON, Button.BooleanInputs.b, 2),
            new ConfigParam(CLAWBUTTON, Button.BooleanInputs.x, 2)
        };
    }
}
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE190_Integer_Overflow__int_getParameter_Servlet_add_05.java
Label Definition File: CWE190_Integer_Overflow__int.label.xml
Template File: sources-sinks-05.tmpl.java
*/
/*
* @description
* CWE: 190 Integer Overflow
* BadSource: getParameter_Servlet Read data from a querystring using getParameter()
* GoodSource: A hardcoded non-zero, non-min, non-max, even number
* Sinks: add
* GoodSink: Ensure there will not be an overflow before adding 1 to data
* BadSink : Add 1 to data, which can cause an overflow
* Flow Variant: 05 Control flow: if(privateTrue) and if(privateFalse)
*
* */
import javax.servlet.http.*;
import java.util.logging.Level;
public class CWE190_Integer_Overflow__int_getParameter_Servlet_add_05 extends AbstractTestCaseServlet
{
/* The two variables below are not defined as "final", but are never
* assigned any other value, so a tool should be able to identify that
* reads of these will always return their initialized values.
*/
private boolean privateTrue = true;
private boolean privateFalse = false;
/* Juliet benchmark sink/source method deliberately exercising CWE-190 (integer overflow):
 * reads an attacker-controlled int from the "name" query parameter and adds 1 without an
 * overflow check. NOTE(review): the dead branches and "POTENTIAL FLAW" markers are part of
 * the generated test-case template and must not be "fixed". */
public void bad(HttpServletRequest request, HttpServletResponse response) throws Throwable
{
    int data;
    if (privateTrue)
    {
        data = Integer.MIN_VALUE; /* Initialize data */
        /* POTENTIAL FLAW: Read data from a querystring using getParameter() */
        {
            String stringNumber = request.getParameter("name");
            try
            {
                data = Integer.parseInt(stringNumber.trim());
            }
            catch(NumberFormatException exceptNumberFormat)
            {
                IO.logger.log(Level.WARNING, "Number format exception reading data from parameter 'name'", exceptNumberFormat);
            }
        }
    }
    else
    {
        /* INCIDENTAL: CWE 561 Dead Code, the code below will never run
         * but ensure data is initialized before the Sink to avoid compiler errors */
        data = 0;
    }
    if (privateTrue)
    {
        /* POTENTIAL FLAW: if data == Integer.MAX_VALUE, this will overflow */
        int result = (int)(data + 1);
        IO.writeLine("result: " + result);
    }
}
/* goodG2B1() - use goodsource and badsink by changing first privateTrue to privateFalse */
private void goodG2B1(HttpServletRequest request, HttpServletResponse response) throws Throwable
{
int data;
if (privateFalse)
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run
* but ensure data is inititialized before the Sink to avoid compiler errors */
data = 0;
}
else
{
/* FIX: Use a hardcoded number that won't cause underflow, overflow, divide by zero, or loss-of-precision issues */
data = 2;
}
if (privateTrue)
{
/* POTENTIAL FLAW: if data == Integer.MAX_VALUE, this will overflow */
int result = (int)(data + 1);
IO.writeLine("result: " + result);
}
}
/* goodG2B2() - use goodsource and badsink by reversing statements in first if */
private void goodG2B2(HttpServletRequest request, HttpServletResponse response) throws Throwable
{
int data;
if (privateTrue)
{
/* FIX: Use a hardcoded number that won't cause underflow, overflow, divide by zero, or loss-of-precision issues */
data = 2;
}
else
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run
* but ensure data is inititialized before the Sink to avoid compiler errors */
data = 0;
}
if (privateTrue)
{
/* POTENTIAL FLAW: if data == Integer.MAX_VALUE, this will overflow */
int result = (int)(data + 1);
IO.writeLine("result: " + result);
}
}
/* goodB2G1() - use badsource and goodsink by changing second privateTrue to privateFalse */
private void goodB2G1(HttpServletRequest request, HttpServletResponse response) throws Throwable
{
int data;
if (privateTrue)
{
data = Integer.MIN_VALUE; /* Initialize data */
/* POTENTIAL FLAW: Read data from a querystring using getParameter() */
{
String stringNumber = request.getParameter("name");
try
{
data = Integer.parseInt(stringNumber.trim());
}
catch(NumberFormatException exceptNumberFormat)
{
IO.logger.log(Level.WARNING, "Number format exception reading data from parameter 'name'", exceptNumberFormat);
}
}
}
else
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run
* but ensure data is inititialized before the Sink to avoid compiler errors */
data = 0;
}
if (privateFalse)
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run */
IO.writeLine("Benign, fixed string");
}
else
{
/* FIX: Add a check to prevent an overflow from occurring */
if (data < Integer.MAX_VALUE)
{
int result = (int)(data + 1);
IO.writeLine("result: " + result);
}
else
{
IO.writeLine("data value is too large to perform addition.");
}
}
}
/* goodB2G2() - use badsource and goodsink by reversing statements in second if */
private void goodB2G2(HttpServletRequest request, HttpServletResponse response) throws Throwable
{
int data;
if (privateTrue)
{
data = Integer.MIN_VALUE; /* Initialize data */
/* POTENTIAL FLAW: Read data from a querystring using getParameter() */
{
String stringNumber = request.getParameter("name");
try
{
data = Integer.parseInt(stringNumber.trim());
}
catch(NumberFormatException exceptNumberFormat)
{
IO.logger.log(Level.WARNING, "Number format exception reading data from parameter 'name'", exceptNumberFormat);
}
}
}
else
{
/* INCIDENTAL: CWE 561 Dead Code, the code below will never run
* but ensure data is inititialized before the Sink to avoid compiler errors */
data = 0;
}
if (privateTrue)
{
/* FIX: Add a check to prevent an overflow from occurring */
if (data < Integer.MAX_VALUE)
{
int result = (int)(data + 1);
IO.writeLine("result: " + result);
}
else
{
IO.writeLine("data value is too large to perform addition.");
}
}
}
public void good(HttpServletRequest request, HttpServletResponse response) throws Throwable
{
goodG2B1(request, response);
goodG2B2(request, response);
goodB2G1(request, response);
goodB2G2(request, response);
}
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
public static void main(String[] args) throws ClassNotFoundException,
InstantiationException, IllegalAccessException
{
mainFromParent(args);
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.renderkit.html.base;
import org.apache.myfaces.renderkit.html.util.HtmlRendererUtils;
import org.apache.myfaces.renderkit.html.util.ClientBehaviorRendererUtils;
import org.apache.myfaces.renderkit.html.util.CommonHtmlAttributesUtil;
import org.apache.myfaces.renderkit.html.util.CommonHtmlEventsUtil;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import jakarta.faces.component.UIComponent;
import jakarta.faces.component.UIInput;
import jakarta.faces.component.UIOutput;
import jakarta.faces.component.behavior.ClientBehavior;
import jakarta.faces.component.behavior.ClientBehaviorHolder;
import jakarta.faces.component.html.HtmlInputTextarea;
import jakarta.faces.context.FacesContext;
import jakarta.faces.context.ResponseWriter;
import jakarta.faces.convert.ConverterException;
import org.apache.myfaces.renderkit.RendererUtils;
import org.apache.myfaces.renderkit.html.util.ResourceUtils;
import org.apache.myfaces.renderkit.html.util.HTML;
import org.apache.myfaces.renderkit.html.util.JSFAttr;
/**
 * Base renderer for HTML {@code <textarea>} elements. Splits rendering into
 * begin/value/end phases so subclasses can interleave custom attributes.
 * NOTE(review): attribute-write order matters for the emitted markup; keep it.
 */
public class HtmlTextareaRendererBase extends HtmlRenderer
{
    // Component attribute that, when true-ish, makes the renderer emit a
    // leading newline inside the textarea (browsers swallow the first one).
    private static final String ADD_NEW_LINE_AT_START_ATTR = "org.apache.myfaces.addNewLineAtStart";
    /**
     * Renders the complete textarea; when client behaviors are attached the
     * default JSF script resource is rendered inline first if necessary.
     */
    @Override
    public void encodeEnd(FacesContext facesContext, UIComponent uiComponent) throws IOException
    {
        RendererUtils.checkParamValidity(facesContext, uiComponent, UIInput.class);
        if (uiComponent instanceof ClientBehaviorHolder)
        {
            Map<String, List<ClientBehavior>> behaviors = ((ClientBehaviorHolder) uiComponent).getClientBehaviors();
            if (!behaviors.isEmpty())
            {
                ResourceUtils.renderDefaultJsfJsInlineIfNecessary(facesContext,
                        facesContext.getResponseWriter());
            }
        }
        encodeTextArea(facesContext, uiComponent);
    }
    /** Template method: renders begin tag + attributes, then value, then end tag. */
    protected void encodeTextArea(FacesContext facesContext, UIComponent uiComponent) throws IOException
    {
        //allow subclasses to render custom attributes by separating rendering begin and end
        renderTextAreaBegin(facesContext, uiComponent);
        renderTextAreaValue(facesContext, uiComponent);
        renderTextAreaEnd(facesContext, uiComponent);
    }
    //Subclasses can set the value of an attribute before, or can render a custom attribute after calling this method
    protected void renderTextAreaBegin(FacesContext facesContext, UIComponent uiComponent) throws IOException
    {
        ResponseWriter writer = facesContext.getResponseWriter();
        writer.startElement(HTML.TEXTAREA_ELEM, uiComponent);
        Map<String, List<ClientBehavior>> behaviors = null;
        if (uiComponent instanceof ClientBehaviorHolder)
        {
            behaviors = ((ClientBehaviorHolder) uiComponent).getClientBehaviors();
            // With behaviors present the id must always be written so the
            // client-side behavior scripts can locate the element.
            if (!behaviors.isEmpty())
            {
                HtmlRendererUtils.writeIdAndName(writer, uiComponent, facesContext);
            }
            else
            {
                HtmlRendererUtils.writeIdIfNecessary(writer, uiComponent, facesContext);
                writer.writeAttribute(HTML.NAME_ATTR, uiComponent.getClientId(facesContext), null);
            }
            long commonPropertiesMarked = 0L;
            if (isCommonPropertiesOptimizationEnabled(facesContext))
            {
                commonPropertiesMarked = CommonHtmlAttributesUtil.getMarkedAttributes(uiComponent);
            }
            // Fast path: no behaviors + optimization enabled lets the marked
            // attributes drive event rendering directly.
            if (behaviors.isEmpty() && isCommonPropertiesOptimizationEnabled(facesContext))
            {
                CommonHtmlAttributesUtil.renderChangeEventProperty(writer,
                        commonPropertiesMarked, uiComponent);
                CommonHtmlAttributesUtil.renderEventProperties(writer,
                        commonPropertiesMarked, uiComponent);
                CommonHtmlAttributesUtil.renderFieldEventPropertiesWithoutOnchange(writer,
                        commonPropertiesMarked, uiComponent);
            }
            else
            {
                HtmlRendererUtils.renderBehaviorizedOnchangeEventHandler(facesContext, writer, uiComponent, behaviors);
                if (isCommonEventsOptimizationEnabled(facesContext))
                {
                    Long commonEventsMarked = CommonHtmlEventsUtil.getMarkedEvents(uiComponent);
                    CommonHtmlEventsUtil.renderBehaviorizedEventHandlers(facesContext, writer,
                            commonPropertiesMarked, commonEventsMarked, uiComponent, behaviors);
                    CommonHtmlEventsUtil.renderBehaviorizedFieldEventHandlersWithoutOnchange(
                        facesContext, writer, commonPropertiesMarked, commonEventsMarked, uiComponent, behaviors);
                }
                else
                {
                    HtmlRendererUtils.renderBehaviorizedEventHandlers(facesContext, writer, uiComponent, behaviors);
                    HtmlRendererUtils.renderBehaviorizedFieldEventHandlersWithoutOnchange(
                            facesContext, writer, uiComponent, behaviors);
                }
            }
            if (isCommonPropertiesOptimizationEnabled(facesContext))
            {
                CommonHtmlAttributesUtil.renderCommonFieldPassthroughPropertiesWithoutDisabledAndEvents(writer,
                        CommonHtmlAttributesUtil.getMarkedAttributes(uiComponent), uiComponent);
                HtmlRendererUtils.renderHTMLAttributes(writer, uiComponent, HTML.TEXTAREA_ATTRIBUTES);
            }
            else
            {
                HtmlRendererUtils.renderHTMLAttributes(writer, uiComponent,
                        HTML.TEXTAREA_PASSTHROUGH_ATTRIBUTES_WITHOUT_DISABLED_AND_EVENTS);
            }
        }
        else
        {
            // No client behaviors possible: plain id/name + passthrough attributes.
            HtmlRendererUtils.writeIdIfNecessary(writer, uiComponent, facesContext);
            writer.writeAttribute(HTML.NAME_ATTR, uiComponent.getClientId(facesContext), null);
            if (isCommonPropertiesOptimizationEnabled(facesContext))
            {
                CommonHtmlAttributesUtil.renderCommonFieldPassthroughPropertiesWithoutDisabled(writer,
                        CommonHtmlAttributesUtil.getMarkedAttributes(uiComponent), uiComponent);
                HtmlRendererUtils.renderHTMLAttributes(writer, uiComponent, HTML.TEXTAREA_ATTRIBUTES);
            }
            else
            {
                HtmlRendererUtils.renderHTMLAttributes(writer, uiComponent,
                        HTML.TEXTAREA_PASSTHROUGH_ATTRIBUTES_WITHOUT_DISABLED);
            }
        }
        if (isDisabled(facesContext, uiComponent))
        {
            writer.writeAttribute(HTML.DISABLED_ATTR, Boolean.TRUE, null);
        }
    }
    //Subclasses can override the writing of the "text" value of the textarea
    protected void renderTextAreaValue(FacesContext facesContext, UIComponent uiComponent) throws IOException
    {
        ResponseWriter writer = facesContext.getResponseWriter();
        // The attribute may arrive as a String or a Boolean; both are accepted.
        Object addNewLineAtStart = uiComponent.getAttributes().get(ADD_NEW_LINE_AT_START_ATTR);
        if (addNewLineAtStart != null)
        {
            boolean addNewLineAtStartBoolean = false;
            if (addNewLineAtStart instanceof String)
            {
                addNewLineAtStartBoolean = Boolean.valueOf((String)addNewLineAtStart);
            }
            else if (addNewLineAtStart instanceof Boolean)
            {
                addNewLineAtStartBoolean = (Boolean) addNewLineAtStart;
            }
            if (addNewLineAtStartBoolean)
            {
                writer.writeText("\n", null);
            }
        }
        String strValue = RendererUtils.getStringValue(facesContext, uiComponent);
        if (strValue != null)
        {
            writer.writeText(strValue, JSFAttr.VALUE_ATTR);
        }
    }
    /** Closes the textarea element. */
    protected void renderTextAreaEnd(FacesContext facesContext,
            UIComponent uiComponent) throws IOException
    {
        facesContext.getResponseWriter().endElement(HTML.TEXTAREA_ELEM);
    }
    /** Reads the disabled flag from the typed component when possible, else from the attribute map. */
    protected boolean isDisabled(FacesContext facesContext, UIComponent uiComponent)
    {
        if (uiComponent instanceof HtmlInputTextarea)
        {
            return ((HtmlInputTextarea)uiComponent).isDisabled();
        }
        return RendererUtils.getBooleanAttribute(uiComponent, HTML.DISABLED_ATTR, false);
    }
    /** Decodes the submitted value; client behaviors are only decoded for enabled components. */
    @Override
    public void decode(FacesContext facesContext, UIComponent component)
    {
        RendererUtils.checkParamValidity(facesContext, component, UIInput.class);
        HtmlRendererUtils.decodeUIInput(facesContext, component);
        if (component instanceof ClientBehaviorHolder && !HtmlRendererUtils.isDisabled(component))
        {
            ClientBehaviorRendererUtils.decodeClientBehaviors(facesContext, component);
        }
    }
    /** Converts the submitted value via the standard UIOutput conversion path. */
    @Override
    public Object getConvertedValue(FacesContext facesContext, UIComponent uiComponent, Object submittedValue)
        throws ConverterException
    {
        RendererUtils.checkParamValidity(facesContext, uiComponent, UIOutput.class);
        return RendererUtils.getConvertedUIOutputValue(facesContext,
                                                       (UIOutput)uiComponent,
                                                       submittedValue);
    }
}
|
/*
* MIT License
*
* Copyright (c) 2022 MASES s.r.l.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**************************************************************************************
* <auto-generated>
* This code was generated from a template using JCOReflector
*
* Manual changes to this file may cause unexpected behavior in your application.
* Manual changes to this file will be overwritten if the code is regenerated.
* </auto-generated>
*************************************************************************************/
package system.formats.asn1;
import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;
import java.util.ArrayList;
// Import section
import system.collections.BitArray;
import system.DateTimeOffset;
import system.Enum;
import system.formats.asn1.Asn1Tag;
import system.numerics.BigInteger;
/**
* The base .NET class managing System.Formats.Asn1.AsnDecoder, System.Formats.Asn1, Version=6.0.0.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51.
* <p>
*
* See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.Formats.Asn1.AsnDecoder" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.Formats.Asn1.AsnDecoder</a>
*/
// NOTE(review): generated by JCOReflector (see header) — do not hand-edit the
// code; it will be overwritten on regeneration. Comments only below.
public class AsnDecoder extends NetObject {
    /**
     * Fully assembly qualified name: System.Formats.Asn1, Version=6.0.0.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51
     */
    public static final String assemblyFullName = "System.Formats.Asn1, Version=6.0.0.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51";
    /**
     * Assembly name: System.Formats.Asn1
     */
    public static final String assemblyShortName = "System.Formats.Asn1";
    /**
     * Qualified class name: System.Formats.Asn1.AsnDecoder
     */
    public static final String className = "System.Formats.Asn1.AsnDecoder";
    // Bridge to the .NET runtime for this assembly; shared by all instances.
    static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
    /**
     * The type managed from JCOBridge. See {@link JCType}
     */
    public static JCType classType = createType();
    static JCEnum enumInstance = null;
    // The wrapped .NET object instance backing this Java facade.
    JCObject classInstance = null;
    // Resolves the .NET type via the bridge; returns null (after logging) on failure.
    static JCType createType() {
        try {
            String classToCreate = className + ", "
                    + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Creating %s", classToCreate);
            JCType typeCreated = bridge.GetType(classToCreate);
            if (JCOReflector.getDebug())
                JCOReflector.writeLog("Created: %s",
                        (typeCreated != null) ? typeCreated.toString() : "Returned null value");
            return typeCreated;
        } catch (JCException e) {
            JCOReflector.writeLog(e);
            return null;
        }
    }
    // Adds a .NET assembly reference through the bridge, translating native errors.
    void addReference(String ref) throws Throwable {
        try {
            bridge.AddReference(ref);
        } catch (JCNativeException jcne) {
            throw translateException(jcne);
        }
    }
    /**
     * Internal constructor. Use with caution
     */
    public AsnDecoder(java.lang.Object instance) throws Throwable {
        super(instance);
        if (instance instanceof JCObject) {
            classInstance = (JCObject) instance;
        } else
            throw new Exception("Cannot manage object, it is not a JCObject");
    }
    /** Returns the fully qualified .NET assembly name. */
    public String getJCOAssemblyName() {
        return assemblyFullName;
    }
    /** Returns the qualified .NET class name. */
    public String getJCOClassName() {
        return className;
    }
    /** Returns the "class, assembly" pair used to resolve the .NET type. */
    public String getJCOObjectName() {
        return className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
    }
    /** Returns the wrapped .NET instance (a JCObject), or null if unset. */
    public java.lang.Object getJCOInstance() {
        return classInstance;
    }
    /** Replaces the wrapped .NET instance, keeping the superclass in sync. */
    public void setJCOInstance(JCObject instance) {
        classInstance = instance;
        super.setJCOInstance(classInstance);
    }
    /** Returns the bridged .NET type descriptor. */
    public JCType getJCOType() {
        return classType;
    }
    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link AsnDecoder}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link AsnDecoder} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static AsnDecoder cast(IJCOBridgeReflected from) throws Throwable {
        NetType.AssertCast(classType, from);
        return new AsnDecoder(from.getJCOInstance());
    }
    // Constructors section
    public AsnDecoder() throws Throwable {
    }
    // Methods section
    // Properties section
    // Instance Events section
}
|
/*
* Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Distribution License v. 1.0, which is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
package org.glassfish.rmic.classes.hcks;
import java.rmi.Remote;
import java.rmi.RemoteException;
/**
 * Remote interface exercised by the rmic "hcks" test classes (see package).
 * Every method follows the RMI contract of declaring {@link RemoteException}.
 */
public interface RmiII extends Remote {
    /** Simple round-trip sanity call. */
    String sayHello() throws RemoteException;
    /** Sends a byte array to the server; returns an int result. */
    int sendBytes(byte[] x) throws RemoteException;
    /** Sends one object; may raise the application-level RmiIMyException. */
    Object sendOneObject(Object x) throws RemoteException, RmiIMyException;
    /** Sends two objects in a single call. */
    Object sendTwoObjects(Object x, Object y) throws RemoteException;
    /** Triggers a colocated (same-process) call initiated from the servant side. */
    String makeColocatedCallFromServant() throws RemoteException;
    /** Colocated call from the servant; declares a broad Exception by design. */
    String colocatedCallFromServant(String a) throws RemoteException, Exception;
    /** Deliberately throws ThreadDeath inside the servant (error-path test). */
    String throwThreadDeathInServant(String a) throws RemoteException, ThreadDeath;
    /** Returns either a serializable or non-serializable object, per the flag. */
    Object returnObjectFromServer(boolean isSerializable) throws RemoteException;
}
|
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.repository.jdbc.management;
import io.gravitee.repository.exceptions.TechnicalException;
import io.gravitee.repository.jdbc.orm.JdbcObjectMapper;
import io.gravitee.repository.management.api.InvitationRepository;
import io.gravitee.repository.management.model.Invitation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import java.sql.Types;
import java.util.Date;
import java.util.List;
/**
* @author Azize ELAMRANI (azize.elamrani at graviteesource.com)
* @author GraviteeSource Team
*/
@Repository
public class JdbcInvitationRepository extends JdbcAbstractCrudRepository<Invitation, String> implements InvitationRepository {

    // SLF4J convention: loggers are static final so a single instance is
    // shared (was a per-instance non-static field).
    private static final Logger LOGGER = LoggerFactory.getLogger(JdbcInvitationRepository.class);

    // Column mapping between the Invitation model and the "invitations" table.
    private static final JdbcObjectMapper ORM = JdbcObjectMapper.builder(Invitation.class, "invitations", "id")
            .addColumn("id", Types.NVARCHAR, String.class)
            .addColumn("reference_type", Types.NVARCHAR, String.class)
            .addColumn("reference_id", Types.NVARCHAR, String.class)
            .addColumn("email", Types.NVARCHAR, String.class)
            .addColumn("api_role", Types.NVARCHAR, String.class)
            .addColumn("application_role", Types.NVARCHAR, String.class)
            .addColumn("created_at", Types.TIMESTAMP, Date.class)
            .addColumn("updated_at", Types.TIMESTAMP, Date.class)
            .build();

    @Override
    protected JdbcObjectMapper getOrm() {
        return ORM;
    }

    @Override
    protected String getId(final Invitation invitation) {
        return invitation.getId();
    }

    /**
     * Finds all invitations attached to a reference (type + id pair).
     *
     * @param referenceType the reference type to filter on
     * @param referenceId   the reference id to filter on
     * @return the matching invitations (possibly empty)
     * @throws TechnicalException if the underlying query fails
     */
    @Override
    public List<Invitation> findByReference(final String referenceType, final String referenceId) throws TechnicalException {
        LOGGER.debug("JdbcInvitationRepository.findByReference({}, {})", referenceType, referenceId);
        try {
            // Parameterized query: reference values are bound, never concatenated.
            return jdbcTemplate.query("select * from invitations where reference_type = ? and reference_id = ?"
                    , ORM.getRowMapper(), referenceType, referenceId);
        } catch (final Exception ex) {
            final String message = "Failed to find invitations by reference";
            LOGGER.error(message, ex);
            throw new TechnicalException(message, ex);
        }
    }
}
|
package com.example.android.popularmovies.model;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
 * Gson model for one page of a TMDb movie-reviews response.
 */
public class MovieReviewResponse {
    @SerializedName("id")
    @Expose
    private Long id;
    @SerializedName("page")
    @Expose
    private Long page;
    @SerializedName("results")
    @Expose
    private List<MovieReview> results = null;
    @SerializedName("total_pages")
    @Expose
    private Long totalPages;
    @SerializedName("total_results")
    @Expose
    private Long totalResults;
    /** Movie id this review page belongs to. */
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    /** 1-based page number of this response. */
    public Long getPage() {
        return page;
    }
    public void setPage(Long page) {
        this.page = page;
    }
    /** The reviews contained in this page. */
    public List<MovieReview> getResults() {
        return results;
    }
    public void setResults(List<MovieReview> results) {
        this.results = results;
    }
    /**
     * @deprecated misleading copy-paste name — this response holds reviews,
     * not trailers; use {@link #getResults()} instead. Kept for callers.
     */
    @Deprecated
    public List<MovieReview> getTrailers() {
        return results;
    }
    /**
     * @deprecated use {@link #setResults(List)} instead.
     */
    @Deprecated
    public void setTrailers(List<MovieReview> results) {
        this.results = results;
    }
    public Long getTotalPages() {
        return totalPages;
    }
    public void setTotalPages(Long totalPages) {
        this.totalPages = totalPages;
    }
    public Long getTotalResults() {
        return totalResults;
    }
    public void setTotalResults(Long totalResults) {
        this.totalResults = totalResults;
    }
}
|
package miltos.diploma.toolkit;
import java.util.Iterator;
import java.util.Vector;
/**
* This class represents a set of metrics found in a result.xml
* file of CKJM.
*
* Typically, it is a Vector of Metrics objects. Each object is
* a bundle of metrics for a specific class of the project.
*
* It is the equivalent of IssueSet. However, here we pay attention
* on the level of the analysis.
*
* @author Miltos
*
*/
public class MetricSet {
    /*
     * There is no need of setting a Name. It is a collection of
     * all the metrics available for each class of a project.
     */
    // Backing store; Vector is kept (rather than ArrayList) because
    // getMetricSet()/the constructor expose it directly to callers.
    Vector<Metrics> metricSet;

    //Constructors
    /** Creates an empty metric set. */
    public MetricSet(){
        metricSet = new Vector<>();
    }
    /** Wraps an existing vector of per-class metrics (no copy is made). */
    public MetricSet(Vector<Metrics> metrics) { this.metricSet = metrics; }

    //Setters and Getters
    public Vector<Metrics> getMetricSet() {
        return metricSet;
    }
    public void setMetricSet(Vector<Metrics> metricSet) {
        this.metricSet = metricSet;
    }

    //Extra methods - Used in order to avoid train expressions
    public void addMetrics(Metrics metrics){
        metricSet.add(metrics);
    }
    public void addMetrics(int index, Metrics metrics){
        metricSet.add(index, metrics);
    }
    public void clearMetrics(){
        metricSet.clear();
    }
    public boolean containsMetrics(Metrics metrics){
        return metricSet.contains(metrics);
    }
    public Metrics get(int index){
        return metricSet.get(index);
    }
    public boolean isEmpty(){
        return metricSet.isEmpty();
    }
    public Iterator<Metrics> iterator(){
        return metricSet.iterator();
    }
    public int indexOfMetrics(Metrics metrics){
        return metricSet.indexOf(metrics);
    }
    public void removeMetrics(int index){
        metricSet.remove(index);
    }
    public void removeMetrics(Metrics metrics){
        metricSet.remove(metrics);
    }
    public int size(){
        return metricSet.size();
    }
    /**
     * Returns the metrics as a typed array.
     * Bug fix: the previous implementation cast {@code Vector.toArray()}
     * (which returns {@code Object[]}) to {@code Metrics[]}; that cast
     * always throws {@link ClassCastException} at runtime. The typed
     * {@code toArray(T[])} overload is the correct way to get a typed array.
     */
    public Metrics[] toArray(){
        return metricSet.toArray(new Metrics[0]);
    }
    @Override
    public String toString(){
        return metricSet.toString();
    }
}
|
package top.kylewang.bos.service.take_delivery.impl;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.cxf.jaxrs.client.WebClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import top.kylewang.bos.constants.Constants;
import top.kylewang.bos.dao.base.AreaRepository;
import top.kylewang.bos.dao.base.FixedAreaRepository;
import top.kylewang.bos.dao.base.WorkBillRepository;
import top.kylewang.bos.dao.take_delivery.OrderRepository;
import top.kylewang.bos.domain.base.Area;
import top.kylewang.bos.domain.base.Courier;
import top.kylewang.bos.domain.base.FixedArea;
import top.kylewang.bos.domain.base.SubArea;
import top.kylewang.bos.domain.take_delivery.Order;
import top.kylewang.bos.domain.take_delivery.WorkBill;
import top.kylewang.bos.service.take_delivery.OrderService;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.Session;
import javax.ws.rs.core.MediaType;
import java.util.Date;
import java.util.Iterator;
import java.util.UUID;
/**
* @author Kyle.Wang
* 2018/1/7 0007 19:43
*/
@Service
@Transactional(rollbackFor = Exception.class)
public class OrderServiceImpl implements OrderService {
    @Autowired
    private OrderRepository orderRepository;
    @Autowired
    private FixedAreaRepository fixedAreaRepository;
    @Autowired
    private AreaRepository areaRepository;
    @Autowired
    private WorkBillRepository workBillRepository;
    @Autowired
    private JmsTemplate smsTemplate;

    /**
     * Saves a new order, attempting automatic courier dispatch in three steps:
     * (A) exact address match against the CRM address book, (B) sub-area
     * primary keyword match, (B2) sub-area auxiliary keyword match. If no
     * strategy assigns a courier, the order falls through to manual dispatch.
     *
     * @param order the incoming order (send/receive areas hold raw user input)
     */
    @Override
    public void saveOrder(Order order) {
        // Assign order number, creation time and initial status ("1": waiting for pickup).
        order.setOrderNum(UUID.randomUUID().toString());
        order.setOrderTime(new Date());
        order.setStatus("1");
        // Replace the user-submitted areas with the persistent Area entities
        // matching the same province/city/district.
        Area sendArea = order.getSendArea();
        Area persistSendArea = areaRepository.findByProvinceAndCityAndDistrict(sendArea.getProvince(), sendArea.getCity(), sendArea.getDistrict());
        Area recArea = order.getRecArea();
        Area persistRecArea = areaRepository.findByProvinceAndCityAndDistrict(recArea.getProvince(), recArea.getCity(), recArea.getDistrict());
        order.setSendArea(persistSendArea);
        order.setRecArea(persistRecArea);
        // Strategy A: exact address match via the CRM service -> fixed area -> courier.
        String fixedAreaId = WebClient
                .create(Constants.CRM_MANAGEMENT_URL + "/services/customerService/customer/findFixedAreaIdByAddress?address=" + order.getSendAddress())
                .accept(MediaType.APPLICATION_JSON).get(String.class);
        if (fixedAreaId != null) {
            FixedArea fixedArea = fixedAreaRepository.findOne(fixedAreaId);
            // Bug fix: the previous code called iterator().next() without a
            // hasNext() guard, throwing NoSuchElementException when the fixed
            // area had no couriers; it also assumed findOne never returns null.
            if (fixedArea != null
                    && assignToFirstCourier(order, fixedArea.getCouriers().iterator())) {
                return;
            }
        }
        if (persistSendArea != null) {
            // Strategy B: match the send address against each sub-area's primary keyword.
            for (SubArea subArea : persistSendArea.getSubareas()) {
                String keyWords = subArea.getKeyWords();
                if (keyWords != null && order.getSendAddress().contains(keyWords)
                        && assignToFirstCourier(order, subArea.getFixedArea().getCouriers().iterator())) {
                    return;
                }
            }
            // Strategy B2: fall back to each sub-area's auxiliary keyword
            // (null-safe: auxiliary keywords are optional).
            for (SubArea subArea : persistSendArea.getSubareas()) {
                String assistKeyWords = subArea.getAssistKeyWords();
                if (assistKeyWords != null && order.getSendAddress().contains(assistKeyWords)
                        && assignToFirstCourier(order, subArea.getFixedArea().getCouriers().iterator())) {
                    return;
                }
            }
        }
        // No automatic match: mark for manual dispatch ("2") and save.
        order.setOrderType("2");
        orderRepository.save(order);
    }

    @Override
    public Order findByOrderNum(String orderNum) {
        return orderRepository.findByOrderNum(orderNum);
    }

    /**
     * Tries to assign the first non-null courier from the iterator: saves the
     * order as auto-dispatched and generates the work bill.
     *
     * @return true if a courier was assigned (dispatch complete), false otherwise
     */
    private boolean assignToFirstCourier(Order order, Iterator<Courier> couriers) {
        if (couriers != null && couriers.hasNext()) {
            Courier courier = couriers.next();
            if (courier != null) {
                saveOrder(order, courier);
                generateWorkBill(order);
                return true;
            }
        }
        return false;
    }

    /**
     * Generates the pickup work bill for the order and notifies the assigned
     * courier by SMS (via the "bos_sms" JMS queue).
     *
     * @param order the dispatched order (must already have a courier)
     */
    private void generateWorkBill(Order order){
        // Build a fresh work bill in the "new" state.
        WorkBill workBill = new WorkBill();
        workBill.setType("新");
        workBill.setPickstate("新单");
        workBill.setBuildtime(new Date());
        workBill.setRemark(order.getRemark());
        // Random 4-digit SMS confirmation number.
        String smsNumber = RandomStringUtils.randomNumeric(4);
        workBill.setSmsNumber(smsNumber);
        workBill.setOrder(order);
        workBill.setCourier(order.getCourier());
        // Send the pickup notification SMS to the courier.
        smsTemplate.send("bos_sms", new MessageCreator() {
            @Override
            public Message createMessage(Session session) throws JMSException {
                MapMessage mapMessage = session.createMapMessage();
                mapMessage.setString("telephone",order.getCourier().getTelephone());
                mapMessage.setString("msg","短信序号:"+smsNumber+",取件地址:"+order.getSendAddress()
                        +",发件人:"+order.getSendName()+",发件人手机号:"+order.getSendMobile()
                        +",快递员捎话:"+order.getSendMobileMsg());
                return mapMessage;
            }
        });
        // Mark the work bill as "notified" and persist it.
        workBill.setPickstate("已通知");
        workBillRepository.save(workBill);
    }

    /**
     * Persists an order that was successfully auto-dispatched to a courier.
     *
     * @param order   the order to save
     * @param courier the courier to attach to the order
     */
    private void saveOrder(Order order,Courier courier){
        order.setCourier(courier);
        // Dispatch type "1": automatic dispatch.
        order.setOrderType("1");
        orderRepository.save(order);
    }
}
|
// Copyright Eagle Legacy Modernization LLC, 2010-date
// Original author: Steven A. O'Hara, Aug 8, 2011
package com.eagle.programmar.C;
import com.eagle.programmar.C.C_Function.C_Function_ParameterDefs;
import com.eagle.programmar.C.Symbols.C_Function_Definition;
import com.eagle.programmar.C.Symbols.C_Type_Definition;
import com.eagle.programmar.C.Terminals.C_Keyword;
import com.eagle.programmar.CMacro.CMacro_Processable;
import com.eagle.programmar.CMacro.CMacro_Syntax;
import com.eagle.tokens.TokenChooser;
import com.eagle.tokens.TokenList;
import com.eagle.tokens.TokenSequence;
import com.eagle.tokens.punctuation.PunctuationComma;
import com.eagle.tokens.punctuation.PunctuationLeftParen;
import com.eagle.tokens.punctuation.PunctuationRightParen;
import com.eagle.tokens.punctuation.PunctuationSemicolon;
import com.eagle.tokens.punctuation.PunctuationStar;
// Grammar node for a C typedef: "typedef" [ "interface" ] <what> ";"
// NOTE(review): field declaration order IS the parse order in this framework —
// do not reorder the public token fields.
public class C_TypeDef extends TokenSequence
{
    public C_Keyword TYPEDEF = new C_Keyword("typedef");
    public @OPT C_Keyword INTERFACE = new C_Keyword("interface");
    public C_TypeDef_What what;
    public PunctuationSemicolon semicolon;
    // The body of a typedef: a preprocessor macro, a plain data typedef,
    // or one of the two function-pointer forms below.
    public static class C_TypeDef_What extends TokenChooser
    {
        public @CHOICE @SYNTAX(CMacro_Syntax.class) CMacro_Processable macro;
        // Plain data typedef, e.g. "typedef struct foo *FooPtr, Foo;"
        public @CHOICE static class C_TypeDef_Data extends TokenSequence
        {
            public C_Type type;
            public @OPT PunctuationStar star;
            public C_Type_Definition typeName;
            public @OPT TokenList<C_TypeDefMore> more;
            // Additional comma-separated names in the same typedef.
            public static class C_TypeDefMore extends TokenSequence
            {
                public PunctuationComma comma;
                public @OPT PunctuationStar star;
                public C_Type_Definition typeName;
            }
        }
        // Function-pointer typedef with parens: "typedef T (*name)(params)"
        public @LAST static class C_TypeDef_Function extends TokenSequence
        {
            public C_Type returnType;
            public PunctuationLeftParen leftParen;
            public PunctuationStar star;
            public C_Function_Definition funcName;
            public PunctuationRightParen rightParen;
            public C_Function_ParameterDefs params;
        }
        // Just like C_TypeDef_Function but no parens ... I didn't know this was valid syntax!
        public @FIRST static class C_TypeDef_NoParensFunction extends TokenSequence
        {
            public C_Type returnType;
            public C_Function_Definition funcName;
            public C_Function_ParameterDefs params;
        }
    }
}
|
/*******************************************************************************
* Copyright 2021 Itzbenz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package Ozone.Patch;
import Atom.Reflect.Reflect;
import Atom.Utility.Digest;
import Atom.Utility.Random;
import Atom.Utility.Utility;
import Ozone.Experimental.Evasion.Identification;
import Ozone.Internal.AbstractModule;
import Ozone.Internal.InformationCenter;
import Ozone.Internal.Interface;
import Ozone.Manifest;
import Ozone.UI.*;
import Shared.SharedBoot;
import Shared.WarningHandler;
import arc.Core;
import arc.Events;
import arc.scene.ui.Dialog;
import arc.scene.ui.layout.Table;
import arc.util.Log;
import io.sentry.Sentry;
import io.sentry.UserFeedback;
import mindustry.Vars;
import mindustry.game.EventType;
import mindustry.gen.Icon;
import mindustry.gen.Tex;
import mindustry.graphics.Pal;
import mindustry.ui.Fonts;
import mindustry.ui.MobileButton;
import mindustry.ui.Styles;
import mindustry.ui.fragments.MenuFragment;
import java.lang.reflect.Field;
import static mindustry.Vars.ui;
// Patches the Mindustry UI after load: injects Ozone buttons into the main
// menu, Ozone settings into the game settings screen, and extra tools into
// the logic editor. Registered as an Ozone module that depends on
// Translation and VarsPatch.
public class UIPatch extends AbstractModule {
    // Shared dialog style used by all Ozone-created dialogs.
    public static Dialog.DialogStyle ozoneStyle;

    // Instance initializer: declares module dependencies before init() runs.
    {
        dependsOn.add(Translation.class);
        dependsOn.add(VarsPatch.class);
    }

    // Re-inserts the Ozone entries into the main menu. Called on every resize
    // because vanilla rebuilds the menu fragment at that point.
    private void onResize() {
        if (VarsPatch.menu != null) {
            // In mobile test mode, force the mobile menu layout to rebuild via
            // reflection (buildMobile is not public).
            if (Vars.testMobile) try {
                Reflect.getMethod(MenuFragment.class, "buildMobile", ui.menufrag).invoke(ui.menufrag);
            }catch (Throwable ignored) {}
            if (Vars.mobile || Vars.testMobile) {
                if (Core.graphics.isPortrait()) VarsPatch.menu.row();
                VarsPatch.menu.add(new MobileButton(Icon.info, Translation.get("Ozone"), () -> Manifest.modsMenu.show()));
            }else {
                if (!SharedBoot.isCore()) {
                    // Non-core builds get an update button whose label gains a
                    // random number of "!" while an update is available.
                    VarsPatch.menu.button(Translation.get("Update"), Icon.refresh, Updater::showUpdateDialog).growX().update(t -> {
                        if (Updater.releaseMap != null)
                            t.setText("Update" + Utility.repeatThisString("!", Random.getInt(5)));
                    }).bottom();
                }
                VarsPatch.menu.button(Translation.get("Ozone"), Icon.file, Manifest.modsMenu::show).growX().bottom();
            }
        }
    }

    // Builds the Ozone settings widgets into the given table: one checkbox per
    // boolean setting, one text field for every other static setting field.
    // NOTE(review): the single-letter name "h" gives no hint of purpose; it is
    // only referenced as this::h in init().
    void h(Table gameTable) {
        for (Field f : Manifest.getSettings()) {
            // Fully-qualified field name doubles as the translation key.
            String name = f.getDeclaringClass().getName() + "." + f.getName();
            gameTable.left();
            try {
                f.setAccessible(true);
                Class<?> type = f.getType();
                if (type.equals(boolean.class)) {
                    // Booleans become checkboxes that write straight back to
                    // the static field.
                    gameTable.check(Translation.get(name), (Boolean) f.get(null), b -> {
                        try {
                            f.set(null, b);
                        }catch (IllegalAccessException e) {
                            throw new RuntimeException(e);
                        }
                    }).left();
                    gameTable.row();
                    continue;
                }
                // Everything else becomes a labeled text field; the string is
                // parsed back into the field's primitive type on change.
                gameTable.label(() -> Translation.get(name) + ": ").left().growX().row();
                gameTable.field(f.get(null).toString(), s -> {
                    try {
                        Object o = Reflect.parseStringToPrimitive(s, f.getType());
                        if (o != null) f.set(null, o);
                    }catch (NumberFormatException t) {
                        Vars.ui.showException("Failed to parse", t);//100% user fault
                    }catch (Throwable t) {
                        WarningHandler.handleMindustry(t);
                    }
                }).growX().left().row();
            }catch (Throwable t) {
                WarningHandler.handleMindustry(t);
                Log.err("Failed to load settings");
            }
        }
    }

    // Creates the Ozone dialogs and wires up the settings UI, logic-editor
    // buttons and the resize hook. Order matters: the individual dialogs are
    // created before ModsMenu, which lists them.
    @Override
    public void init() throws Throwable {
        ozoneStyle = new Dialog.DialogStyle() {
            {
                stageBackground = Styles.none;
                titleFont = Fonts.def;
                background = Tex.windowEmpty;
                titleFontColor = Pal.accent;
            }
        };
        // Headless/test runs have no UI to patch.
        if (SharedBoot.test) return;
        Manifest.taskList = new TaskList();
        Manifest.warning = new Warning();
        Manifest.bundleViewer = new BundleViewer();
        Manifest.commFrag = new CommandsListFrag();
        Manifest.worldInformation = new WorldInformation();
        Manifest.playSettings = new OzonePlaySettings();
        Manifest.menu = new OzoneMenu(Translation.get("ozone.hud"), ozoneStyle);
        Manifest.envInf = new EnvironmentInformation();
        Manifest.logView = new LogView();
        Manifest.uiDebug = new UILayout();
        Manifest.experiment = new ExperimentDialog();
        ModsMenu.add(new VirtualControllerDialog());
        if (SharedBoot.debug) {
            // Debug-only tools.
            ModsMenu.add(new ModuleFrag());
            ModsMenu.add(new CommandsListDebug());
        }
        Manifest.modsMenu = new ModsMenu();
        Manifest.commFrag.build(Vars.ui.hudGroup);
        // Append the Ozone section to the vanilla game-settings table.
        ui.settings.game.row();
        ui.settings.game.table(gameTable -> {
            gameTable.row();
            gameTable.table(this::h).growX().row();
            // Persist Ozone settings whenever the settings dialog closes.
            Vars.ui.settings.hidden(Manifest::saveSettings);
            gameTable.button("Ozone Mods Menu", Manifest.modsMenu::show).growX().row();
            gameTable.button("Save Ozone Settings", Manifest::saveSettings).growX().row();
            gameTable.button("Reset UID", () -> {
                try {
                    Identification.changeID();
                    Vars.ui.showInfo("Successful");
                }catch (Throwable t) {
                    Vars.ui.showException(t);
                    WarningHandler.handleProgrammerFault(t);
                }
            }).growX();
        }).center();
        // Logic editor: show hash codes of the current logic program.
        ui.logic.buttons.button("Show Hash", Icon.list, () -> {
            new ScrollableDialog("Hash Code") {
                @Override
                protected void setup() {
                    String src = ui.logic.canvas.save();
                    int hash = src.hashCode();
                    long lhash = Digest.longHash(src);
                    table.button(hash + "", () -> {
                        Interface.copy(hash + "");
                    }).tooltip("Copy").growY();
                    table.button(lhash + "", () -> {
                        Interface.copy(lhash + "");
                    }).tooltip("Copy").growY();
                }
            }.show();
        }).size(210f, 64f);
        // Logic editor: report the current logic program to Sentry with a
        // user-supplied reason; only hashes of the source are attached.
        ui.logic.buttons.button("Report to Ozone-Sentry", Icon.fileText, () -> {
            Interface.showInput("Reason ?", s -> {
                String src = ui.logic.canvas.save();
                long Lhash = Digest.longHash(src);
                int hash = src.hashCode();
                UserFeedback feedback = new UserFeedback(Sentry.captureMessage("Logic-Code-Report-" + hash));
                feedback.setName("Reporter-" + Vars.player.name.hashCode());
                StringBuilder sb = new StringBuilder();
                sb.append("LHash:").append(Lhash).append("\n");
                sb.append("Hash:").append(hash).append("\n");
                sb.append("Reason:").append(s).append("\n");
                if (Vars.net.active())
                    sb.append("server:").append(InformationCenter.getCurrentServerIP()).append(":").append(InformationCenter.getCurrentServerPort());
                feedback.setComments(sb.toString());
                Sentry.captureUserFeedback(feedback);
                Interface.toast("Sent: " + "Hash-" + hash);
            });
        }).size(210f, 64f);
        // Rebuild the menu entries whenever the window is resized, and once now.
        Events.on(EventType.ResizeEvent.class, c -> {
            onResize();
        });
        onResize();
    }
}
|
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import android.graphics.Matrix;
import androidx.media3.common.util.UnstableApi;
/**
* Specifies a 3x3 transformation {@link Matrix} to apply in the vertex shader for each frame.
*
* <p>The matrix is applied to points given in normalized device coordinates (-1 to 1 on x and y
* axes). Transformed pixels that are moved outside of the normal device coordinate range are
* clipped.
*
* <p>Output frame pixels outside of the transformed input frame will be black.
*/
@UnstableApi
public interface MatrixTransformation extends GlMatrixTransformation {
  /**
   * Returns the 3x3 transformation {@link Matrix} to apply to the frame with the given timestamp.
   *
   * @param presentationTimeUs The presentation timestamp of the frame, in microseconds.
   */
  Matrix getMatrix(long presentationTimeUs);

  /**
   * Converts the {@link Matrix} returned by {@link #getMatrix(long)} into the float-array form
   * used by the GL pipeline, via {@code MatrixUtils.getGlMatrixArray}.
   */
  @Override
  default float[] getGlMatrixArray(long presentationTimeUs) {
    return MatrixUtils.getGlMatrixArray(getMatrix(presentationTimeUs));
  }
}
|
package com.charlyghislain.authenticator.management.web;
import com.charlyghislain.authenticator.domain.domain.util.AuthenticatorConstants;
import com.charlyghislain.authenticator.management.api.error.AuthenticatorManagementWebException;
import com.charlyghislain.authenticator.management.web.provider.*;
import org.checkerframework.checker.nullness.qual.NonNull;
import javax.annotation.security.DeclareRoles;
import javax.annotation.security.RolesAllowed;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
import java.util.HashSet;
import java.util.Set;
@ApplicationPath("/management")
@DeclareRoles({AuthenticatorConstants.ROLE_APP_MANAGEMENT})
@RolesAllowed(AuthenticatorConstants.ROLE_APP_MANAGEMENT)
public class AuthenticatorManagementWebApplication extends Application {
    @NonNull
    @Override
    public Set<Class<?>> getClasses() {
        // JAX-RS resources and providers registered for the management API.
        Class<?>[] components = {
                ConnectivityResourceController.class,
                UserResourceController.class,
                AuthenticatorManagementValidationExceptionMapper.class,
                AuthenticatorManagementWebExceptionMapper.class,
                CrossOriginResourceSharingRequestFilter.class,
                CrossOriginResourceSharingResponseFilter.class,
                ThrowableExceptionMapper.class,
                WebApplicationExceptionMapper.class,
        };
        Set<Class<?>> registered = new HashSet<>();
        for (Class<?> component : components) {
            registered.add(component);
        }
        return registered;
    }
}
|
/**************************************************************************************
https://camel-extra.github.io
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
http://www.gnu.org/licenses/gpl-2.0-standalone.html
***************************************************************************************/
package org.apacheextras.camel.examples.cics.commareas;
/**
 * Contract for objects that expose a CICS COMMAREA payload both as text and
 * as the underlying byte buffer.
 */
public interface CommArea {
    /** Returns the COMMAREA content as a {@link String}. */
    String getData();
    /** Returns the COMMAREA content as a raw byte buffer. */
    byte[] getDataBuffer();
}
|
package org.apache.helix.rest.server.resources.helix;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import org.I0Itec.zkclient.exception.ZkNoNodeException;
import org.apache.helix.task.TaskDriver;
import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * REST resource exposing the per-task-partition user content store of a Helix
 * workflow job under
 * {@code /clusters/{clusterId}/workflows/{workflowName}/jobs/{jobName}/tasks}.
 */
@Path("/clusters/{clusterId}/workflows/{workflowName}/jobs/{jobName}/tasks")
public class TaskAccessor extends AbstractHelixResource {
  // final: the logger is initialized once and never reassigned.
  private static final Logger _logger = LoggerFactory.getLogger(TaskAccessor.class.getName());

  /**
   * Reads the user content map of a single task partition.
   *
   * @param clusterId cluster containing the workflow
   * @param workflowName workflow containing the job
   * @param jobName job containing the task partition
   * @param taskPartitionId task partition whose content store is fetched
   * @return 200 with the content map; 404 if the workflow, job or store does
   *         not exist yet; 500 on unexpected errors
   */
  @GET
  @Path("{taskPartitionId}/userContent")
  public Response getTaskUserContent(
      @PathParam("clusterId") String clusterId,
      @PathParam("workflowName") String workflowName,
      @PathParam("jobName") String jobName,
      @PathParam("taskPartitionId") String taskPartitionId
  ) {
    TaskDriver taskDriver = getTaskDriver(clusterId);
    try {
      Map<String, String> contentStore =
          taskDriver.getTaskUserContentMap(workflowName, jobName, taskPartitionId);
      if (contentStore == null) {
        return notFound(String.format(
            "Unable to find content store. Workflow (%s) or Job (%s) or Task content store (%s) not created yet.",
            workflowName, jobName, taskPartitionId));
      }
      return JSONRepresentation(contentStore);
    } catch (ZkNoNodeException e) {
      // The ZK path for the workflow/job is missing entirely.
      return notFound(String.format(
          "Unable to find content store. Workflow (%s) or Job (%s) not created yet.",
          workflowName, jobName));
    } catch (Exception e) {
      return serverError(e);
    }
  }

  /**
   * Updates the user content map of a single task partition.
   *
   * @param clusterId cluster containing the workflow
   * @param workflowName workflow containing the job
   * @param jobName job containing the task partition
   * @param taskPartitionId task partition whose content store is updated
   * @param commandStr optional command; empty or absent defaults to "update"
   * @param content JSON body deserialized to a {@code Map<String, String>}
   * @return 200 on success; 400 on malformed content or unknown command;
   *         404 if the workflow or job does not exist; 500 otherwise
   */
  @POST
  @Path("{taskPartitionId}/userContent")
  public Response updateTaskUserContent(
      @PathParam("clusterId") String clusterId,
      @PathParam("workflowName") String workflowName,
      @PathParam("jobName") String jobName,
      @PathParam("taskPartitionId") String taskPartitionId,
      @QueryParam("command") String commandStr,
      String content
  ) {
    // Deserialize the payload first so malformed content fails fast with 400.
    // (The previous Collections.emptyMap() initial value was a dead store.)
    Map<String, String> contentMap;
    try {
      contentMap = OBJECT_MAPPER.readValue(content, new TypeReference<Map<String, String>>() {
      });
    } catch (IOException e) {
      return badRequest(String
          .format("Content %s cannot be deserialized to Map<String, String>. Err: %s", content,
              e.getMessage()));
    }
    // An absent or empty command defaults to "update".
    Command cmd;
    try {
      cmd = (commandStr == null || commandStr.isEmpty())
          ? Command.update
          : Command.valueOf(commandStr);
    } catch (IllegalArgumentException ie) {
      return badRequest(String.format("Invalid command: %s. Err: %s", commandStr, ie.getMessage()));
    }
    TaskDriver driver = getTaskDriver(clusterId);
    try {
      switch (cmd) {
        case update:
          driver.addOrUpdateTaskUserContentMap(workflowName, jobName, taskPartitionId, contentMap);
          return OK();
        default:
          return badRequest(String.format("Command \"%s\" is not supported!", cmd));
      }
    } catch (NullPointerException npe) {
      // ZkCacheBasedDataAccessor would throw npe if workflow or job does not exist
      return notFound(
          String.format("Workflow (%s) or job (%s) does not exist", workflowName, jobName));
    } catch (Exception e) {
      _logger.error("Failed to update user content store", e);
      return serverError(e);
    }
  }
}
|
package gms.core.signaldetection.onsettimerefinement;
import gms.shared.mechanisms.configuration.util.ObjectSerialization;
import gms.shared.mechanisms.objectstoragedistribution.coi.waveforms.commonobjects.Waveform;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Instant;
import java.util.Map;
import java.util.Objects;
/**
 * Onset-time refinement plugin backed by the AIC algorithm: deserializes the
 * plugin parameter map and delegates to {@code AicOnsetTimeRefinementAlgorithm}.
 */
public class AicOnsetTimeRefinementPlugin implements OnsetTimeRefinementPlugin {
  private static final String PLUGIN_NAME = "aicOnsetTimeRefinementPlugin";
  // NOTE(review): this logger is currently unused in this class.
  private static final Logger logger = LoggerFactory.getLogger(AicOnsetTimeRefinementPlugin.class);

  /** Returns the registry name of this plugin. */
  @Override
  public String getName() {
    return PLUGIN_NAME;
  }

  /**
   * Refines the given arrival time against the waveform using the AIC
   * algorithm, configured from the supplied parameter field map.
   */
  @Override
  public Instant refineOnsetTime(Waveform waveform, Instant arrivalTime,
      Map<String, Object> pluginParams) {
    Objects.requireNonNull(waveform, "(Waveform) Input waveform cannot be null.");
    Objects.requireNonNull(arrivalTime, "(Instant) Arrival time cannot be null.");
    // Convert the generic field map into the algorithm's typed parameters.
    AicOnsetTimeRefinementParameters refinementParameters =
        ObjectSerialization.fromFieldMap(pluginParams, AicOnsetTimeRefinementParameters.class);
    return AicOnsetTimeRefinementAlgorithm
        .refineOnsetTime(waveform, arrivalTime, refinementParameters);
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.receiver.clr.provider.handler;
import org.apache.skywalking.apm.network.common.v3.CPU;
import org.apache.skywalking.apm.network.language.agent.v3.CLRMetric;
import org.apache.skywalking.apm.network.language.agent.v3.ClrGC;
import org.apache.skywalking.apm.network.language.agent.v3.ClrThread;
import org.apache.skywalking.oap.server.core.Const;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.IDManager;
import org.apache.skywalking.oap.server.core.source.ServiceInstanceCLRCPU;
import org.apache.skywalking.oap.server.core.source.ServiceInstanceCLRGC;
import org.apache.skywalking.oap.server.core.source.ServiceInstanceCLRThread;
import org.apache.skywalking.oap.server.core.source.SourceReceiver;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Converts CLR (.NET agent) metric samples into {@code ServiceInstanceCLR*}
 * sources (CPU, GC, thread pool) and forwards them to the core
 * {@link SourceReceiver} for aggregation.
 */
public class CLRSourceDispatcher {
    // NOTE(review): currently unused; kept for parity with sibling dispatchers.
    private static final Logger LOGGER = LoggerFactory.getLogger(CLRSourceDispatcher.class);
    private final SourceReceiver sourceReceiver;

    public CLRSourceDispatcher(ModuleManager moduleManager) {
        sourceReceiver = moduleManager.find(CoreModule.NAME).provider().getService(SourceReceiver.class);
    }

    /**
     * Dispatches one CLR metric sample as CPU, GC and thread sources.
     *
     * @param service          service name the instance belongs to
     * @param serviceInstance  reporting service instance name
     * @param minuteTimeBucket minute-precision time bucket of the sample
     * @param metrics          the reported CLR metrics
     */
    void sendMetric(String service, String serviceInstance, long minuteTimeBucket, CLRMetric metrics) {
        final String serviceId = IDManager.ServiceID.buildId(service, true);
        final String serviceInstanceId = IDManager.ServiceInstanceID.buildId(serviceId, serviceInstance);
        CPU cpu = metrics.getCpu();
        ServiceInstanceCLRCPU serviceInstanceCLRCPU = new ServiceInstanceCLRCPU();
        serviceInstanceCLRCPU.setUsePercent(cpu.getUsagePercent());
        serviceInstanceCLRCPU.setTimeBucket(minuteTimeBucket);
        serviceInstanceCLRCPU.setId(serviceInstanceId);
        // NOTE(review): CPU uses an empty name while GC/thread use the instance
        // name -- confirm whether this asymmetry is intentional.
        serviceInstanceCLRCPU.setName(Const.EMPTY_STRING);
        serviceInstanceCLRCPU.setServiceId(serviceId);
        serviceInstanceCLRCPU.setServiceName(service);
        sourceReceiver.receive(serviceInstanceCLRCPU);
        ClrGC gc = metrics.getGc();
        ServiceInstanceCLRGC serviceInstanceCLRGC = new ServiceInstanceCLRGC();
        serviceInstanceCLRGC.setGen0CollectCount(gc.getGen0CollectCount());
        serviceInstanceCLRGC.setGen1CollectCount(gc.getGen1CollectCount());
        serviceInstanceCLRGC.setGen2CollectCount(gc.getGen2CollectCount());
        serviceInstanceCLRGC.setHeapMemory(gc.getHeapMemory());
        serviceInstanceCLRGC.setTimeBucket(minuteTimeBucket);
        serviceInstanceCLRGC.setId(serviceInstanceId);
        serviceInstanceCLRGC.setName(serviceInstance);
        serviceInstanceCLRGC.setServiceId(serviceId);
        serviceInstanceCLRGC.setServiceName(service);
        sourceReceiver.receive(serviceInstanceCLRGC);
        ClrThread thread = metrics.getThread();
        ServiceInstanceCLRThread serviceInstanceCLRThread = new ServiceInstanceCLRThread();
        serviceInstanceCLRThread.setAvailableCompletionPortThreads(thread.getAvailableCompletionPortThreads());
        serviceInstanceCLRThread.setAvailableWorkerThreads(thread.getAvailableWorkerThreads());
        serviceInstanceCLRThread.setMaxCompletionPortThreads(thread.getMaxCompletionPortThreads());
        serviceInstanceCLRThread.setMaxWorkerThreads(thread.getMaxWorkerThreads());
        serviceInstanceCLRThread.setTimeBucket(minuteTimeBucket);
        serviceInstanceCLRThread.setId(serviceInstanceId);
        // Fixed: name/serviceName were swapped here (name got the service and
        // serviceName the instance), inconsistent with the GC source above.
        serviceInstanceCLRThread.setName(serviceInstance);
        serviceInstanceCLRThread.setServiceId(serviceId);
        serviceInstanceCLRThread.setServiceName(service);
        sourceReceiver.receive(serviceInstanceCLRThread);
    }
}
|
package com.example.yuyintest;
import java.util.Random;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import com.iflytek.cloud.RecognizerListener;
import com.iflytek.cloud.RecognizerResult;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.SpeechSynthesizer;
import com.iflytek.cloud.SpeechUtility;
import com.iflytek.cloud.SynthesizerListener;
import com.iflytek.cloud.ui.RecognizerDialog;
import com.iflytek.cloud.ui.RecognizerDialogListener;
/**
 * Demo activity for the iFlytek speech SDK: text-to-speech synthesis
 * (yuyinhecheng), speech dictation (yuyintingxie), and the SDK's built-in
 * recognizer dialog.
 */
public class MainActivity extends Activity {
    // Listener for the RecognizerDialog flow; results arrive as JSON strings.
    private RecognizerDialogListener mRecognizerDialogListener = new RecognizerDialogListener() {
        @Override
        public void onResult(RecognizerResult arg0, boolean arg1) {
            String resultString = arg0.getResultString();
            System.out.println("resultString=" + resultString);
        }
        @Override
        public void onError(SpeechError arg0) {
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Initialize the iFlytek SDK with this application's app id.
        SpeechUtility.createUtility(getApplicationContext(),
                SpeechConstant.APPID + "=5839464d");
    }

    /** Button handler; currently runs the TTS demo. */
    public void clickA(View v) {
        // showDialog();
        // yuyintingxie();
        yuyinhecheng();
    }

    /** Text-to-speech demo. */
    private void yuyinhecheng() {
        // 1. Create the SpeechSynthesizer; the second argument is an
        // InitListener, needed for local (offline) synthesis.
        SpeechSynthesizer mTts = SpeechSynthesizer.createSynthesizer(
                getApplicationContext(), null);
        // 2. Configure synthesis parameters; see the SpeechSynthesizer class in
        // the "MSC Reference Manual".
        // Select the voice (more online voices are listed in Appendix 13.2).
        mTts.setParameter(SpeechConstant.VOICE_NAME, "xiaorong"); // voice name
        mTts.setParameter(SpeechConstant.SPEED, "50");// speech rate
        mTts.setParameter(SpeechConstant.VOLUME, "100");// volume, range 0~100
        mTts.setParameter(SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_CLOUD); // use the cloud engine
        // Optionally set where to save the synthesized audio (default
        // "./sdcard/iflytek.pcm"). Saving to the SD card requires the write
        // permission in AndroidManifest.xml. Only pcm and wav are supported;
        // leave the line commented out to skip saving.
        // mTts.setParameter(SpeechConstant.TTS_AUDIO_PATH, "./sdcard/iflytek.pcm");
        // 3. Start synthesis.
        mTts.startSpeaking("黑马程序员,让世界聆听我们的声音", mSynListener);
    }

    // Synthesis listener.
    private SynthesizerListener mSynListener = new SynthesizerListener() {
        // Called when the session completes; error is null on success.
        public void onCompleted(SpeechError error) {
        }
        // Buffering progress: percent is 0~100; beginPos/endPos are the
        // buffered audio's start/end positions in the text; info is extra data.
        public void onBufferProgress(int percent, int beginPos, int endPos,
                String info) {
        }
        // Playback started.
        public void onSpeakBegin() {
        }
        // Playback paused.
        public void onSpeakPaused() {
        }
        // Playback progress: percent is 0~100; beginPos/endPos are the played
        // audio's start/end positions in the text.
        public void onSpeakProgress(int percent, int beginPos, int endPos) {
        }
        // Playback resumed.
        public void onSpeakResumed() {
        }
        // Session event callback.
        public void onEvent(int arg0, int arg1, int arg2, Bundle arg3) {
        }
    };

    /** Speech dictation demo. */
    private void yuyintingxie() {
        // 1. Create the SpeechRecognizer; the second argument is an
        // InitListener, needed for local recognition.
        SpeechRecognizer mIat = SpeechRecognizer.createRecognizer(
                getApplicationContext(), null);
        // 2. Configure dictation parameters; see the SpeechConstant class in
        // the "MSC Reference Manual".
        mIat.setParameter(SpeechConstant.DOMAIN, "iat");
        mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
        // Fixed: the accent value had a trailing space ("mandarin "), unlike
        // the "mandarin" used in showDialog(); the SDK matches the value
        // literally, so the stray space would break accent selection.
        mIat.setParameter(SpeechConstant.ACCENT, "mandarin");
        // 3. Start listening.
        mIat.startListening(mRecoListener);
    }

    // Dictation listener.
    private RecognizerListener mRecoListener = new RecognizerListener() {
        // Result callback (JSON; see Appendix 13.1). Results normally arrive
        // over several onResult calls whose contents must be concatenated;
        // isLast is true when the session ends. See the demo's JsonParser for
        // parsing the JSON.
        public void onResult(RecognizerResult results, boolean isLast) {
            System.out.println("result:" + results.getResultString());
        }
        // Session error callback.
        public void onError(SpeechError error) {
            // Print the error description.
            System.out.println("error:" + error.getPlainDescription(true));
        }
        // Recording started.
        public void onBeginOfSpeech() {
        }
        // volume is 0~30; data is the raw audio.
        public void onVolumeChanged(int volume, byte[] data) {
        }
        // Recording finished.
        public void onEndOfSpeech() {
        }
        // Extension hook.
        public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
        }
    };

    /** Shows the SDK's built-in recognizer dialog. */
    private void showDialog() {
        // 1. Create the RecognizerDialog.
        RecognizerDialog mDialog = new RecognizerDialog(this, null);
        // 2. Set accent, language, etc.
        mDialog.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
        mDialog.setParameter(SpeechConstant.ACCENT, "mandarin");
        // To use the dialog for semantic understanding, the following
        // parameters must be set; onResult then returns semantic results.
        // mDialog.setParameter("asr_sch", "1");
        // mDialog.setParameter("nlp_version", "2.0");
        // 3. Set the result listener.
        mDialog.setListener(mRecognizerDialogListener);
        // 4. Show the dialog and accept speech input.
        mDialog.show();
    }
}
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2018.07.19 at 07:31:05 PM BRT
//
package org.jabref.logic.importer.fileformat.endnote;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB binding for the EndNote XML {@code <dates>} element, grouping a
 * reference's year, publication dates ({@code <pub-dates>}) and copyright
 * dates ({@code <copyright-dates>}).
 *
 * <p>Generated code; will be regenerated from the schema, so avoid hand edits.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "year",
    "pubDates",
    "copyrightDates"
})
@XmlRootElement(name = "dates")
public class Dates {
    protected Year year;
    @XmlElement(name = "pub-dates")
    protected PubDates pubDates;
    @XmlElement(name = "copyright-dates")
    protected CopyrightDates copyrightDates;
    /**
     * Gets the value of the year property.
     *
     * @return
     *     possible object is
     *     {@link Year }
     *
     */
    public Year getYear() {
        return year;
    }
    /**
     * Sets the value of the year property.
     *
     * @param value
     *     allowed object is
     *     {@link Year }
     *
     */
    public void setYear(Year value) {
        this.year = value;
    }
    /**
     * Gets the value of the pubDates property.
     *
     * @return
     *     possible object is
     *     {@link PubDates }
     *
     */
    public PubDates getPubDates() {
        return pubDates;
    }
    /**
     * Sets the value of the pubDates property.
     *
     * @param value
     *     allowed object is
     *     {@link PubDates }
     *
     */
    public void setPubDates(PubDates value) {
        this.pubDates = value;
    }
    /**
     * Gets the value of the copyrightDates property.
     *
     * @return
     *     possible object is
     *     {@link CopyrightDates }
     *
     */
    public CopyrightDates getCopyrightDates() {
        return copyrightDates;
    }
    /**
     * Sets the value of the copyrightDates property.
     *
     * @param value
     *     allowed object is
     *     {@link CopyrightDates }
     *
     */
    public void setCopyrightDates(CopyrightDates value) {
        this.copyrightDates = value;
    }
}
|
/*
* Copyright (c) 2019-2021 GeyserMC. http://geysermc.org
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @author GeyserMC
* @link https://github.com/GeyserMC/Geyser
*/
package org.geysermc.geyser.entity.type.living.animal;
import com.github.steveice10.mc.protocol.data.game.entity.metadata.type.BooleanEntityMetadata;
import com.nukkitx.math.vector.Vector3f;
import com.nukkitx.protocol.bedrock.data.entity.EntityFlag;
import org.geysermc.geyser.entity.type.Entity;
import org.geysermc.geyser.entity.EntityDefinition;
import org.geysermc.geyser.session.GeyserSession;
import org.geysermc.geyser.registry.type.ItemMapping;
import java.util.UUID;
/**
 * Bedrock representation of the Java strider. Striders are fire-immune and,
 * when "cold" (set via metadata), shake; a strider riding another strider
 * mirrors its vehicle's shaking state instead of its own.
 */
public class StriderEntity extends AnimalEntity {
    // Whether the Java server flagged this strider as cold.
    private boolean cold = false;

    public StriderEntity(GeyserSession session, int entityId, long geyserId, UUID uuid, EntityDefinition<?> definition, Vector3f position, Vector3f motion, float yaw, float pitch, float headYaw) {
        super(session, entityId, geyserId, uuid, definition, position, motion, yaw, pitch, headYaw);
        setFlag(EntityFlag.FIRE_IMMUNE, true);
        setFlag(EntityFlag.BREATHING, true);
    }

    /** Updates the cold state from the Java metadata. */
    public void setCold(BooleanEntityMetadata entityMetadata) {
        cold = entityMetadata.getPrimitiveValue();
    }

    /** Mirrors the Java saddled state onto the Bedrock flag. */
    public void setSaddled(BooleanEntityMetadata entityMetadata) {
        setFlag(EntityFlag.SADDLED, entityMetadata.getPrimitiveValue());
    }

    @Override
    public void updateBedrockMetadata() {
        if (getFlag(EntityFlag.RIDING)) {
            // While riding, copy the vehicle's shaking state so the whole
            // stack shakes (or not) together.
            boolean mirroredShaking = vehicle instanceof StriderEntity && vehicle.getFlag(EntityFlag.SHAKING);
            setFlag(EntityFlag.BREATHING, !mirroredShaking);
            setFlag(EntityFlag.SHAKING, mirroredShaking);
        } else {
            setFlag(EntityFlag.BREATHING, !cold);
            setFlag(EntityFlag.SHAKING, isShaking());
        }
        // Propagate to any passengers so they pick up the new state.
        for (Entity passenger : passengers) {
            if (passenger != null) {
                passenger.updateBedrockMetadata();
            }
        }
        super.updateBedrockMetadata();
    }

    @Override
    protected boolean isShaking() {
        return cold || super.isShaking();
    }

    @Override
    public boolean canEat(String javaIdentifierStripped, ItemMapping mapping) {
        return javaIdentifierStripped.equals("warped_fungus");
    }
}
|
package com.machiav3lli.backup.schedules.db;
import android.content.SharedPreferences;
import androidx.room.Entity;
import androidx.room.PrimaryKey;
import androidx.room.TypeConverter;
import androidx.room.TypeConverters;
import com.machiav3lli.backup.Constants;
import com.machiav3lli.backup.schedules.SchedulingException;
import org.jetbrains.annotations.NotNull;
/**
* Holds scheduling data
*/
@Entity
public class Schedule {
@PrimaryKey(autoGenerate = true)
private long id;
private boolean enabled;
private int hour;
private int interval;
private long placed;
@TypeConverters(ModeConverter.class)
private Mode mode;
@TypeConverters(SubmodeConverter.class)
private Submode submode;
private long timeUntilNextEvent;
private boolean excludeSystem;
    /** Creates a schedule with the defaults {@code Mode.ALL} and {@code Submode.BOTH}. */
    public Schedule() {
        mode = Mode.ALL;
        submode = Submode.BOTH;
    }
// TODO: the shared preferences files should be replaced by a single
// database table
/**
* Get scheduling data from a preferences file.
*
* @param preferences preferences object
* @param number number of schedule to fetch
* @return scheduling data object
*/
public static Schedule fromPreferences(SharedPreferences preferences,
long number) throws SchedulingException {
final Schedule schedule = new Schedule();
schedule.id = number;
schedule.enabled = preferences.getBoolean(Constants.PREFS_SCHEDULES_ENABLED + number, false);
schedule.hour = preferences.getInt(Constants.PREFS_SCHEDULES_HOUROFDAY + number, 0);
schedule.interval = preferences.getInt(Constants.PREFS_SCHEDULES_INTERVAL + number, 1);
schedule.placed = preferences.getLong(Constants.PREFS_SCHEDULES_TIMEPLACED + number, 0);
schedule.mode = Mode.intToMode(preferences.getInt(Constants.PREFS_SCHEDULES_MODE + number, 0));
schedule.submode = Submode.intToSubmode(preferences.getInt(Constants.PREFS_SCHEDULES_SUBMODE + number, 0));
schedule.excludeSystem = preferences.getBoolean(Constants.PREFS_SCHEDULES_EXCLUDESYSTEM + number, false);
return schedule;
}
    // --- Simple accessors for the persisted scheduling fields ---
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public boolean isEnabled() {
        return enabled;
    }
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }
    public int getHour() {
        return hour;
    }
    public void setHour(int hour) {
        this.hour = hour;
    }
    public int getInterval() {
        return interval;
    }
    public void setInterval(int interval) {
        this.interval = interval;
    }
    public long getPlaced() {
        return placed;
    }
    public void setPlaced(long placed) {
        this.placed = placed;
    }
    public Mode getMode() {
        return mode;
    }
    public void setMode(Mode mode) {
        this.mode = mode;
    }
    // Converts the persisted int representation; throws if the value is unknown.
    public void setMode(int mode) throws SchedulingException {
        this.mode = Mode.intToMode(mode);
    }
    public Submode getSubmode() {
        return submode;
    }
    // Converts the persisted int representation; throws if the value is unknown.
    public void setSubmode(int submode) throws SchedulingException {
        this.submode = Submode.intToSubmode(submode);
    }
    public void setSubmode(Submode submode) {
        this.submode = submode;
    }
    public long getTimeUntilNextEvent() {
        return timeUntilNextEvent;
    }
    public void setTimeUntilNextEvent(long timeUntilNextEvent) {
        this.timeUntilNextEvent = timeUntilNextEvent;
    }
    public boolean isExcludeSystem() {
        return excludeSystem;
    }
    public void setExcludeSystem(boolean excludeSystem) {
        this.excludeSystem = excludeSystem;
    }
/**
 * Persists every field of this schedule into the given preferences file,
 * keyed by the schedule id, and applies the edit asynchronously.
 *
 * @param preferences shared preferences object to write to
 */
public void persist(SharedPreferences preferences) {
    // SharedPreferences.Editor supports fluent chaining; apply() commits
    // asynchronously (same semantics as the original edit/apply sequence).
    preferences.edit()
            .putBoolean(Constants.PREFS_SCHEDULES_ENABLED + id, enabled)
            .putInt(Constants.PREFS_SCHEDULES_HOUROFDAY + id, hour)
            .putInt(Constants.PREFS_SCHEDULES_INTERVAL + id, interval)
            .putLong(Constants.PREFS_SCHEDULES_TIMEPLACED + id, placed)
            .putInt(Constants.PREFS_SCHEDULES_MODE + id, mode.value)
            .putInt(Constants.PREFS_SCHEDULES_SUBMODE + id, submode.value)
            .putBoolean(Constants.PREFS_SCHEDULES_EXCLUDESYSTEM + id, excludeSystem)
            .apply();
}
/**
 * Two schedules are equal when every persisted field matches.
 * Exact-class comparison (not instanceof) keeps symmetry with subclasses.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final Schedule other = (Schedule) o;
    return id == other.id
            && enabled == other.enabled
            && hour == other.hour
            && interval == other.interval
            && placed == other.placed
            && excludeSystem == other.excludeSystem
            && mode == other.mode
            && submode == other.submode;
}
/**
 * Hash over the same fields as {@link #equals(Object)}, using the classic
 * 31-multiplier accumulation. Long fields are truncated to int on purpose.
 */
@Override
public int hashCode() {
    int result = 7;
    result = 31 * result + (int) id;
    result = 31 * result + (enabled ? 1 : 0);
    result = 31 * result + hour;
    result = 31 * result + interval;
    result = 31 * result + (int) placed;
    result = 31 * result + mode.hashCode();
    result = 31 * result + submode.hashCode();
    result = 31 * result + (excludeSystem ? 1 : 0);
    return result;
}
/** Debug representation listing every persisted field. */
@NotNull
@Override
public String toString() {
    // StringBuilder form; produces exactly the same string as concatenation.
    final StringBuilder sb = new StringBuilder("Schedule{");
    sb.append("id=").append(id)
            .append(", enabled=").append(enabled)
            .append(", hour=").append(hour)
            .append(", interval=").append(interval)
            .append(", placed=").append(placed)
            .append(", mode=").append(mode)
            .append(", submode=").append(submode)
            .append(", excludeSystem=").append(excludeSystem)
            .append('}');
    return sb.toString();
}
/**
 * Scheduling mode: which packages to include in the scheduled backup.
 */
public enum Mode {
    ALL(0),
    USER(1),
    SYSTEM(2),
    NEW_UPDATED(3),
    CUSTOM(4);

    // Integer representation written to disk by older versions.
    private final int value;

    Mode(int value) {
        this.value = value;
    }

    /**
     * Convert from int to mode. This method exists to handle the
     * transition from having mode stored as integers to representing
     * it as an enum.
     *
     * @param mode number written to disk
     * @return corresponding mode
     * @throws SchedulingException if the number matches no known mode
     */
    public static Mode intToMode(int mode) throws SchedulingException {
        // Scan the declared constants instead of a hand-written switch.
        for (Mode candidate : values()) {
            if (candidate.value == mode) {
                return candidate;
            }
        }
        throw new SchedulingException(String.format(
                "Unknown mode %s", mode));
    }

    public int getValue() {
        return value;
    }
}
/**
 * Scheduling submode: whether to include apk, data or both in the backup.
 */
public enum Submode {
    APK(0),
    DATA(1),
    BOTH(2);

    // Integer representation written to disk by older versions.
    private final int value;

    Submode(int value) {
        this.value = value;
    }

    /**
     * Convert from int to submode. This method exists to handle the
     * transition from having submode stored as integers to representing
     * it as an enum.
     *
     * @param submode number written to disk
     * @return corresponding submode
     * @throws SchedulingException if the number matches no known submode
     */
    public static Submode intToSubmode(int submode) throws SchedulingException {
        // Scan the declared constants instead of a hand-written switch.
        for (Submode candidate : values()) {
            if (candidate.value == submode) {
                return candidate;
            }
        }
        throw new SchedulingException(String.format(
                "Unknown submode %s", submode));
    }

    public int getValue() {
        return value;
    }
}
/**
 * Fluent builder for {@link Schedule} instances.
 */
public static class Builder {
    final Schedule schedule;

    public Builder() {
        schedule = new Schedule();
    }

    /**
     * Sets the schedule id. Takes a {@code long} for consistency with
     * {@code Schedule.id} (which is a {@code long}, see {@code getId()});
     * existing callers passing an {@code int} keep compiling via implicit
     * widening.
     */
    public Builder withId(long id) {
        schedule.id = id;
        return this;
    }

    public Builder withEnabled(boolean enabled) {
        schedule.enabled = enabled;
        return this;
    }

    /** @param hour hour of day (0-23) at which the backup should run */
    public Builder withHour(int hour) {
        schedule.hour = hour;
        return this;
    }

    /** @param interval interval in days between scheduled runs */
    public Builder withInterval(int interval) {
        schedule.interval = interval;
        return this;
    }

    /** @param placed timestamp (epoch millis) the schedule was placed */
    public Builder withPlaced(long placed) {
        schedule.placed = placed;
        return this;
    }

    public Builder withMode(Mode mode) {
        schedule.mode = mode;
        return this;
    }

    /** Legacy overload accepting the integer stored on disk. */
    public Builder withMode(int mode) throws SchedulingException {
        schedule.mode = Mode.intToMode(mode);
        return this;
    }

    public Builder withSubmode(Submode submode) {
        schedule.submode = submode;
        return this;
    }

    /** Legacy overload accepting the integer stored on disk. */
    public Builder withSubmode(int submode) throws SchedulingException {
        schedule.submode = Submode.intToSubmode(submode);
        return this;
    }

    public Builder withExcludeSystem(boolean excludeSystem) {
        schedule.excludeSystem = excludeSystem;
        return this;
    }

    /** @return the schedule populated so far (no copy is made) */
    public Schedule build() {
        return schedule;
    }
}
/**
 * Room type converter persisting {@link Mode} by its enum constant name.
 * Null-safe in both directions so that a nullable column round-trips
 * instead of raising a NullPointerException inside Room.
 */
static class ModeConverter {
    private ModeConverter() {
    }

    @TypeConverter
    public static String toString(Mode mode) {
        return mode == null ? null : mode.name();
    }

    @TypeConverter
    public static Mode toMode(String name) {
        return name == null ? null : Mode.valueOf(name);
    }
}
/**
 * Room type converter persisting {@link Submode} by its enum constant name.
 * Null-safe in both directions so that a nullable column round-trips
 * instead of raising a NullPointerException inside Room.
 */
static class SubmodeConverter {
    private SubmodeConverter() {
    }

    @TypeConverter
    public static String toString(Submode submode) {
        return submode == null ? null : submode.name();
    }

    @TypeConverter
    public static Submode toSubmode(String name) {
        return name == null ? null : Submode.valueOf(name);
    }
}
}
|
/*
* Copyright (c) 2014-2015 Amberfog.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amberfog.countryflagsdemo;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
/**
 * Fragment that lets the user enter a phone number (with country flag
 * selection inherited from {@link BaseFlagFragment}) and "send" it.
 */
public class VerifyPhoneFragment extends BaseFlagFragment {

    public VerifyPhoneFragment() {
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.fragment_flags, container, false);
        initUI(view);
        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Country codes need the activity context, so load them here.
        initCodes(getActivity());
    }

    @Override
    protected void send() {
        hideKeyboard(mPhoneEdit);
        mPhoneEdit.setError(null);
        // validate() returns the normalized phone number, or null on failure.
        final String phoneNumber = validate();
        if (phoneNumber != null) {
            Toast.makeText(getActivity(), "send to " + phoneNumber, Toast.LENGTH_SHORT).show();
        } else {
            mPhoneEdit.requestFocus();
            mPhoneEdit.setError(getString(R.string.label_error_incorrect_phone));
        }
    }
}
|
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.messaging.handler.annotation.support;
import java.lang.reflect.Method;
import java.util.Locale;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.core.MethodParameter;
import org.springframework.messaging.Message;
import org.springframework.messaging.converter.MessageConversionException;
import org.springframework.messaging.converter.MessageConverter;
import org.springframework.messaging.support.ErrorMessage;
import org.springframework.messaging.support.GenericMessage;
import org.springframework.messaging.support.MessageBuilder;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link MessageMethodArgumentResolver}.
 *
 * <p>Each test resolves one parameter of the {@code handle(..)} signature at
 * the bottom of this class; the parameter index selects the payload type
 * under test.
 *
 * @author Stephane Nicoll
 * @author Juergen Hoeller
 */
public class MessageMethodArgumentResolverTests {

	@Rule
	public final ExpectedException thrown = ExpectedException.none();

	private MessageConverter converter;

	private MessageMethodArgumentResolver resolver;

	// Reflective handle on handle(..); its parameters provide the
	// MethodParameter targets used by the individual tests.
	private Method method;


	@Before
	public void setup() throws Exception {
		this.method = MessageMethodArgumentResolverTests.class.getDeclaredMethod("handle",
				Message.class, Message.class, Message.class, Message.class, ErrorMessage.class);
		this.converter = mock(MessageConverter.class);
		this.resolver = new MessageMethodArgumentResolver(this.converter);
	}


	@Test
	public void resolveWithPayloadTypeAsWildcard() throws Exception {
		Message<String> message = MessageBuilder.withPayload("test").build();
		MethodParameter parameter = new MethodParameter(this.method, 0);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithMatchingPayloadType() throws Exception {
		Message<Integer> message = MessageBuilder.withPayload(123).build();
		MethodParameter parameter = new MethodParameter(this.method, 1);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithPayloadTypeSubclass() throws Exception {
		Message<Integer> message = MessageBuilder.withPayload(123).build();
		MethodParameter parameter = new MethodParameter(this.method, 2);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithConversion() throws Exception {
		Message<String> message = MessageBuilder.withPayload("test").build();
		MethodParameter parameter = new MethodParameter(this.method, 1);
		when(this.converter.fromMessage(message, Integer.class)).thenReturn(4);

		@SuppressWarnings("unchecked")
		Message<Integer> actual = (Message<Integer>) this.resolver.resolveArgument(parameter, message);

		assertNotNull(actual);
		assertSame(message.getHeaders(), actual.getHeaders());
		// Integer.valueOf instead of the deprecated Integer(int) constructor.
		assertEquals(Integer.valueOf(4), actual.getPayload());
	}

	@Test
	public void resolveWithConversionNoMatchingConverter() throws Exception {
		Message<String> message = MessageBuilder.withPayload("test").build();
		MethodParameter parameter = new MethodParameter(this.method, 1);

		assertTrue(this.resolver.supportsParameter(parameter));

		thrown.expect(MessageConversionException.class);
		thrown.expectMessage(Integer.class.getName());
		thrown.expectMessage(String.class.getName());
		this.resolver.resolveArgument(parameter, message);
	}

	@Test
	public void resolveWithConversionEmptyPayload() throws Exception {
		Message<String> message = MessageBuilder.withPayload("").build();
		MethodParameter parameter = new MethodParameter(this.method, 1);

		assertTrue(this.resolver.supportsParameter(parameter));

		thrown.expect(MessageConversionException.class);
		thrown.expectMessage("the payload is empty");
		thrown.expectMessage(Integer.class.getName());
		thrown.expectMessage(String.class.getName());
		this.resolver.resolveArgument(parameter, message);
	}

	@Test
	public void resolveWithPayloadTypeUpperBound() throws Exception {
		Message<Integer> message = MessageBuilder.withPayload(123).build();
		MethodParameter parameter = new MethodParameter(this.method, 3);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithPayloadTypeOutOfBound() throws Exception {
		Message<Locale> message = MessageBuilder.withPayload(Locale.getDefault()).build();
		MethodParameter parameter = new MethodParameter(this.method, 3);

		assertTrue(this.resolver.supportsParameter(parameter));

		thrown.expect(MessageConversionException.class);
		thrown.expectMessage(Number.class.getName());
		thrown.expectMessage(Locale.class.getName());
		this.resolver.resolveArgument(parameter, message);
	}

	@Test
	public void resolveMessageSubclassMatch() throws Exception {
		ErrorMessage message = new ErrorMessage(new UnsupportedOperationException());
		MethodParameter parameter = new MethodParameter(this.method, 4);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithMessageSubclassAndPayloadWildcard() throws Exception {
		ErrorMessage message = new ErrorMessage(new UnsupportedOperationException());
		MethodParameter parameter = new MethodParameter(this.method, 0);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithWrongMessageType() throws Exception {
		UnsupportedOperationException ex = new UnsupportedOperationException();
		Message<? extends Throwable> message = new GenericMessage<Throwable>(ex);
		MethodParameter parameter = new MethodParameter(this.method, 4);

		assertTrue(this.resolver.supportsParameter(parameter));

		thrown.expect(MethodArgumentTypeMismatchException.class);
		thrown.expectMessage(ErrorMessage.class.getName());
		thrown.expectMessage(GenericMessage.class.getName());
		// The expected exception fires inside resolveArgument, so the former
		// assertSame(...) around this call was unreachable and has been removed.
		this.resolver.resolveArgument(parameter, message);
	}

	@Test
	public void resolveWithPayloadTypeAsWildcardAndNoConverter() throws Exception {
		this.resolver = new MessageMethodArgumentResolver();
		Message<String> message = MessageBuilder.withPayload("test").build();
		MethodParameter parameter = new MethodParameter(this.method, 0);

		assertTrue(this.resolver.supportsParameter(parameter));
		assertSame(message, this.resolver.resolveArgument(parameter, message));
	}

	@Test
	public void resolveWithConversionNeededButNoConverter() throws Exception {
		this.resolver = new MessageMethodArgumentResolver();
		Message<String> message = MessageBuilder.withPayload("test").build();
		MethodParameter parameter = new MethodParameter(this.method, 1);

		assertTrue(this.resolver.supportsParameter(parameter));

		thrown.expect(MessageConversionException.class);
		thrown.expectMessage(Integer.class.getName());
		thrown.expectMessage(String.class.getName());
		this.resolver.resolveArgument(parameter, message);
	}

	@Test
	public void resolveWithConversionEmptyPayloadButNoConverter() throws Exception {
		this.resolver = new MessageMethodArgumentResolver();
		Message<String> message = MessageBuilder.withPayload("").build();
		MethodParameter parameter = new MethodParameter(this.method, 1);

		assertTrue(this.resolver.supportsParameter(parameter));

		thrown.expect(MessageConversionException.class);
		thrown.expectMessage("the payload is empty");
		thrown.expectMessage(Integer.class.getName());
		thrown.expectMessage(String.class.getName());
		this.resolver.resolveArgument(parameter, message);
	}


	@SuppressWarnings("unused")
	private void handle(
			Message<?> wildcardPayload,
			Message<Integer> integerPayload,
			Message<Number> numberPayload,
			Message<? extends Number> anyNumberPayload,
			ErrorMessage subClass) {
	}

}
|
package org.apache.spark.sql.delta;
/**
 * Used to perform a set of reads in a transaction and then commit a set of updates to the
 * state of the log. All reads from the {@link DeltaLog}, MUST go through this instance rather
 * than directly to the {@link DeltaLog} otherwise they will not be checked for logical conflicts
 * with concurrent updates.
 * <p>
 * This class is not thread-safe.
 * <p>
 * NOTE(review): this appears to be a generated Java API stub (likely derived
 * from a Scala implementation) — every method body unconditionally throws
 * {@code RuntimeException}, so nothing here is executable as-is.
 * <p>
 * param: deltaLog The Delta Log for the table this transaction is modifying.
 * param: snapshot The snapshot that this transaction is reading at.
 */
public class OptimisticTransaction implements org.apache.spark.sql.delta.OptimisticTransactionImpl, org.apache.spark.sql.delta.metering.DeltaLogging {
  /** Get the active transaction */
  static public scala.Option<org.apache.spark.sql.delta.OptimisticTransaction> getActive () { throw new RuntimeException(); }
  /**
   * Runs the passed block of code with the given active transaction
   * @param activeTransaction (undocumented)
   * @param block (undocumented)
   * @return (undocumented)
   */
  static public <T extends java.lang.Object> T withActive (org.apache.spark.sql.delta.OptimisticTransaction activeTransaction, scala.Function0<T> block) { throw new RuntimeException(); }
  /**
   * Sets a transaction as the active transaction.
   * <p>
   * @note This is not meant for being called directly, only from
   * <code>OptimisticTransaction.withNewTransaction</code>. Use that to create and set active txns.
   * @param txn (undocumented)
   */
  static void setActive (org.apache.spark.sql.delta.OptimisticTransaction txn) { throw new RuntimeException(); }
  /**
   * Clears the active transaction as the active transaction.
   * <p>
   * @note This is not meant for being called directly, <code>OptimisticTransaction.withNewTransaction</code>.
   */
  static void clearActive () { throw new RuntimeException(); }
  // ---- generated accessors for internal transaction state (stubs) ----
  protected org.apache.spark.sql.SparkSession _spark () { throw new RuntimeException(); }
  // Transaction ids read during this transaction.
  protected scala.collection.mutable.ArrayBuffer<java.lang.String> readTxn () { throw new RuntimeException(); }
  // Predicates used for reads, tracked for conflict detection.
  protected scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.catalyst.expressions.Expression> readPredicates () { throw new RuntimeException(); }
  // Files read by this transaction.
  protected scala.collection.mutable.HashSet<org.apache.spark.sql.delta.actions.AddFile> readFiles () { throw new RuntimeException(); }
  protected boolean committed () { throw new RuntimeException(); }
  protected scala.Option<org.apache.spark.sql.delta.actions.Metadata> newMetadata () { throw new RuntimeException(); }
  protected scala.Option<org.apache.spark.sql.delta.actions.Protocol> newProtocol () { throw new RuntimeException(); }
  protected long txnStartNano () { throw new RuntimeException(); }
  protected long commitStartNano () { throw new RuntimeException(); }
  protected org.apache.spark.sql.delta.actions.CommitInfo commitInfo () { throw new RuntimeException(); }
  protected long commitAttemptStartTime () { throw new RuntimeException(); }
  // Hooks run after a successful commit.
  protected scala.collection.mutable.ArrayBuffer<org.apache.spark.sql.delta.hooks.PostCommitHook> postCommitHooks () { throw new RuntimeException(); }
  protected boolean hasWritten () { throw new RuntimeException(); }
  public org.apache.spark.sql.delta.DeltaLog deltaLog () { throw new RuntimeException(); }
  public org.apache.spark.sql.delta.Snapshot snapshot () { throw new RuntimeException(); }
  public org.apache.spark.util.Clock clock () { throw new RuntimeException(); }
  // not preceding
  public OptimisticTransaction (org.apache.spark.sql.delta.DeltaLog deltaLog, org.apache.spark.sql.delta.Snapshot snapshot, org.apache.spark.util.Clock clock) { throw new RuntimeException(); }
  /** Creates a new OptimisticTransaction.
   * <p>
   * @param deltaLog The Delta Log for the table this transaction is modifying.
   * @param clock (undocumented)
   */
  public OptimisticTransaction (org.apache.spark.sql.delta.DeltaLog deltaLog, org.apache.spark.util.Clock clock) { throw new RuntimeException(); }
}
|
package com.hrios_practice.android_mini_project_final_01;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies the instrumentation targets the expected application package. */
    @Test
    public void useAppContext() {
        // Context of the app under test.
        final Context targetContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        assertEquals("com.hrios_practice.android_mini_project_final_01", targetContext.getPackageName());
    }
}
|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.gateway.engine.es;
import io.apiman.common.es.util.AbstractEsComponent;
import io.apiman.common.es.util.EsConstants;
import io.apiman.common.es.util.builder.index.EsIndexProperties;
import io.apiman.gateway.engine.async.AsyncResultImpl;
import io.apiman.gateway.engine.async.IAsyncResultHandler;
import io.apiman.gateway.engine.components.ISharedStateComponent;
import io.apiman.gateway.engine.es.beans.PrimitiveBean;
import io.apiman.gateway.engine.storage.util.BackingStoreUtil;
import java.util.HashMap;
import java.util.Map;
import javax.xml.namespace.QName;
import org.apache.commons.codec.binary.Base64;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.xcontent.XContentType;
import static io.apiman.common.es.util.builder.index.EsIndexUtils.KEYWORD_PROP;
import static io.apiman.common.es.util.builder.index.EsIndexUtils.TEXT_AND_KEYWORD_PROP_256;
import static io.apiman.gateway.engine.storage.util.BackingStoreUtil.JSON_MAPPER;
/**
 * An elasticsearch implementation of the shared state component.
 *
 * Properties are stored one document per (namespace, property) pair; the
 * document id is the Base64-encoded QName of the property.
 *
 * @author eric.wittmann@redhat.com
 */
public class EsSharedStateComponent extends AbstractEsComponent implements ISharedStateComponent {

    /**
     * Constructor.
     * @param config the configuration
     */
    public EsSharedStateComponent(Map<String, String> config) {
        super(config);
    }

    /**
     * Looks up a stored property and reports it (or the default) through the
     * async handler. Never throws: all failures are delivered via the handler.
     *
     * @see io.apiman.gateway.engine.components.ISharedStateComponent#getProperty(java.lang.String, java.lang.String, java.lang.Object, io.apiman.gateway.engine.async.IAsyncResultHandler)
     */
    @Override
    public <T> void getProperty(final String namespace, final String propertyName, final T defaultValue,
            final IAsyncResultHandler<T> handler) {
        // The default's runtime class decides how the stored value is decoded,
        // so a null default would make the type undecidable.
        if (defaultValue == null) {
            handler.handle(AsyncResultImpl.<T>create(new Exception("Null defaultValue is not allowed."))); //$NON-NLS-1$
            return;
        }
        String id = getPropertyId(namespace, propertyName);

        try {
            GetResponse response = getClient().get(new GetRequest(getFullIndexName()).id(id), RequestOptions.DEFAULT);
            if (response.isExists()) {
                try {
                    T value;
                    // NOTE(review): for boxed defaults (Integer, Boolean, ...)
                    // getClass().isPrimitive() is always false, so this branch
                    // effectively fires only for String defaults; boxed values
                    // take the JSON branch below. setProperty() mirrors the
                    // same test, so reads and writes stay consistent — confirm
                    // whether the PrimitiveBean path was meant to cover boxed
                    // types before changing either side.
                    if (defaultValue.getClass().isPrimitive() || defaultValue instanceof String) {
                        value = (T) readPrimitive(response);
                    } else {
                        String sourceAsString = response.getSourceAsString();
                        value = (T) JSON_MAPPER.readValue(sourceAsString, defaultValue.getClass());
                    }
                    handler.handle(AsyncResultImpl.create(value));
                } catch (Exception e) {
                    handler.handle(AsyncResultImpl.<T>create(e));
                }
            } else {
                // No document stored under this id: fall back to the default.
                handler.handle(AsyncResultImpl.create(defaultValue));
            }
        } catch (Throwable e) {
            handler.handle(AsyncResultImpl.<T>create(e));
        }
    }

    /**
     * Serializes the value (PrimitiveBean wrapper for Strings, plain JSON
     * otherwise — see the note in getProperty) and indexes it under the
     * property id. Result is delivered via the handler.
     *
     * @see io.apiman.gateway.engine.components.ISharedStateComponent#setProperty(java.lang.String, java.lang.String, java.lang.Object, io.apiman.gateway.engine.async.IAsyncResultHandler)
     */
    @Override
    public <T> void setProperty(final String namespace, final String propertyName, final T value, final IAsyncResultHandler<Void> handler) {
        if (value == null) {
            handler.handle(AsyncResultImpl.<Void>create(new Exception("Null value is not allowed."))); //$NON-NLS-1$
            return;
        }
        String source;
        try {
            if (value.getClass().isPrimitive() || value instanceof String) {
                // Wrap with the declared type so readPrimitive() can restore it.
                PrimitiveBean pb = new PrimitiveBean();
                pb.setValue(String.valueOf(value));
                pb.setType(value.getClass().getName());
                source = JSON_MAPPER.writeValueAsString(pb);
            } else {
                source = JSON_MAPPER.writeValueAsString(value);
            }
        } catch (Exception e) {
            handler.handle(AsyncResultImpl.<Void>create(e));
            return;
        }

        String id = getPropertyId(namespace, propertyName);
        String json = source;
        // Indexing with an explicit id upserts: repeated setProperty overwrites.
        IndexRequest indexRequest = new IndexRequest(getFullIndexName()).source(json, XContentType.JSON).id(id);
        try {
            getClient().index(indexRequest, RequestOptions.DEFAULT);
            handler.handle(AsyncResultImpl.create((Void) null));
        } catch (Throwable e) {
            handler.handle(AsyncResultImpl.<Void>create(e));
        }
    }

    /**
     * Deletes the document stored for the property, if any. Deleting a
     * missing document is not treated as an error here.
     *
     * @see io.apiman.gateway.engine.components.ISharedStateComponent#clearProperty(java.lang.String, java.lang.String, io.apiman.gateway.engine.async.IAsyncResultHandler)
     */
    @Override
    public <T> void clearProperty(final String namespace, final String propertyName, final IAsyncResultHandler<Void> handler) {
        String id = getPropertyId(namespace, propertyName);
        DeleteRequest deleteRequest = new DeleteRequest(getFullIndexName(), id);
        try {
            getClient().delete(deleteRequest, RequestOptions.DEFAULT);
            handler.handle(AsyncResultImpl.create((Void) null));
        } catch (Throwable e) {
            handler.handle(AsyncResultImpl.<Void>create(e));
        }
    }

    /**
     * Builds the document id for a property: the Base64 encoding of its
     * {@code {namespace}propertyName} QName string.
     *
     * @param namespace property namespace
     * @param propertyName property name within the namespace
     */
    private String getPropertyId(String namespace, String propertyName) {
        String qn = new QName(namespace, propertyName).toString();
        return Base64.encodeBase64String(qn.getBytes());
    }

    /**
     * Reads a stored primitive.
     *
     * Decodes a PrimitiveBean document (written by setProperty for String
     * values) back into the type recorded in the bean.
     *
     * @param response the ES get response holding the document source
     */
    protected Object readPrimitive(GetResponse response) throws Exception {
        String sourceAsString = response.getSourceAsString();
        PrimitiveBean pb = JSON_MAPPER.readValue(sourceAsString,PrimitiveBean.class);
        String value = pb.getValue();
        // Class.forName on the recorded wrapper/String type name.
        Class<?> c = Class.forName(pb.getType());
        return BackingStoreUtil.readPrimitive(c, value);
    }

    /**
     * @see AbstractEsComponent#getDefaultIndexPrefix()
     */
    @Override
    protected String getDefaultIndexPrefix() {
        return EsConstants.GATEWAY_INDEX_NAME;
    }

    /** Declares the mapping for the shared-state index. */
    @Override
    public Map<String, EsIndexProperties> getEsIndices() {
        EsIndexProperties indexDefinition = EsIndexProperties.builder()
            .addProperty(EsConstants.ES_FIELD_ORGANIZATION_ID, KEYWORD_PROP)
            .addProperty(EsConstants.ES_FIELD_TYPE, KEYWORD_PROP)
            .addProperty(EsConstants.ES_FIELD_VALUE, TEXT_AND_KEYWORD_PROP_256)
            .addProperty(EsConstants.ES_FIELD_VERSION, KEYWORD_PROP)
            .build();

        Map<String, EsIndexProperties> indexMap = new HashMap<>();
        indexMap.put(EsConstants.INDEX_SHARED_STATE_PROPERTY, indexDefinition);
        return indexMap;
    }

    /**
     * get index full name for shared state property
     * @return full index name (prefix + joiner + index name, lower-cased)
     */
    private String getFullIndexName() {
        return (getIndexPrefixWithJoiner() + EsConstants.INDEX_SHARED_STATE_PROPERTY).toLowerCase();
    }
}
|
package com.lanking.uxb.zycon.activity.api.impl;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.collect.Lists;
import com.lanking.cloud.component.db.support.hibernate.Repo;
import com.lanking.cloud.domain.yoo.activity.holiday001.HolidayActivity01;
import com.lanking.cloud.domain.yoo.activity.holiday001.HolidayActivity01Cfg;
import com.lanking.cloud.domain.yoo.activity.holiday001.HolidayActivity01Exercise;
import com.lanking.cloud.domain.yoo.activity.holiday001.HolidayActivity01ExerciseQuestion;
import com.lanking.cloud.domain.yoo.activity.holiday001.HolidayActivity01Grade;
import com.lanking.cloud.domain.yoo.goods.lottery.CoinsLotterySeason;
import com.lanking.cloud.sdk.data.Params;
import com.lanking.uxb.zycon.activity.api.ZycHolidayActivity01Service;
import com.lanking.uxb.zycon.mall.form.LotterySeasonForm;
/**
* 寒假作业活动相关接口实现.
*
* @since 教师端 v1.2.0
*
*/
@Transactional(readOnly = true)
@Service
public class ZycHolidayActivity01ServiceImpl implements ZycHolidayActivity01Service {
@Autowired
@Qualifier("HolidayActivity01Repo")
private Repo<HolidayActivity01, Long> holidayActivity01Repo;
@Autowired
@Qualifier("HolidayActivity01ExerciseRepo")
private Repo<HolidayActivity01Exercise, Long> holidayActivity01ExerciseRepo;
@Autowired
@Qualifier("HolidayActivity01ExerciseQuestionRepo")
private Repo<HolidayActivity01ExerciseQuestion, Long> holidayActivity01ExerciseQuestionRepo;
@Autowired
@Qualifier("CoinsLotterySeasonRepo")
private Repo<CoinsLotterySeason, Long> lotterSeasonrepo;
@Override
public HolidayActivity01 get(long id) {
return holidayActivity01Repo.get(id);
}
@Transactional
@Override
public void init(long seasonId,LotterySeasonForm form) {
HolidayActivity01 h = new HolidayActivity01();
CoinsLotterySeason season = lotterSeasonrepo.get(seasonId);
h.setCode(countHolidayActivity01() + 1);
h.setCreateAt(new Date());
h.setName(season.getName());
h.setStartTime(season.getStartTime());
h.setEndTime(season.getEndTime());
HolidayActivity01Cfg cfg = new HolidayActivity01Cfg();
cfg.setCode(countHolidayActivity01() + 1);
cfg.setLuckyDrawOneHomework(1);
cfg.setMinClassStudents(20);
List<Integer> submitRateThreshold = new ArrayList<Integer>();
submitRateThreshold.add(100);
submitRateThreshold.add(80);
submitRateThreshold.add(50);
cfg.setSubmitRateThreshold(submitRateThreshold);
List<Integer> luckyDrawThreshold = new ArrayList<Integer>();
luckyDrawThreshold.add(10);
luckyDrawThreshold.add(8);
luckyDrawThreshold.add(6);
cfg.setLuckyDrawThreshold(luckyDrawThreshold);
cfg.setSeasonId(seasonId);
// 初中(苏科版15/沪科新版27/人教新版30/23华师大版/19鲁五四新版/31北师新版 柴林森提供)
List<Integer> textbookCategoryCodes2 = Lists.newArrayList();
textbookCategoryCodes2.add(15);
textbookCategoryCodes2.add(27);
textbookCategoryCodes2.add(30);
textbookCategoryCodes2.add(23);
textbookCategoryCodes2.add(19);
textbookCategoryCodes2.add(31);
cfg.setTextbookCategoryCodes2(textbookCategoryCodes2);
// // 高中(苏教版12/人教A版13/人教B版14 柴林森提供 2017.6.23)
// List<Integer> textbookCategoryCodes3 = Lists.newArrayList();
// textbookCategoryCodes3.add(12);
// textbookCategoryCodes3.add(13);
// textbookCategoryCodes3.add(14);
// cfg.setTextbookCategoryCodes3(textbookCategoryCodes3);
List<HolidayActivity01Grade> grades2 = Lists.newArrayList();
grades2.add(HolidayActivity01Grade.PHASE_2_1);
grades2.add(HolidayActivity01Grade.PHASE_2_2);
grades2.add(HolidayActivity01Grade.PHASE_2_3);
grades2.add(HolidayActivity01Grade.PHASE_2_4);
cfg.setGrades2(grades2);
// List<HolidayActivity01Grade> grades3 = Lists.newArrayList();
// grades3.add(HolidayActivity01Grade.PHASE_3_1);
// grades3.add(HolidayActivity01Grade.PHASE_3_2);
// cfg.setGrades3(grades3);
// 时间段,小宋完善
List<List<Long>> periods = new ArrayList<List<Long>>();
List<Long> period1 = Lists.newArrayList();
Date phase1Start = null;
Date phase1End = null;
Date phase2Start = null;
Date phase2End = null;
Date phase3Start = null;
Date phase3End = null;
Date phase4Start = null;
Date phase4End = null;
SimpleDateFormat simFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
try {
phase1Start = form.getPhase1Start() == null ? null : simFormat.parse(form.getPhase1Start());
phase1End = form.getPhase1End() == null ? null : simFormat.parse(form.getPhase1End());
phase2Start = form.getPhase2Start() == null ? null : simFormat.parse(form.getPhase2Start());
phase2End = form.getPhase2End() == null ? null : simFormat.parse(form.getPhase2End());
phase3Start = form.getPhase3Start() == null ? null : simFormat.parse(form.getPhase3Start());
phase3End = form.getPhase3End() == null ? null : simFormat.parse(form.getPhase3End());
phase4Start = form.getPhase4Start() == null ? null : simFormat.parse(form.getPhase4Start());
phase4End = form.getPhase4End() == null ? null : simFormat.parse(form.getPhase4End());
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
period1.add(phase1Start.getTime());
period1.add(phase1End.getTime());
periods.add(period1);
List<Long> period2 = Lists.newArrayList();
period2.add(phase2Start.getTime());
period2.add(phase2End.getTime());
periods.add(period2);
List<Long> period3 = Lists.newArrayList();
period3.add(phase3Start.getTime());
period3.add(phase3End.getTime());
periods.add(period3);
List<Long> period4 = Lists.newArrayList();
period4.add(phase4Start.getTime());
period4.add(phase4End.getTime());
periods.add(period4);
cfg.setPeriods(periods);
h.setCfg(cfg);
holidayActivity01Repo.save(h);
}
/**
 * Removes every exercise row and exercise-question row belonging to the given
 * activity, inside a single transaction.
 */
@Override
@Transactional
public void deleteAllExerciseAndQuestion(Long activityCode) {
// Delete the child question rows before their parent exercise rows so no
// orphaned questions remain if anything fails between the two statements.
// NOTE(review): "$deleteAll" appears to be a named bulk-delete statement on the
// repository -- confirm it scopes strictly by activityCode.
holidayActivity01ExerciseQuestionRepo.execute("$deleteAll", Params.param("activityCode", activityCode));
holidayActivity01ExerciseRepo.execute("$deleteAll", Params.param("activityCode", activityCode));
}
/**
 * @return the total number of HolidayActivity01 records, as computed by the
 *         repository's "$countHolidayActivity01" named query
 */
@Override
public Long countHolidayActivity01() {
return holidayActivity01Repo.find("$countHolidayActivity01").count();
}
}
|
package Calculatrice;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
public class Calculatrice extends JFrame {

    /**
     * Content pane of the frame.
     * NOTE(review): this panel keeps JPanel's default FlowLayout, so the
     * BorderLayout constraints passed to add() in initComposant() are silently
     * ignored; the layout only works through the panels' preferred sizes.
     * Confirm intent before switching to an actual BorderLayout.
     */
    private JPanel container = new JPanel();

    /** Button captions; indices 0-10 are digits/dot, 11 is "=", 12 is clear,
     *  13-16 are the four arithmetic operators. */
    String[] tab_string = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "0",
            ".", "=", "C", "+", "-", "*", "/"};
    JButton[] tab_button = new JButton[tab_string.length];

    /** Display label showing the current entry or result. */
    private JLabel ecran = new JLabel();

    private Dimension dim = new Dimension(50, 40);
    private Dimension dim2 = new Dimension(50, 31);

    /** Accumulated left operand of the pending operation. */
    private double chiffre1;

    /** clicOperateur: an operator key was pressed since the last "="/reset.
     *  update: the next digit press replaces the display instead of appending. */
    private boolean clicOperateur = false, update = false;

    /** Pending operator: "+", "-", "*", "/" or "" when none. */
    private String operateur = "";

    /** Builds and shows the calculator window. */
    public Calculatrice() {
        this.setSize(240, 260);
        this.setTitle("Calculette");
        this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        this.setLocationRelativeTo(null);
        this.setResizable(false);
        initComposant();
        this.setContentPane(container);
        this.setVisible(true);
    }

    /** Creates the display and the button panels, wiring one listener per key. */
    private void initComposant() {
        Font police = new Font("Arial", Font.BOLD, 20);
        ecran = new JLabel("0");
        ecran.setFont(police);
        ecran.setHorizontalAlignment(JLabel.RIGHT);
        ecran.setPreferredSize(new Dimension(220, 20));

        JPanel operateur = new JPanel();
        operateur.setPreferredSize(new Dimension(55, 225));
        JPanel chiffre = new JPanel();
        chiffre.setPreferredSize(new Dimension(165, 225));
        JPanel panEcran = new JPanel();
        panEcran.setPreferredSize(new Dimension(220, 30));

        for (int i = 0; i < tab_string.length; i++) {
            tab_button[i] = new JButton(tab_string[i]);
            tab_button[i].setPreferredSize(dim);
            switch (i) {
                case 11: // "="
                    tab_button[i].addActionListener(new EgalListener());
                    chiffre.add(tab_button[i]);
                    break;
                case 12: // "C" (clear)
                    tab_button[i].setForeground(Color.red);
                    tab_button[i].addActionListener(new ResetListener());
                    operateur.add(tab_button[i]);
                    break;
                case 13: // "+"
                    tab_button[i].addActionListener(new PlusListener());
                    tab_button[i].setPreferredSize(dim2);
                    operateur.add(tab_button[i]);
                    break;
                case 14: // "-"
                    tab_button[i].addActionListener(new MoinsListener());
                    tab_button[i].setPreferredSize(dim2);
                    operateur.add(tab_button[i]);
                    break;
                case 15: // "*"
                    tab_button[i].addActionListener(new MultiListener());
                    tab_button[i].setPreferredSize(dim2);
                    operateur.add(tab_button[i]);
                    break;
                case 16: // "/"
                    tab_button[i].addActionListener(new DivListener());
                    tab_button[i].setPreferredSize(dim2);
                    operateur.add(tab_button[i]);
                    break;
                default: // digits and "."
                    chiffre.add(tab_button[i]);
                    tab_button[i].addActionListener(new ChiffreListener());
                    break;
            }
        }

        panEcran.add(ecran);
        panEcran.setBorder(BorderFactory.createLineBorder(Color.black));
        container.add(panEcran, BorderLayout.NORTH);
        container.add(chiffre, BorderLayout.CENTER);
        container.add(operateur, BorderLayout.EAST);
    }

    /**
     * Applies the pending operator to chiffre1 and the displayed operand, then
     * shows the result. Does nothing when no operator is pending.
     */
    private void calcul() {
        if (operateur.isEmpty()) {
            return; // nothing pending (e.g. "=" pressed right after a reset)
        }
        double operande = Double.parseDouble(ecran.getText());
        if (operateur.equals("+")) {
            chiffre1 = chiffre1 + operande;
            ecran.setText(String.valueOf(chiffre1));
        } else if (operateur.equals("-")) {
            chiffre1 = chiffre1 - operande;
            ecran.setText(String.valueOf(chiffre1));
        } else if (operateur.equals("*")) {
            chiffre1 = chiffre1 * operande;
            ecran.setText(String.valueOf(chiffre1));
        } else if (operateur.equals("/")) {
            // BUG FIX: double division never throws ArithmeticException (it
            // yields Infinity/NaN per the JLS), so the old try/catch here was
            // dead code and "x / 0" displayed "Infinity". Guard explicitly,
            // keeping the "0" fallback that the dead catch block intended.
            if (operande == 0) {
                ecran.setText("0");
            } else {
                chiffre1 = chiffre1 / operande;
                ecran.setText(String.valueOf(chiffre1));
            }
        }
    }

    /**
     * Shared behavior of the four operator buttons: fold the displayed value
     * into chiffre1 (so operations chain), remember the operator and arm the
     * display for a fresh entry. Extracted to remove the four identical copies
     * that previously lived in each listener.
     */
    private void operateurClique(String op) {
        if (clicOperateur) {
            calcul();
            ecran.setText(String.valueOf(chiffre1));
        } else {
            chiffre1 = Double.parseDouble(ecran.getText());
            clicOperateur = true;
        }
        operateur = op;
        update = true;
    }

    /** Appends the pressed digit (or dot) to the display. */
    class ChiffreListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            String str = ((JButton) e.getSource()).getText();
            if (update) {
                // Start a new number after an operator or "=".
                update = false;
            } else {
                if (!ecran.getText().equals("0"))
                    str = ecran.getText() + str;
            }
            ecran.setText(str);
        }
    }

    /** "=": evaluates the pending operation. */
    class EgalListener implements ActionListener {
        public void actionPerformed(ActionEvent arg0) {
            calcul();
            update = true;
            clicOperateur = false;
        }
    }

    class PlusListener implements ActionListener {
        public void actionPerformed(ActionEvent arg0) {
            operateurClique("+");
        }
    }

    class MoinsListener implements ActionListener {
        public void actionPerformed(ActionEvent arg0) {
            operateurClique("-");
        }
    }

    class MultiListener implements ActionListener {
        public void actionPerformed(ActionEvent arg0) {
            operateurClique("*");
        }
    }

    class DivListener implements ActionListener {
        public void actionPerformed(ActionEvent arg0) {
            operateurClique("/");
        }
    }

    /** "C": clears the pending state and blanks the display. */
    class ResetListener implements ActionListener {
        public void actionPerformed(ActionEvent arg0) {
            clicOperateur = false;
            update = true;
            chiffre1 = 0;
            operateur = "";
            ecran.setText("");
        }
    }
}
|
package org.sakaiproject.gradebookng.business.model;
import java.io.Serializable;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.builder.HashCodeBuilder;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * Describes the type of column imported.
 */
@NoArgsConstructor
public class ImportedColumn implements Serializable {

    public ImportedColumn(String title, String points, Type type) {
        columnTitle = title;
        this.points = points;
        this.type = type;
    }

    private static final long serialVersionUID = 1L;

    // Display title; compared case-insensitively when filtering duplicates.
    @Getter
    @Setter
    private String columnTitle;

    @Getter
    @Setter
    private String unparsedTitle;

    @Getter
    @Setter
    private String points;

    @Getter
    @Setter
    private Type type = Type.GB_ITEM_WITHOUT_POINTS;

    public enum Type {
        GB_ITEM_WITH_POINTS,
        GB_ITEM_WITHOUT_POINTS,
        COMMENTS,
        USER_ID,
        USER_NAME,
        IGNORE;
    }

    /**
     * @return a human readable label for this column's {@link Type}
     */
    public String getFriendlyType()
    {
        String returnVal = "Ignore";
        switch(this.type)
        {
            case GB_ITEM_WITHOUT_POINTS:
            case GB_ITEM_WITH_POINTS:
                returnVal = "Grades";
                break;
            case COMMENTS:
                returnVal = "Comments";
                break;
            case USER_ID:
                returnVal = "Student ID";
                break;
            case USER_NAME:
                returnVal = "Student Name";
                break;
        }
        return returnVal;
    }

    /**
     * Helper to determine if the type of column can be ignored
     * @return true for the identifying columns (user id/name) and IGNORE columns
     */
    public boolean isIgnorable() {
        if(this.type == Type.USER_ID || this.type == Type.USER_NAME || this.type == Type.IGNORE) {
            return true;
        }
        return false;
    }

    /**
     * Column titles are the only thing we care about for comparisons so that we can filter out duplicates.
     * Must also match type and exclude IGNORE.
     * Deliberately NOT a general-purpose equals: blank-titled and IGNORE-typed
     * columns never compare equal (even to themselves) so duplicate filtering skips them.
     */
    @Override
    public boolean equals(final Object o) {
        // BUG FIX: guard before casting; the old code threw ClassCastException
        // (or NullPointerException via other.columnTitle) when compared with a
        // null or non-ImportedColumn argument.
        if (!(o instanceof ImportedColumn)) {
            return false;
        }
        final ImportedColumn other = (ImportedColumn) o;
        if(StringUtils.isBlank(this.columnTitle) || StringUtils.isBlank(other.columnTitle)){
            return false;
        }
        if(this.type == Type.IGNORE || other.type == Type.IGNORE){
            return false;
        }
        if(StringUtils.equalsIgnoreCase(this.columnTitle, other.getColumnTitle()) && this.type == other.getType()){
            return true;
        }
        return false;
    }

    @Override
    public int hashCode() {
        // BUG FIX: equals() compares titles case-insensitively, but the old
        // hashCode hashed the raw title, so "Quiz 1" and "quiz 1" were equal yet
        // hashed differently (broken equals/hashCode contract). Hash a
        // lower-cased title instead (StringUtils.lowerCase is null-safe).
        return new HashCodeBuilder()
                .append(StringUtils.lowerCase(this.columnTitle))
                .append(this.type)
                .toHashCode();
    }
}
|
package com.aspose.html.examples.document;
import com.aspose.html.examples.Utils;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
public class LoadHtmlDoc {
    /**
     * The main entry point for the application.
     * Writes a minimal HTML snippet to {@code <dataDir>/input.html} for the
     * "load HTML document" example.
     */
    public static void main(String[] args) throws Exception
    {
        //ExStart:LoadHtmlDoc
        // The path to the documents directory.
        String dataDir = Utils.getDataDir();
        final File file = new File(dataDir + "input.html");

        // try-with-resources replaces the old try/finally + redundant null
        // check: the stream is closed automatically even if write() throws.
        try (FileOutputStream sw = new FileOutputStream(file))
        {
            // FIX: the no-arg getBytes() used the platform default charset,
            // which varies between machines; write UTF-8 explicitly.
            sw.write("<p>this is a simple text".getBytes(StandardCharsets.UTF_8));
        }
        //ExEnd:LoadHtmlDoc
    }
}
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.activiti.engine.impl.ProcessEngineInfoImpl;
import org.activiti.engine.impl.util.IoUtil;
import org.activiti.engine.impl.util.ReflectUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Helper for initializing and closing process engines in server environments. <br>
* All created {@link ProcessEngine}s will be registered with this class. <br>
* The activiti-webapp-init webapp will call the {@link #init()} method when the webapp is deployed and it will call the {@link #destroy()} method when the webapp is destroyed, using a
* context-listener (<code>org.activiti.impl.servlet.listener.ProcessEnginesServletContextListener</code>). That way, all applications can just use the {@link #getProcessEngines()} to obtain
* pre-initialized and cached process engines. <br>
* <br>
* Please note that there is <b>no lazy initialization</b> of process engines, so make sure the context-listener is configured or {@link ProcessEngine}s are already created so they were registered on
* this class.<br>
* <br>
* The {@link #init()} method will try to build one {@link ProcessEngine} for each flowable.cfg.xml file found on the classpath. If you have more then one, make sure you specify different
* process.engine.name values.
*
* @author Tom Baeyens
* @author Joram Barrez
*/
public abstract class ProcessEngines {

    private static Logger log = LoggerFactory.getLogger(ProcessEngines.class);

    public static final String NAME_DEFAULT = "default";

    protected static boolean isInitialized;

    // Registry of live engines plus bookkeeping about how each was initialized.
    protected static Map<String, ProcessEngine> processEngines = new HashMap<String, ProcessEngine>();
    protected static Map<String, ProcessEngineInfo> processEngineInfosByName = new HashMap<String, ProcessEngineInfo>();
    protected static Map<String, ProcessEngineInfo> processEngineInfosByResourceUrl = new HashMap<String, ProcessEngineInfo>();
    protected static List<ProcessEngineInfo> processEngineInfos = new ArrayList<ProcessEngineInfo>();

    /**
     * Initializes all process engines that can be found on the classpath for resources <code>flowable.cfg.xml</code> (plain Activiti style configuration) and for resources
     * <code>activiti-context.xml</code> (Spring style configuration).
     */
    public static synchronized void init() {
        if (!isInitialized()) {
            if (processEngines == null) {
                // Create new map to store process-engines if current map is null
                processEngines = new HashMap<String, ProcessEngine>();
            }
            ClassLoader classLoader = ReflectUtil.getClassLoader();
            Enumeration<URL> resources = null;
            try {
                resources = classLoader.getResources("flowable.cfg.xml");
            } catch (IOException e) {
                throw new ActivitiIllegalArgumentException("problem retrieving flowable.cfg.xml resources on the classpath: " + System.getProperty("java.class.path"), e);
            }

            // Remove duplicated configuration URL's using set. Some classloaders may return identical URL's twice, causing duplicate startups
            Set<URL> configUrls = new HashSet<URL>();
            while (resources.hasMoreElements()) {
                configUrls.add(resources.nextElement());
            }
            for (Iterator<URL> iterator = configUrls.iterator(); iterator.hasNext();) {
                URL resource = iterator.next();
                log.info("Initializing process engine using configuration '{}'", resource.toString());
                initProcessEngineFromResource(resource);
            }

            try {
                resources = classLoader.getResources("activiti-context.xml");
            } catch (IOException e) {
                throw new ActivitiIllegalArgumentException("problem retrieving activiti-context.xml resources on the classpath: " + System.getProperty("java.class.path"), e);
            }
            while (resources.hasMoreElements()) {
                URL resource = resources.nextElement();
                log.info("Initializing process engine using Spring configuration '{}'", resource.toString());
                initProcessEngineFromSpringResource(resource);
            }

            setInitialized(true);
        } else {
            log.info("Process engines already initialized");
        }
    }

    protected static void initProcessEngineFromSpringResource(URL resource) {
        try {
            // The Spring helper is loaded reflectively so this module has no
            // hard compile-time dependency on the activiti-spring module.
            Class<?> springConfigurationHelperClass = ReflectUtil.loadClass("org.activiti.spring.SpringConfigurationHelper");
            Method method = springConfigurationHelperClass.getDeclaredMethod("buildProcessEngine", new Class<?>[] { URL.class });
            ProcessEngine processEngine = (ProcessEngine) method.invoke(null, new Object[] { resource });

            String processEngineName = processEngine.getName();
            ProcessEngineInfo processEngineInfo = new ProcessEngineInfoImpl(processEngineName, resource.toString(), null);
            processEngineInfosByName.put(processEngineName, processEngineInfo);
            processEngineInfosByResourceUrl.put(resource.toString(), processEngineInfo);

        } catch (Exception e) {
            throw new ActivitiException("couldn't initialize process engine from spring configuration resource " + resource.toString() + ": " + e.getMessage(), e);
        }
    }

    /**
     * Registers the given process engine. No {@link ProcessEngineInfo} will be available for this process engine. An engine that is registered will be closed when the {@link ProcessEngines#destroy()}
     * is called.
     */
    public static void registerProcessEngine(ProcessEngine processEngine) {
        processEngines.put(processEngine.getName(), processEngine);
    }

    /**
     * Unregisters the given process engine.
     */
    public static void unregister(ProcessEngine processEngine) {
        processEngines.remove(processEngine.getName());
    }

    /**
     * (Re-)initializes a process engine from a plain configuration resource,
     * replacing any bookkeeping left over from a previous attempt for the same URL.
     * FIX: renamed from the misspelled {@code initProcessEnginFromResource};
     * the method is private, so both call sites (init/retry) are in this class.
     */
    private static ProcessEngineInfo initProcessEngineFromResource(URL resourceUrl) {
        ProcessEngineInfo processEngineInfo = processEngineInfosByResourceUrl.get(resourceUrl.toString());
        // if there is an existing process engine info
        if (processEngineInfo != null) {
            // remove that process engine from the member fields
            processEngineInfos.remove(processEngineInfo);
            if (processEngineInfo.getException() == null) {
                String processEngineName = processEngineInfo.getName();
                processEngines.remove(processEngineName);
                processEngineInfosByName.remove(processEngineName);
            }
            processEngineInfosByResourceUrl.remove(processEngineInfo.getResourceUrl());
        }

        String resourceUrlString = resourceUrl.toString();
        try {
            log.info("initializing process engine for resource {}", resourceUrl);
            ProcessEngine processEngine = buildProcessEngine(resourceUrl);
            String processEngineName = processEngine.getName();
            log.info("initialised process engine {}", processEngineName);
            processEngineInfo = new ProcessEngineInfoImpl(processEngineName, resourceUrlString, null);
            processEngines.put(processEngineName, processEngine);
            processEngineInfosByName.put(processEngineName, processEngineInfo);
        } catch (Throwable e) {
            // A failed engine is recorded (with its stack trace) rather than
            // rethrown, so other engines on the classpath can still start.
            log.error("Exception while initializing process engine: {}", e.getMessage(), e);
            processEngineInfo = new ProcessEngineInfoImpl(null, resourceUrlString, getExceptionString(e));
        }
        processEngineInfosByResourceUrl.put(resourceUrlString, processEngineInfo);
        processEngineInfos.add(processEngineInfo);
        return processEngineInfo;
    }

    /** Renders a throwable's full stack trace as a string for ProcessEngineInfo. */
    private static String getExceptionString(Throwable e) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        e.printStackTrace(pw);
        return sw.toString();
    }

    /** Builds an engine from a configuration stream, always closing the stream. */
    private static ProcessEngine buildProcessEngine(URL resource) {
        InputStream inputStream = null;
        try {
            inputStream = resource.openStream();
            ProcessEngineConfiguration processEngineConfiguration = ProcessEngineConfiguration.createProcessEngineConfigurationFromInputStream(inputStream);
            return processEngineConfiguration.buildProcessEngine();
        } catch (IOException e) {
            throw new ActivitiIllegalArgumentException("couldn't open resource stream: " + e.getMessage(), e);
        } finally {
            IoUtil.closeSilently(inputStream);
        }
    }

    /** Get initialization results. */
    public static List<ProcessEngineInfo> getProcessEngineInfos() {
        return processEngineInfos;
    }

    /**
     * Get initialization results. Only info will we available for process engines which were added in the {@link ProcessEngines#init()}. No {@link ProcessEngineInfo} is available for engines which
     * were registered programmatically.
     */
    public static ProcessEngineInfo getProcessEngineInfo(String processEngineName) {
        return processEngineInfosByName.get(processEngineName);
    }

    public static ProcessEngine getDefaultProcessEngine() {
        return getProcessEngine(NAME_DEFAULT);
    }

    /**
     * obtain a process engine by name.
     *
     * @param processEngineName
     *          is the name of the process engine or null for the default process engine.
     */
    public static ProcessEngine getProcessEngine(String processEngineName) {
        if (!isInitialized()) {
            init();
        }
        return processEngines.get(processEngineName);
    }

    /**
     * retries to initialize a process engine that previously failed.
     */
    public static ProcessEngineInfo retry(String resourceUrl) {
        // FIX: log message typo ("retying initializing") corrected.
        log.debug("retrying initialization of resource {}", resourceUrl);
        try {
            return initProcessEngineFromResource(new URL(resourceUrl));
        } catch (MalformedURLException e) {
            throw new ActivitiIllegalArgumentException("invalid url: " + resourceUrl, e);
        }
    }

    /**
     * provides access to process engine to application clients in a managed server environment.
     */
    public static Map<String, ProcessEngine> getProcessEngines() {
        return processEngines;
    }

    /** closes all process engines. This method should be called when the server shuts down. */
    public static synchronized void destroy() {
        if (isInitialized()) {
            // Work on a snapshot; the shared map is reset before the engines are closed.
            Map<String, ProcessEngine> engines = new HashMap<String, ProcessEngine>(processEngines);
            processEngines = new HashMap<String, ProcessEngine>();

            // entrySet iteration avoids the keySet()+get() double lookup of the old code.
            for (Map.Entry<String, ProcessEngine> entry : engines.entrySet()) {
                String processEngineName = entry.getKey();
                try {
                    entry.getValue().close();
                } catch (Exception e) {
                    log.error("exception while closing {}", (processEngineName == null ? "the default process engine" : "process engine " + processEngineName), e);
                }
            }

            processEngineInfosByName.clear();
            processEngineInfosByResourceUrl.clear();
            processEngineInfos.clear();

            setInitialized(false);
        }
    }

    public static boolean isInitialized() {
        return isInitialized;
    }

    public static void setInitialized(boolean isInitialized) {
        ProcessEngines.isInitialized = isInitialized;
    }
}
|
/*
* Copyright (c) 2000, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package sun.jvm.hotspot.debugger.win32.coff;
/** Models the information stored in one of the {@link
    sun.jvm.hotspot.debugger.win32.coff.OptionalHeaderDataDirectories}. (Some
    of the descriptions are taken directly from Microsoft's
    documentation and are copyrighted by Microsoft.) */
public interface DataDirectory {
  /**
   * The relative virtual address of the table. The RVA is the
   * address of the table, when loaded, relative to the base address
   * of the image.
   *
   * @return the table's relative virtual address
   */
  int getRVA();

  /**
   * The size in bytes of this directory.
   *
   * @return the directory size in bytes
   */
  int getSize();
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.synapse.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.synapse.fluent.models.GeoBackupPolicyInner;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/** The response to a list geo backup policies request. */
@Fluent
public final class GeoBackupPolicyListResult {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(GeoBackupPolicyListResult.class);

    /*
     * The list of geo backup policies.
     */
    @JsonProperty(value = "value")
    private List<GeoBackupPolicyInner> value;

    /**
     * Get the value property: The list of geo backup policies.
     *
     * @return the value value.
     */
    public List<GeoBackupPolicyInner> value() {
        return this.value;
    }

    /**
     * Set the value property: The list of geo backup policies.
     *
     * @param value the value value to set.
     * @return the GeoBackupPolicyListResult object itself.
     */
    public GeoBackupPolicyListResult withValue(List<GeoBackupPolicyInner> value) {
        this.value = value;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // Read the list once (the old code called value() twice) and use a
        // method reference instead of the equivalent lambda.
        List<GeoBackupPolicyInner> policies = value();
        if (policies != null) {
            policies.forEach(GeoBackupPolicyInner::validate);
        }
    }
}
|
/*
* Copyright 2013 ubaldino.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.mitre.opensextant.extraction.TextEntity;
/**
*
* @author ubaldino
*/
public class TestExtraction {

    /** Exercises the span-comparison helpers on two overlapping text entities. */
    public void test() {
        final TextEntity first = new TextEntity();
        first.start = 10;
        first.end = 15;

        final TextEntity second = new TextEntity();
        second.start = 11;
        second.end = 17;

        // Same calls and order as before: containment, exact match, overlap.
        System.out.println(second.isWithin(first));
        System.out.println(second.isSameMatch(first));
        System.out.println(second.isOverlap(first));
    }

    public static void main(String[] args) {
        new TestExtraction().test();
    }
}
|
package org.w3c.dom.mathml;
/** MathML bound-variable ({@code <bvar>}) content element; a marker interface adding no members beyond {@link MathMLContentContainer}. */
public interface MathMLBvarElement extends MathMLContentContainer {}
/* Location: /mnt/r/ConTenDoViewer.jar!/org/w3c/dom/mathml/MathMLBvarElement.class
* Java compiler version: 6 (50.0)
* JD-Core Version: 1.1.3
*/
|
package com.atguigu.gmall.sms.dao;
import com.atguigu.gmall.sms.entity.CouponSpuRelationEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * Mapper for the coupon-to-SPU (product) relationship table.
 *
 * @author daiyuquan
 * @email 1206445365@qq.com
 * @date 2019-12-03 12:59:40
 */
@Mapper
public interface CouponSpuRelationDao extends BaseMapper<CouponSpuRelationEntity> {
}
|
/*
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.router;
/**
* Represents a byte range for performing ranged get requests.
*/
public class ByteRange {

    /** Sentinel stored in {@link #endOffset} when the range has no explicit end. */
    private static final long UNDEFINED_OFFSET = -1;

    private final ByteRangeType type;
    private final long startOffset;
    private final long endOffset;

    /**
     * Builds a range covering the bytes between two offsets, both inclusive.
     *
     * @param startOffset the (inclusive) start byte offset.
     * @param endOffset the (inclusive) end byte offset.
     * @return a {@link ByteRange} of type {@link ByteRangeType#OFFSET_RANGE}.
     * @throws IllegalArgumentException if the offsets are negative or out of order.
     */
    public static ByteRange fromOffsetRange(long startOffset, long endOffset) {
        if (startOffset < 0 || endOffset < startOffset) {
            throw new IllegalArgumentException(
                "Invalid range offsets provided for ByteRange; startOffset=" + startOffset + ", endOffset=" + endOffset);
        }
        return new ByteRange(startOffset, endOffset, ByteRangeType.OFFSET_RANGE);
    }

    /**
     * Builds an open-ended range from a start offset to the end of an object.
     *
     * @param startOffset the (inclusive) start byte offset.
     * @return a {@link ByteRange} of type {@link ByteRangeType#FROM_START_OFFSET}.
     * @throws IllegalArgumentException if the offset is negative.
     */
    public static ByteRange fromStartOffset(long startOffset) {
        if (startOffset < 0) {
            throw new IllegalArgumentException("Invalid range offsets provided for ByteRange; startOffset=" + startOffset);
        }
        return new ByteRange(startOffset, UNDEFINED_OFFSET, ByteRangeType.FROM_START_OFFSET);
    }

    /**
     * Builds a range denoting the last N bytes of an object.
     *
     * @param lastNBytes how many trailing bytes to read.
     * @return a {@link ByteRange} of type {@link ByteRangeType#LAST_N_BYTES}.
     * @throws IllegalArgumentException if the count is negative.
     */
    public static ByteRange fromLastNBytes(long lastNBytes) {
        if (lastNBytes < 0) {
            throw new IllegalArgumentException("Invalid range offsets provided for ByteRange; lastNBytes=" + lastNBytes);
        }
        return new ByteRange(lastNBytes, UNDEFINED_OFFSET, ByteRangeType.LAST_N_BYTES);
    }

    /**
     * Private constructor; all instances come through the static factories above.
     * For {@link ByteRangeType#LAST_N_BYTES} the byte count is stored in
     * {@code startOffset} and {@code endOffset} holds {@link #UNDEFINED_OFFSET}.
     */
    private ByteRange(long startOffset, long endOffset, ByteRangeType type) {
        this.type = type;
        this.startOffset = startOffset;
        this.endOffset = endOffset;
    }

    /**
     * @return the inclusive start offset of this range.
     * @throws UnsupportedOperationException for {@link ByteRangeType#LAST_N_BYTES}
     *         ranges, which have no defined start offset.
     */
    public long getStartOffset() {
        ByteRangeType kind = getType();
        if (kind == ByteRangeType.FROM_START_OFFSET || kind == ByteRangeType.OFFSET_RANGE) {
            return startOffset;
        }
        throw new UnsupportedOperationException("Cannot get start offset for range type: " + type);
    }

    /**
     * @return the inclusive end offset of this range.
     * @throws UnsupportedOperationException unless this is an
     *         {@link ByteRangeType#OFFSET_RANGE} range.
     */
    public long getEndOffset() {
        if (getType() == ByteRangeType.OFFSET_RANGE) {
            return endOffset;
        }
        throw new UnsupportedOperationException("Cannot get end offset for range type: " + type);
    }

    /**
     * @return the number of trailing bytes this range denotes.
     * @throws UnsupportedOperationException unless this is a
     *         {@link ByteRangeType#LAST_N_BYTES} range.
     */
    public long getLastNBytes() {
        if (getType() == ByteRangeType.LAST_N_BYTES) {
            return startOffset;
        }
        throw new UnsupportedOperationException("Cannot get last N bytes for range type: " + type);
    }

    /**
     * @return the {@link ByteRangeType} of this range.
     */
    public ByteRangeType getType() {
        return type;
    }

    /**
     * @return the size of the range, in bytes.
     * @throws UnsupportedOperationException for {@link ByteRangeType#FROM_START_OFFSET}
     *         ranges, whose size depends on the (unknown) object size.
     */
    public long getRangeSize() {
        ByteRangeType kind = getType();
        if (kind == ByteRangeType.OFFSET_RANGE) {
            return getEndOffset() - getStartOffset() + 1;
        }
        if (kind == ByteRangeType.LAST_N_BYTES) {
            return getLastNBytes();
        }
        throw new UnsupportedOperationException("Cannot determine range size for range type: " + type);
    }

    /**
     * Resolves this range against a concrete blob size, producing an
     * {@link ByteRangeType#OFFSET_RANGE} with explicit, validated offsets.
     *
     * @param totalSize the total size of the blob this range applies to.
     * @return the resolved {@link ByteRange}.
     * @throws IllegalArgumentException if this range does not fit in the blob.
     */
    public ByteRange toResolvedByteRange(long totalSize) {
        ByteRangeType kind = getType();
        if (kind == ByteRangeType.LAST_N_BYTES && getLastNBytes() <= totalSize) {
            return new ByteRange(totalSize - getLastNBytes(), totalSize - 1, ByteRangeType.OFFSET_RANGE);
        }
        if (kind == ByteRangeType.FROM_START_OFFSET && getStartOffset() < totalSize) {
            return new ByteRange(getStartOffset(), totalSize - 1, ByteRangeType.OFFSET_RANGE);
        }
        if (kind == ByteRangeType.OFFSET_RANGE && getEndOffset() < totalSize) {
            return new ByteRange(getStartOffset(), getEndOffset(), ByteRangeType.OFFSET_RANGE);
        }
        throw new IllegalArgumentException("ByteRange " + this + " exceeds the total blob size " + totalSize);
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("ByteRange{").append("type=").append(type);
        if (type == ByteRangeType.LAST_N_BYTES) {
            text.append(", lastNBytes=").append(getLastNBytes());
        } else if (type == ByteRangeType.FROM_START_OFFSET) {
            text.append(", startOffset=").append(getStartOffset());
        } else if (type == ByteRangeType.OFFSET_RANGE) {
            text.append(", startOffset=").append(getStartOffset()).append(", endOffset=").append(getEndOffset());
        }
        return text.append('}').toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ByteRange that = (ByteRange) o;
        return startOffset == that.startOffset
            && endOffset == that.endOffset
            && type == that.type;
    }

    @Override
    public int hashCode() {
        int result = type != null ? type.hashCode() : 0;
        result = 31 * result + (int) (startOffset ^ (startOffset >>> 32));
        result = 31 * result + (int) (endOffset ^ (endOffset >>> 32));
        return result;
    }

    public enum ByteRangeType {
        /**
         * If this range specifies the number of bytes to read from the end of an object.
         */
        LAST_N_BYTES,

        /**
         * If this range specifies a start offset to read from to the end of an object.
         */
        FROM_START_OFFSET,

        /**
         * If this range specifies a start and end offset to read between.
         */
        OFFSET_RANGE
    }
}
|
package org.openapitools.model;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.openapitools.model.Category;
import org.openapitools.model.Tag;
import java.io.Serializable;
import javax.validation.constraints.*;
import javax.validation.Valid;
import io.swagger.annotations.*;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.annotation.JsonTypeName;
@JsonTypeName("Pet")
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaJAXRSSpecServerCodegen")
public class Pet implements Serializable {
private @Valid Long id;
private @Valid Category category;
private @Valid String name;
private @Valid Set<String> photoUrls = new LinkedHashSet<>();
private @Valid List<Tag> tags = new ArrayList<>();
// Pet status in the store; each constant carries its lower-case wire value.
public enum StatusEnum {
AVAILABLE(String.valueOf("available")), PENDING(String.valueOf("pending")), SOLD(String.valueOf("sold"));
// Wire value used for JSON (de)serialization and JAX-RS param conversion.
private String value;
StatusEnum (String v) {
value = v;
}
// Returns the raw wire value for this constant.
public String value() {
return value;
}
// @JsonValue: Jackson serializes the enum as this string.
@Override
@JsonValue
public String toString() {
return String.valueOf(value);
}
/**
 * Convert a String into String, as specified in the
 * <a href="https://download.oracle.com/otndocs/jcp/jaxrs-2_0-fr-eval-spec/index.html">See JAX RS 2.0 Specification, section 3.2, p. 12</a>
 */
public static StatusEnum fromString(String s) {
for (StatusEnum b : StatusEnum.values()) {
// using Objects.toString() to be safe if value type non-object type
// because types like 'int' etc. will be auto-boxed
if (java.util.Objects.toString(b.value).equals(s)) {
return b;
}
}
throw new IllegalArgumentException("Unexpected string value '" + s + "'");
}
// @JsonCreator: Jackson deserialization factory; exact match on the wire
// value, throwing for anything unknown.
@JsonCreator
public static StatusEnum fromValue(String value) {
for (StatusEnum b : StatusEnum.values()) {
if (b.value.equals(value)) {
return b;
}
}
throw new IllegalArgumentException("Unexpected value '" + value + "'");
}
}
private @Valid StatusEnum status;
/**
**/
public Pet id(Long id) {
this.id = id;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("id")
public Long getId() {
return id;
}
@JsonProperty("id")
public void setId(Long id) {
this.id = id;
}
/**
**/
public Pet category(Category category) {
this.category = category;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("category")
public Category getCategory() {
return category;
}
@JsonProperty("category")
public void setCategory(Category category) {
this.category = category;
}
/**
**/
public Pet name(String name) {
this.name = name;
return this;
}
@ApiModelProperty(example = "doggie", required = true, value = "")
@JsonProperty("name")
@NotNull
public String getName() {
return name;
}
@JsonProperty("name")
public void setName(String name) {
this.name = name;
}
/**
**/
public Pet photoUrls(Set<String> photoUrls) {
this.photoUrls = photoUrls;
return this;
}
@ApiModelProperty(required = true, value = "")
@JsonProperty("photoUrls")
@NotNull
public Set<String> getPhotoUrls() {
return photoUrls;
}
@JsonProperty("photoUrls")
@JsonDeserialize(as = LinkedHashSet.class)
public void setPhotoUrls(Set<String> photoUrls) {
this.photoUrls = photoUrls;
}
public Pet addPhotoUrlsItem(String photoUrlsItem) {
if (this.photoUrls == null) {
this.photoUrls = new LinkedHashSet<>();
}
this.photoUrls.add(photoUrlsItem);
return this;
}
public Pet removePhotoUrlsItem(String photoUrlsItem) {
if (photoUrlsItem != null && this.photoUrls != null) {
this.photoUrls.remove(photoUrlsItem);
}
return this;
}
/**
**/
public Pet tags(List<Tag> tags) {
this.tags = tags;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("tags")
public List<Tag> getTags() {
return tags;
}
@JsonProperty("tags")
public void setTags(List<Tag> tags) {
this.tags = tags;
}
public Pet addTagsItem(Tag tagsItem) {
if (this.tags == null) {
this.tags = new ArrayList<>();
}
this.tags.add(tagsItem);
return this;
}
public Pet removeTagsItem(Tag tagsItem) {
if (tagsItem != null && this.tags != null) {
this.tags.remove(tagsItem);
}
return this;
}
/**
* pet status in the store
**/
public Pet status(StatusEnum status) {
this.status = status;
return this;
}
@ApiModelProperty(value = "pet status in the store")
@JsonProperty("status")
public StatusEnum getStatus() {
return status;
}
@JsonProperty("status")
public void setStatus(StatusEnum status) {
this.status = status;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Pet pet = (Pet) o;
return Objects.equals(this.id, pet.id) &&
Objects.equals(this.category, pet.category) &&
Objects.equals(this.name, pet.name) &&
Objects.equals(this.photoUrls, pet.photoUrls) &&
Objects.equals(this.tags, pet.tags) &&
Objects.equals(this.status, pet.status);
}
@Override
public int hashCode() {
return Objects.hash(id, category, name, photoUrls, tags, status);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class Pet {\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" category: ").append(toIndentedString(category)).append("\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" photoUrls: ").append(toIndentedString(photoUrls)).append("\n");
sb.append(" tags: ").append(toIndentedString(tags)).append("\n");
sb.append(" status: ").append(toIndentedString(status)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
package org.apache.cayenne.testdo.relationships_delete_rules.auto;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.List;
import org.apache.cayenne.BaseDataObject;
import org.apache.cayenne.exp.ExpressionFactory;
import org.apache.cayenne.exp.property.ListProperty;
import org.apache.cayenne.exp.property.NumericProperty;
import org.apache.cayenne.exp.property.PropertyFactory;
import org.apache.cayenne.testdo.relationships_delete_rules.DeleteRuleTest1;
import org.apache.cayenne.testdo.relationships_delete_rules.DeleteRuleTest3;
/**
 * Class _DeleteRuleTest2 was generated by Cayenne.
 * It is probably a good idea to avoid changing this class manually,
 * since it may be overwritten next time code is regenerated.
 * If you need to make any customizations, please use subclass.
 */
public abstract class _DeleteRuleTest2 extends BaseDataObject {

    private static final long serialVersionUID = 1L;

    // Primary-key metadata (DB-path property and raw column name).
    public static final NumericProperty<Integer> DEL_RULE_TEST2_ID_PK_PROPERTY = PropertyFactory.createNumeric(ExpressionFactory.dbPathExp("DEL_RULE_TEST2_ID"), Integer.class);
    public static final String DEL_RULE_TEST2_ID_PK_COLUMN = "DEL_RULE_TEST2_ID";

    // Typed property descriptors for the two to-many relationships.
    public static final ListProperty<DeleteRuleTest3> DELETE_RULE_TEST3ARRAY = PropertyFactory.createList("deleteRuleTest3Array", DeleteRuleTest3.class);
    public static final ListProperty<DeleteRuleTest1> TEST1ARRAY = PropertyFactory.createList("test1Array", DeleteRuleTest1.class);

    // Relationship values; typed Object because Cayenne may hold either a
    // resolved List or an unresolved fault here.
    protected Object deleteRuleTest3Array;
    protected Object test1Array;

    /** Adds {@code obj} to the deleteRuleTest3Array relationship (reverse side is maintained). */
    public void addToDeleteRuleTest3Array(DeleteRuleTest3 obj) {
        addToManyTarget("deleteRuleTest3Array", obj, true);
    }

    /** Removes {@code obj} from the deleteRuleTest3Array relationship (reverse side is maintained). */
    public void removeFromDeleteRuleTest3Array(DeleteRuleTest3 obj) {
        removeToManyTarget("deleteRuleTest3Array", obj, true);
    }

    /** Returns the (possibly lazily resolved) deleteRuleTest3Array relationship. */
    @SuppressWarnings("unchecked")
    public List<DeleteRuleTest3> getDeleteRuleTest3Array() {
        return (List<DeleteRuleTest3>)readProperty("deleteRuleTest3Array");
    }

    /** Adds {@code obj} to the test1Array relationship (reverse side is maintained). */
    public void addToTest1Array(DeleteRuleTest1 obj) {
        addToManyTarget("test1Array", obj, true);
    }

    /** Removes {@code obj} from the test1Array relationship (reverse side is maintained). */
    public void removeFromTest1Array(DeleteRuleTest1 obj) {
        removeToManyTarget("test1Array", obj, true);
    }

    /** Returns the (possibly lazily resolved) test1Array relationship. */
    @SuppressWarnings("unchecked")
    public List<DeleteRuleTest1> getTest1Array() {
        return (List<DeleteRuleTest1>)readProperty("test1Array");
    }

    // Direct field access used by Cayenne internals; falls through to the
    // superclass for any property this subclass does not declare.
    @Override
    public Object readPropertyDirectly(String propName) {
        if(propName == null) {
            throw new IllegalArgumentException();
        }
        switch(propName) {
            case "deleteRuleTest3Array":
                return this.deleteRuleTest3Array;
            case "test1Array":
                return this.test1Array;
            default:
                return super.readPropertyDirectly(propName);
        }
    }

    @Override
    public void writePropertyDirectly(String propName, Object val) {
        if(propName == null) {
            throw new IllegalArgumentException();
        }
        switch (propName) {
            case "deleteRuleTest3Array":
                this.deleteRuleTest3Array = val;
                break;
            case "test1Array":
                this.test1Array = val;
                break;
            default:
                super.writePropertyDirectly(propName, val);
        }
    }

    // Java serialization hooks delegate to Cayenne's serialization support.
    private void writeObject(ObjectOutputStream out) throws IOException {
        writeSerialized(out);
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        readSerialized(in);
    }

    // NOTE: the field order in writeState and readState must stay identical —
    // state is read back positionally.
    @Override
    protected void writeState(ObjectOutputStream out) throws IOException {
        super.writeState(out);
        out.writeObject(this.deleteRuleTest3Array);
        out.writeObject(this.test1Array);
    }

    @Override
    protected void readState(ObjectInputStream in) throws IOException, ClassNotFoundException {
        super.readState(in);
        this.deleteRuleTest3Array = in.readObject();
        this.test1Array = in.readObject();
    }
}
|
package mykalah.service;
import mykalah.data.Player;
import mykalah.data.PlayerRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional(isolation = Isolation.SERIALIZABLE)
public class PlayerServiceImpl implements PlayerService {
public PlayerServiceImpl() {
}
@Autowired
private PlayerRepository playerRepository;
public Player findPlayerByName(String username) {
return playerRepository.findPlayerByName(username);
}
public Player save(Player player) {
return playerRepository.save(player);
}
}
|
package com.hitchhikerprod.deathlord;
import com.google.common.base.CaseFormat;
import java.util.EnumSet;
import java.util.Formatter;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * A character's equipment block, decoded from a raw save/disk byte slice.
 * Each equipment slot's item id lives at a fixed index in {@code data},
 * with the matching charges byte 0x8 bytes later.
 */
public class Equipment {
    // One optional item per equipment slot; empty when the slot is unoccupied.
    private Optional<Item> handWeapon;
    private Optional<Item> missileWeapon;
    private Optional<Item> bodyArmor;
    private Optional<Item> shield;
    private Optional<Item> miscArmor;
    private Optional<Item> miscMagic;
    private Optional<Item> tool;
    private Optional<Item> scroll;
    // Raw bytes retained for the flag/unknown region dumped in toString().
    private short[] bytes;

    /**
     * Decodes all eight slots from the raw data. For each slot, the item id
     * is at IDX_* and its charges byte at IDX_* + 0x8.
     */
    public Equipment(short[] data) {
        bytes = data;
        handWeapon = Item.fromDisk(data[IDX_HAND_WEAPON], data[IDX_HAND_WEAPON + 0x8]);
        missileWeapon = Item.fromDisk(data[IDX_MISSILE_WEAPON], data[IDX_MISSILE_WEAPON + 0x8]);
        bodyArmor = Item.fromDisk(data[IDX_BODY_ARMOR], data[IDX_BODY_ARMOR + 0x8]);
        shield = Item.fromDisk(data[IDX_SHIELD], data[IDX_SHIELD + 0x8]);
        miscArmor = Item.fromDisk(data[IDX_MISC_ARMOR], data[IDX_MISC_ARMOR + 0x8]);
        miscMagic = Item.fromDisk(data[IDX_MISC_MAGIC], data[IDX_MISC_MAGIC + 0x8]);
        tool = Item.fromDisk(data[IDX_TOOL], data[IDX_TOOL + 0x8]);
        scroll = Item.fromDisk(data[IDX_SCROLL], data[IDX_SCROLL + 0x8]);
    }

    /**
     * Human-readable dump: present items joined with commas, a ", Boat"
     * marker when byte 0x0f is zero, and a hex dump of any non-0xff bytes
     * in the undecoded 0x10-0x1f region.
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder();
        Formatter fmt = new Formatter(buffer);
        String equipment = " Equip: " +
            Stream.of(handWeapon, missileWeapon, bodyArmor, shield, miscArmor, tool, miscMagic, scroll)
                .filter(Optional::isPresent)
                .map(o -> o.get().toString())
                .collect(Collectors.joining(", "));
        buffer.append(equipment);
        /*
        handWeapon.ifPresent (i -> buffer.append(" Hand:").append(i.toString()));
        missileWeapon.ifPresent (i -> buffer.append(" Missile:").append(i.toString()));
        bodyArmor.ifPresent (i -> buffer.append(" Armor:").append(i.toString()));
        shield.ifPresent (i -> buffer.append(" Shield:").append(i.toString()));
        miscArmor.ifPresent (i -> buffer.append(" Helm:").append(i.toString()));
        tool.ifPresent (i -> buffer.append(" Tool:").append(i.toString()));
        miscMagic.ifPresent (i -> buffer.append(" Misc:").append(i.toString()));
        scroll.ifPresent (i -> buffer.append(" Scroll:").append(i.toString()));
        */
        // Byte 0x0f == 0 marks boat ownership.
        if (bytes[0x0f] == 0) buffer.append(", Boat");
        // Undecoded region: print index and raw value for anything not 0xff.
        for (int i = 0x10; i < 0x20; i++) {
            if (bytes[i] != 0xff) fmt.format(" $%02x:%2x", i, bytes[i]);
        }
        return buffer.toString();
    }

    /** Equipment slot categories as encoded on disk. */
    enum Slot {
        HAND_WEAPON(0x0), MISSILE_WEAPON(0x1), BODY_ARMOR(0x2), SHIELD(0x3), MISC_ARMOR(0x4), MISC_MAGIC(0x5),
        TOOL(0x6), SCROLL(0x7);
        private int diskValue;
        Slot(int diskValue) { this.diskValue = diskValue; }
        /** Reverse-lookup from the on-disk value; empty when unrecognized. */
        private static Optional<Slot> from(int val) {
            return EnumSet.allOf(Slot.class).stream()
                .filter(a -> a.diskValue == val)
                .findFirst();
        }
        private int toInt() { return diskValue; }
    }

    /** Creature categories used for "slays" bonuses, as encoded on disk. */
    enum CreatureType {
        DRAGON(0x0), GIANT(0x01), DEMON(0x02), UNDEAD(0x03), DEATHLORD(0x04), UNIQUE(0x05);
        private int diskValue;
        CreatureType(int diskValue) { this.diskValue = diskValue; }
        /** Reverse-lookup from the on-disk value; empty when unrecognized. */
        private static Optional<CreatureType> from(int val) {
            return EnumSet.allOf(CreatureType.class).stream()
                .filter(a -> a.diskValue == val)
                .findFirst();
        }
        private int toInt() { return diskValue; }
    }

    /** Spell-like item effects, as encoded on disk. */
    enum Effect {
        INOCHI(0x00), ALNASU(0x01), ZUMA(0x02), KOROSU(0x03), MOINOCHI(0x04), TSUIHO(0x05), KAKUSU(0x06),
        HOHYO(0x07), MOAKARI(0x08), DONASU(0x09), HITATE(0x0a), SANTATE(0x0b), PASSWALL(0x0c), LIGHT(0x0d), E(0x0e);
        private int diskValue;
        Effect(int diskValue) { this.diskValue = diskValue; }
        /** Reverse-lookup from the on-disk value; empty when unrecognized. */
        private static Optional<Effect> from(int val) {
            return EnumSet.allOf(Effect.class).stream()
                .filter(a -> a.diskValue == val)
                .findFirst();
        }
        private int toInt() { return diskValue; }
    }

    /**
     * The full item table. Columns follow the constructor parameters:
     * diskValue, slot, unknownAttribute, weightClass, numAttacks, maxDamage,
     * attackBonus, armorClassBonus
     * [, slays, raceRestriction, classRestriction, maxCharges, specialEffect].
     * A weightClass of -1 appears on non-wieldable items (tools, scrolls, misc magic).
     */
    enum Item {
        TANTO (0x00, Slot.HAND_WEAPON, 0x01, 0, 2, 3,-2,-1),
        CLOAK (0x01, Slot.BODY_ARMOR, 0x02, 0, 0, 0, 0, 1),
        JO_STICK (0x02, Slot.HAND_WEAPON, 0x03, 1, 1, 7,-1, 0),
        HARA_ATE (0x03, Slot.BODY_ARMOR, 0x06, 1, 0, 0, 0, 2),
        SAI (0x04, Slot.HAND_WEAPON, 0x05, 2, 2, 4, 0, 0),
        GLOVES (0x05, Slot.MISC_ARMOR, 0x01, 0, 0, 0, 0, 1),
        SMALL_SHIELD (0x06, Slot.SHIELD, 0x03, 1, 0, 0, 0, 1),
        HARAME_DO (0x07, Slot.BODY_ARMOR, 0x0b, 1, 0, 0, 0, 3),
        SLING (0x08, Slot.MISSILE_WEAPON, 0x03, 1, 1, 6,-2, 1),
        GAUNTLETS (0x09, Slot.MISC_ARMOR, 0x07, 2, 0, 0, 0, 2),
        BO_STAFF (0x0a, Slot.HAND_WEAPON, 0x02, 0, 1, 7, 0,-1),
        MEDIUM_SHIELD (0x0b, Slot.SHIELD, 0x08, 2, 0, 0, 0, 2),
        HARAMAKIDO (0x0c, Slot.BODY_ARMOR, 0x15, 2, 0, 0, 0, 4),
        LIGHT_BOW (0x0d, Slot.MISSILE_WEAPON, 0x0b, 2, 2, 4,-1, 1),
        JINGASA (0x0e, Slot.MISC_ARMOR, 0x0a, 2, 0, 0, 0, 1),
        LOCK_PICK (0x0f, Slot.TOOL, 0x08,-1, 0, 0, 0, 0),
        MASAKARI (0x10, Slot.HAND_WEAPON, 0x08, 3, 1, 8, 0, 0),
        GREAT_SHIELD (0x11, Slot.SHIELD, 0x0c, 3, 0, 0,-1, 3),
        CROSSBOW (0x12, Slot.MISSILE_WEAPON, 0x0f, 2, 1,10,+1, 1),
        GLAIVE (0x13, Slot.HAND_WEAPON, 0x0c, 3, 1, 9, 0, 0),
        KABUTO (0x14, Slot.MISC_ARMOR, 0x12, 3, 0, 0, 0, 2),
        HEAVY_BOW (0x15, Slot.MISSILE_WEAPON, 0x15, 3, 2, 6, 0, 1),
        HOLY_SYMBOL (0x16, Slot.TOOL, 0x0a,-1, 0, 0, 0, 0, null, null, Character.CharacterClass.SHISAI, 12, Effect.TSUIHO),
        DO_MARU (0x17, Slot.BODY_ARMOR, 0x29, 3, 0, 0,-1, 5),
        SHURIKEN (0x18, Slot.MISSILE_WEAPON, 0x07, 2, 3, 3, 0, 1, null, null, Character.CharacterClass.NINJA, null, null),
        NAGINATA (0x19, Slot.HAND_WEAPON, 0x19, 3, 1,10,+1, 1),
        GREAT_BOW (0x1a, Slot.MISSILE_WEAPON, 0x1e, 3, 2, 8,+1, 1),
        YOROI (0x1b, Slot.BODY_ARMOR, 0x3d, 3, 0, 0,-2, 6),
        NUNCHAKU (0x1c, Slot.HAND_WEAPON, 0x0a, 2, 2, 9,+3, 0, null, null, Character.CharacterClass.NINJA, null, null),
        WAKIZASHI (0x1d, Slot.HAND_WEAPON, 0x28, 2, 2,10,+1, 0, null, null, Character.CharacterClass.SAMURAI, null, null),
        KATANA (0x1e, Slot.HAND_WEAPON, 0x3c, 3, 2,14,+1, 1, null, null, Character.CharacterClass.SAMURAI, null, null),
        SCROLL_HEAL (0x1f, Slot.SCROLL, 0x00,-1, 0, 0, 0, 0, null, null, Character.CharacterClass.SHISAI, 1, Effect.ALNASU),
        TOSHI_CLOAK (0x20, Slot.BODY_ARMOR, 0x00, 0, 0, 0, 0, 2, null, null, null, 16, Effect.KAKUSU),
        HARA_ATE_PLUS_ONE (0x21, Slot.BODY_ARMOR, 0x00, 1, 0, 0, 0, 3),
        HARAME_DO_PLUS_ONE (0x22, Slot.BODY_ARMOR, 0x00, 1, 0, 0, 0, 4),
        BRONZE_SHIELD (0x23, Slot.SHIELD, 0x00, 1, 0, 0, 0, 3),
        GOLD_JINGASA (0x24, Slot.MISC_ARMOR, 0x00, 1, 0, 0, 0, 2),
        SCROLL_WARD (0x25, Slot.SCROLL, 0x00,-1, 0, 0, 0, 0, null, null, Character.CharacterClass.MAHOTSUKAI, 1, Effect.HOHYO),
        ROD_OF_LIGHT (0x26, Slot.HAND_WEAPON, 0x00, 0, 1, 8,+1, 0, null, null, null, 0x18, Effect.MOAKARI),
        DRAGONSLAYER (0x27, Slot.HAND_WEAPON, 0x00, 2, 1,12,+1, 1, CreatureType.DRAGON, null, null, null, null),
        GIANTSLAYER (0x28, Slot.HAND_WEAPON, 0x00, 2, 1,12,+1, 1, CreatureType.GIANT, null, null, null, null),
        DEFENDER (0x29, Slot.HAND_WEAPON, 0x00, 2, 1,10,+1, 4, null, null, null, 10, Effect.HOHYO),
        SCROLL_CURE (0x2a, Slot.SCROLL, 0x00,-1, 0, 0, 0, 0, null, null, Character.CharacterClass.SHIZEN, 1, Effect.DONASU),
        HARAMAKIDO_PLUS_ONE (0x2b, Slot.BODY_ARMOR, 0x00, 2, 0, 0, 0, 5),
        BERZERK_SWORD (0x2c, Slot.HAND_WEAPON, 0x00, 3, 4, 8,+1, 0, null, null, Character.CharacterClass.KICHIGAI, null, null),
        TOSHI_BOW (0x2d, Slot.MISSILE_WEAPON, 0x00, 2, 3, 8,+3, 2, null, Character.Race.TOSHI, null, null, null),
        SABLE_CLOAK (0x2e, Slot.BODY_ARMOR, 0x00, 0, 0, 0, 0, 3),
        KOBITO_HAMMER (0x2f, Slot.MISSILE_WEAPON, 0x00, 1, 1,16,+1, 2, null, Character.Race.KOBITO, null, null, null),
        SILVER_SHIELD (0x30, Slot.SHIELD, 0x00, 2, 0, 0, 0, 4),
        SWORD_OF_FIRE (0x31, Slot.HAND_WEAPON, 0x00, 2, 1,12,+1, 1, CreatureType.UNDEAD, null, null, 10, Effect.HITATE),
        ROD_OF_DEATH (0x32, Slot.HAND_WEAPON, 0x00, 1, 2,10,+2, 1),
        THUNDERBLADE (0x33, Slot.HAND_WEAPON, 0x00, 3, 1,14,+2, 1, null, null, null, 8, Effect.ZUMA),
        DO_MARU_PLUS_ONE (0x34, Slot.BODY_ARMOR, 0x00, 2, 0, 0, 0, 6),
        YOROI_PLUS_ONE (0x35, Slot.BODY_ARMOR, 0x00, 3, 0, 0,-1, 7),
        SCROLL_RAISE (0x36, Slot.SCROLL, 0x00,-1, 0, 0, 0, 0, null, null, Character.CharacterClass.SHISAI, 1, Effect.INOCHI),
        UNHOLY_BLADE (0x37, Slot.HAND_WEAPON, 0x00, 3, 1,16,+3, 2, null, null, Character.CharacterClass.RONIN, 6, Effect.KOROSU),
        HOLY_BLADE (0x38, Slot.HAND_WEAPON, 0x00, 3, 1,15,+3, 2, CreatureType.DEMON, null, Character.CharacterClass.KISHI, 6, Effect.ALNASU),
        GOLDEN_CROWN (0x39, Slot.MISC_ARMOR, 0x00, 2, 0, 0, 0, 3),
        DO_MARU_PLUS_TWO (0x3a, Slot.BODY_ARMOR, 0x00, 2, 0, 0, 0, 7),
        POWERSTAFF (0x3b, Slot.HAND_WEAPON, 0x00, 0, 1,12,+2, 4, null, null, Character.CharacterClass.MAHOTSUKAI, 0x14, Effect.ZUMA),
        RUNEBLADE (0x3c, Slot.HAND_WEAPON, 0x00, 3, 1,15,+2, 2, null, null, null, 6, Effect.KOROSU),
        GOLDEN_GLOVES (0x3d, Slot.MISC_ARMOR, 0x00, 3, 0, 0,+2, 3, null, null, null, null, null),
        FALCON_HELM (0x3e, Slot.MISC_ARMOR, 0x00, 3, 0, 0, 0, 4),
        RING_OF_LIFE (0x3f, Slot.MISC_MAGIC, 0x00,-1, 0, 0, 0, 1, null, null, Character.CharacterClass.SHISAI, 6, Effect.MOINOCHI),
        GOLDEN_YOROI (0x40, Slot.BODY_ARMOR, 0x00, 3, 0, 0, 0,10),
        GOLDEN_SHIELD (0x41, Slot.SHIELD, 0x00, 2, 0, 0, 0, 5),
        SKULL_SHIELD (0x42, Slot.SHIELD, 0x00, 3, 0, 0, 0, 6, null, null, Character.CharacterClass.RONIN, null, null),
        LANTERN (0x43, Slot.TOOL, 0x00,-1, 0, 0, 0, 0, null, null, null, 0, Effect.LIGHT),
        SHARKTOOTH (0x44, Slot.MISC_MAGIC, 0x00,-1, 0, 0, 0, 0, null, null, null, 0, null),
        BLUE_CRYSTAL (0x45, Slot.MISC_MAGIC, 0x00,-1, 0, 0, 0, 0, null, null, null, 0, Effect.PASSWALL),
        SUNSPEAR (0x46, Slot.HAND_WEAPON, 0x00, 0, 1,24,+3, 3, CreatureType.DEATHLORD, null, null, 0, null),
        DOCUMENT (0x47, Slot.SCROLL, 0x00,-1, 0, 0, 0, 0, null, null, null, 0, Effect.E),
        RUBY_RING (0x48, Slot.MISC_MAGIC, 0x00,-1, 0, 0, 0, 2, null, null, null, 0, Effect.HITATE),
        EMERALD_ROD (0x49, Slot.HAND_WEAPON, 0x00, 1, 1,16,+4, 6, CreatureType.DEMON, null, null, 0, Effect.SANTATE),
        BLACK_ORB (0x4a, Slot.MISC_MAGIC, 0x00,-1, 0, 0, 0, 0, null, null, null, 0, null);

        public int diskValue;            // item id byte as stored on disk
        public Slot slot;                // which equipment slot this item occupies
        public int unknownAttribute;     // meaning not yet decoded — TODO confirm
        public int weightClass;          // -1 on non-wieldable items
        public int numAttacks;
        public int maxDamage;
        public int attackBonus;
        public int armorClassBonus;
        public Optional<CreatureType> slays;
        public Optional<Character.Race> raceRestriction;
        public Optional<Character.CharacterClass> classRestriction;
        public Optional<Integer> maxCharges;
        public int charges;              // set from disk in fromDisk() when maxCharges is present
        public Optional<Effect> specialEffect;

        /** Short-form constructor for plain items with no restrictions, charges, or effects. */
        Item(int diskValue, Slot slot, int unknownAttribute, int weightClass, int numAttacks, int maxDamage,
             int attackBonus, int armorClassBonus) {
            this.diskValue = diskValue;
            this.slot = slot;
            this.unknownAttribute = unknownAttribute;
            this.weightClass = weightClass;
            this.numAttacks = numAttacks;
            this.maxDamage = maxDamage;
            this.attackBonus = attackBonus;
            this.armorClassBonus = armorClassBonus;
            this.slays = Optional.empty();
            this.raceRestriction = Optional.empty();
            this.classRestriction = Optional.empty();
            this.maxCharges = Optional.empty();
            this.specialEffect = Optional.empty();
        }

        /** Full-form constructor; nulls are wrapped to Optional.empty(). */
        Item(int diskValue, Slot slot, int unknownAttribute, int weightClass, int numAttacks, int maxDamage,
             int attackBonus, int armorClassBonus, CreatureType slays, Character.Race raceRestriction,
             Character.CharacterClass classRestriction, Integer maxCharges, Effect specialEffect)
        {
            this.diskValue = diskValue;
            this.slot = slot;
            this.unknownAttribute = unknownAttribute;
            this.weightClass = weightClass;
            this.numAttacks = numAttacks;
            this.maxDamage = maxDamage;
            this.attackBonus = attackBonus;
            this.armorClassBonus = armorClassBonus;
            this.slays = Optional.ofNullable(slays);
            this.raceRestriction = Optional.ofNullable(raceRestriction);
            this.classRestriction = Optional.ofNullable(classRestriction);
            this.maxCharges = Optional.ofNullable(maxCharges);
            this.specialEffect = Optional.ofNullable(specialEffect);
        }

        /**
         * Looks up an item by its on-disk id and records its charges byte.
         * NOTE(review): charges is stored on the shared enum constant, so two
         * characters holding the same chargeable item overwrite each other's
         * charge count — acceptable for a one-shot dump tool, but verify
         * before reusing this class elsewhere.
         */
        public static Optional<Item> fromDisk(short diskValue, short charges) {
            Optional<Item> item = EnumSet.allOf(Item.class).stream()
                .filter(i -> i.diskValue == (int) diskValue)
                .findFirst();
            item.ifPresent(i -> {
                if (i.maxCharges.isPresent()) i.charges = charges;
            });
            return item;
        }

        /** Converts the UPPER_UNDERSCORE constant name to UpperCamel for display. */
        public String unenumify() {
            return CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, name());
        }

        /** Display name, with charge count appended and "+1"/"+2" suffixes restored. */
        public String toString() {
            String camelCase;
            if (this.maxCharges.isPresent())
                camelCase = this.unenumify() + "(" + this.charges + ")";
            else
                camelCase = this.unenumify();
            camelCase = camelCase.replace("PlusOne", "+1");
            camelCase = camelCase.replace("PlusTwo", "+2");
            return camelCase;
        }
    }

    // Slot offsets within a character's equipment region.
    // base index = 0x20, 0x40, 0x60, 0x80, 0xa0, 0xc0
    private static final int IDX_HAND_WEAPON = 0x00;
    private static final int IDX_MISSILE_WEAPON = 0x01;
    private static final int IDX_BODY_ARMOR = 0x02;
    private static final int IDX_SHIELD = 0x03;
    private static final int IDX_MISC_ARMOR = 0x04;
    private static final int IDX_MISC_MAGIC = 0x05;
    private static final int IDX_TOOL = 0x06;
    private static final int IDX_SCROLL = 0x07;
}
|
package com.transferwise.kafka.tkms.demoapp;
import com.transferwise.common.baseutils.ExceptionUtils;
import com.transferwise.kafka.tkms.api.ITransactionalKafkaMessageSender;
import com.transferwise.kafka.tkms.api.TkmsMessage;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.concurrent.ThreadLocalRandom;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.support.TransactionTemplate;
/**
 * Load generator: spawns producer threads that write batches of
 * transactional Kafka messages and tracks the insertion pace.
 */
@Component
@Slf4j
public class MessagesProducer {

    @Autowired
    private ITransactionalKafkaMessageSender tkms;

    @Autowired
    private TransactionTemplate transactionTemplate;

    @Autowired
    private PaceTracker paceTracker;

    /**
     * Runs {@code threadCount} worker threads, each committing
     * {@code batchCount} transactions of {@code batchSize} messages,
     * and blocks until every worker has finished.
     */
    @SneakyThrows
    public void produce(long threadCount, long batchCount, long batchSize) {
        paceTracker.startTracking();

        // Build a ~1KB payload: fixed prefix plus random lowercase filler.
        StringBuilder payloadBuilder = new StringBuilder("Hello World!");
        int additionalMessageBytes = 1000;
        for (int n = 0; n < additionalMessageBytes; n++) {
            payloadBuilder.append((char) ('a' + ThreadLocalRandom.current().nextInt(26)));
        }
        String textMessage = payloadBuilder.toString();

        Thread[] workers = new Thread[(int) threadCount];
        for (long w = 0; w < threadCount; w++) {
            final long workerIdx = w;
            workers[(int) w] = new Thread(() -> {
                try {
                    for (long batch = 0; batch < batchCount; batch++) {
                        final long batchIdx = batch;
                        transactionTemplate.executeWithoutResult(status -> {
                            for (long m = 0; m < batchSize; m++) {
                                // Globally unique, monotonically increasing key.
                                String key = String.valueOf(workerIdx * batchCount * batchSize + batchIdx * batchSize + m);
                                tkms.sendMessage(new TkmsMessage()
                                    .setTopic("MyTopic")
                                    .setTimestamp(Instant.now())
                                    .setKey(key)
                                    .setValue(textMessage.getBytes(StandardCharsets.UTF_8)));
                                paceTracker.messagesInserted(1);
                                ExceptionUtils.doUnchecked(() -> {
                                    // Can be used to check if keeping transactions open is creating any long locks somewhere.
                                    // Thread.sleep(10000);
                                });
                            }
                        });
                    }
                } catch (Exception e) {
                    log.error(e.getMessage(), e);
                }
            });
        }

        for (Thread worker : workers) {
            worker.start();
        }
        for (Thread worker : workers) {
            worker.join();
        }
    }
}
|
package com.backstage.common.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method for data-scope (row-level permission) filtering.
 *
 * @author jack.lin
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface DataScope
{
    /**
     * Alias of the department table used when building the filter clause.
     */
    String deptAlias() default "";

    /**
     * Alias of the user table used when building the filter clause.
     */
    String userAlias() default "";
}
|
package media;
public class Main {

    /**
     * Demo driver: builds a catalog with one movie and one book, exercises
     * play/save/load, and lists the catalog before and after the round trip.
     */
    public static void main(String[] args) {
        Catalog catalog = new Catalog();

        // Configure the movie; each setter may reject invalid values.
        Movie movie = new Movie("Titanic", "C:\\Users\\Gabi\\Desktop\\item\\movie.mp4");
        try {
            movie.setYear(1997);
        } catch (Exception yearError) {
            System.out.println("Exception occurred:" + yearError);
        }
        try {
            movie.setRating(9.8);
        } catch (Exception ratingError) {
            System.out.println("Exception occurred:" + ratingError);
        }

        // Configure the book.
        Book book = new Book("Povesti", "C:\\Users\\Gabi\\Desktop\\item\\book.pdf");
        try {
            book.setYear(2020);
        } catch (Exception yearError) {
            System.out.println("Exception occurred:" + yearError);
        }
        book.setAuthor("Ion Creanga");

        // Populate, display, play one entry, then round-trip through storage.
        catalog.add(movie);
        catalog.add(book);
        catalog.list();
        catalog.play(1);
        catalog.save();
        catalog.load();

        System.out.println();
        catalog.list();
    }
}
|
/*
* Copyright (c) 2015-2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.identity.provisioning.connector;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.provisioning.IdentityProvisioningException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
 * Holds the configuration properties for the Inwebo provisioning connector
 * and exposes simple key-based lookups over them.
 */
public class InweboProvisioningConnectorConfig {

    private static final Log log = LogFactory.getLog(InweboProvisioningConnectorConfig.class);

    private Properties configs;

    public InweboProvisioningConnectorConfig(Properties configs) {
        this.configs = configs;
    }

    /** Attribute names that must be provisioned; this connector requires none. */
    public List<String> getRequiredAttributeNames() {
        return new ArrayList<String>();
    }

    /** The user-id claim URI; this connector does not define one. */
    public String getUserIdClaim() throws IdentityProvisioningException {
        return null;
    }

    /** Looks up a single configuration property by key. */
    public String getValue(String key) {
        return configs.getProperty(key);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.examples.java.relational.util;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import java.util.ArrayList;
import java.util.List;
/**
* Provides the default data sets used for the Weblog Analysis example program.
* The default data sets are used, if no parameters are given to the program.
*/
public class WebLogData {
/**
 * Default document data set: each row is a {@code {url, content}} pair of
 * synthetic lorem-ipsum text, used by {@link #getDocumentDataSet} when the
 * example program is run without parameters.
 */
public static final Object[][] DOCUMENTS = {
new Object[]{"url_0", "dolor ad amet enim laoreet nostrud veniam aliquip ex nonummy diam dolore tincidunt tation exerci exerci wisi dolor nostrud "},
new Object[]{"url_1", "wisi minim adipiscing nibh adipiscing ut nibh Lorem Ut nonummy euismod nibh wisi sit consectetuer exerci sed aliquip aliquip dolore aliquam enim dolore veniam aliquam euismod suscipit ad adipiscing exerci aliquip consectetuer euismod aliquip ad exerci ex nibh ex erat exerci laoreet lobortis quis "},
new Object[]{"url_2", "diam sed convection aliquip amet commodo nonummy sed sed commodo commodo diam commodo adipiscing ad exerci magna exerci tation quis lobortis "},
new Object[]{"url_3", "exerci suscipit sed lobortis amet lobortis aliquip nibh nostrud ad convection commodo ad nibh sed minim amet ad ea ea "},
new Object[]{"url_4", "sit enim dolor quis laoreet ullamcorper veniam adipiscing ex quis commodo "},
new Object[]{"url_5", "elit aliquip ea nisl oscillations sit dolor ipsum tincidunt ullamcorper dolore enim adipiscing laoreet elit ea volutpat adipiscing ea nibh nostrud Ut aliquam veniam Lorem laoreet veniam aliquip "},
new Object[]{"url_6", "consectetuer ad sed suscipit euismod aliquip quis ullamcorper oscillations tation consectetuer tation amet suscipit nibh enim nonummy veniam commodo commodo diam euismod dolor Ut aliquip diam ex ad nonummy ad tincidunt minim exerci consectetuer veniam convection aliquam ut ut Lorem euismod sed ipsum volutpat "},
new Object[]{"url_7", "Ut volutpat veniam ut consectetuer diam ut aliquam dolor nostrud erat consectetuer adipiscing exerci consectetuer Ut ullamcorper suscipit aliquam sed dolor nisl "},
new Object[]{"url_8", "suscipit amet wisi nisl veniam lobortis sit Lorem aliquam nostrud aliquam ipsum ut laoreet suscipit Lorem laoreet editors adipiscing ullamcorper veniam erat consectetuer ut lobortis dolore elit sed tincidunt ipsum tation ullamcorper nonummy adipiscing ex ad laoreet ipsum suscipit lobortis lobortis Ut nonummy adipiscing erat volutpat aliquam "},
new Object[]{"url_9", "nonummy commodo tation editors ut quis sit quis lobortis ea dolore oscillations diam ad dolor lobortis nisl ad veniam ullamcorper quis magna volutpat sit ipsum consectetuer dolore exerci commodo magna erat enim ut suscipit "},
new Object[]{"url_10", "amet erat magna consectetuer tation tation aliquip nibh aliquam sed adipiscing ut commodo ex erat tincidunt aliquam ipsum Ut Ut sit tincidunt adipiscing suscipit minim sed erat dolor consectetuer Lorem consectetuer Lorem amet nibh diam ea ex enim suscipit wisi dolor nonummy magna enim euismod ullamcorper ut suscipit adipiscing "},
new Object[]{"url_11", "ex quis exerci tation diam elit nostrud nostrud ut ipsum elit amet diam laoreet amet consectetuer volutpat sed lobortis "},
new Object[]{"url_12", "elit suscipit sit ullamcorper ut ad erat ut dolor nostrud quis nisl enim erat dolor convection ad minim ut veniam nostrud sed editors adipiscing volutpat Ut aliquip commodo sed euismod adipiscing erat adipiscing dolore nostrud minim sed lobortis ea diam "},
new Object[]{"url_13", "enim ut quis commodo veniam minim erat lobortis ad diam ex dolor tincidunt exerci ut aliquip tincidunt minim ut magna sed enim wisi veniam oscillations Lorem consectetuer "},
new Object[]{"url_14", "nibh ipsum ullamcorper volutpat ut wisi dolor quis amet euismod quis ipsum ipsum minim tation volutpat sit exerci volutpat amet nonummy euismod veniam consectetuer sit consectetuer tincidunt nibh aliquam lobortis tation veniam ut ullamcorper wisi magna Ut volutpat consectetuer erat quis dolore ea tation "},
new Object[]{"url_15", "ad wisi sed enim aliquam oscillations nibh Lorem lobortis veniam nibh laoreet nonummy sed nibh Lorem adipiscing diam magna nostrud magna oscillations ut oscillations elit nostrud diam editors Lorem "},
new Object[]{"url_16", "nostrud volutpat veniam exerci tincidunt nostrud quis elit ipsum ea nonummy volutpat dolor elit lobortis magna nisl ut ullamcorper magna Lorem exerci nibh nisl magna editors erat aliquam aliquam ullamcorper sit aliquam sit nostrud oscillations consectetuer adipiscing suscipit convection exerci ea ullamcorper ex nisl "},
new Object[]{"url_17", "ad ex aliquam erat aliquam elit veniam laoreet ut amet amet nostrud ut adipiscing Ut Lorem suscipit ex magna ullamcorper aliquam ullamcorper ullamcorper amet amet commodo aliquam volutpat nonummy nonummy tincidunt amet tation tincidunt volutpat ut veniam nisl erat dolor enim nonummy nostrud adipiscing laoreet adipiscing "},
new Object[]{"url_18", "lobortis ipsum ex tincidunt tincidunt editors euismod consectetuer ipsum adipiscing lobortis exerci adipiscing nonummy nisl dolore nonummy erat exerci nisl ut dolore wisi volutpat lobortis magna "},
new Object[]{"url_19", "ipsum tation laoreet tation adipiscing wisi nibh diam Ut suscipit ad wisi "},
new Object[]{"url_20", "diam Lorem enim wisi ad lobortis dolor Ut ipsum amet dolore consectetuer nisl exerci nisl nonummy minim Ut erat oscillations ut Lorem nostrud dolore Ut dolore exerci ad ipsum dolore ex dolore aliquip sed aliquam ex aliquip magna amet ex dolore oscillations aliquip tation magna Ut "},
new Object[]{"url_21", "lobortis ut amet ex nisl ullamcorper tincidunt ut elit diam quis suscipit ad amet ipsum magna Ut ex tincidunt "},
new Object[]{"url_22", "amet commodo nisl ad quis lobortis ut commodo sit ut erat exerci lobortis suscipit nibh ut nostrud ut adipiscing commodo commodo quis quis nostrud nisl ipsum nostrud laoreet Lorem nostrud erat nostrud amet consectetuer laoreet oscillations wisi sit magna nibh amet "},
new Object[]{"url_23", "adipiscing suscipit suscipit aliquip suscipit consectetuer minim magna ea erat nibh sit suscipit sed dolor oscillations nonummy volutpat ut tincidunt "},
new Object[]{"url_24", "commodo sed tincidunt aliquip aliquip dolore commodo nonummy sed erat ut ex exerci dolore adipiscing tincidunt ex diam amet aliquam "},
new Object[]{"url_25", "consectetuer consectetuer exerci quis ea veniam aliquam laoreet minim ex "},
new Object[]{"url_26", "dolor exerci euismod minim magna quis erat consectetuer sed ex erat dolore quis ut oscillations ullamcorper Lorem exerci ex nibh ut exerci ullamcorper veniam nibh ut commodo ut Ut nostrud tincidunt tincidunt ad dolore Lorem ea tation enim erat nibh ut ea nonummy sed sed wisi nisl dolore "},
new Object[]{"url_27", "amet elit ea ea nostrud editors Ut nostrud amet laoreet adipiscing ut nisl nonummy tincidunt ea ipsum ex dolore dolore oscillations sit minim Ut wisi ut laoreet minim elit "},
new Object[]{"url_28", "wisi exerci volutpat Ut nostrud euismod minim Ut sit euismod ut ea magna consectetuer nisl ad minim tation nisl adipiscing Lorem aliquam quis exerci erat minim aliquip sit Lorem wisi wisi ut "},
new Object[]{"url_29", "amet sed laoreet amet aliquam minim enim tincidunt Lorem sit aliquip amet suscipit ut laoreet elit suscipit erat ut tincidunt suscipit ipsum sed euismod elit dolore euismod dolore ut dolor nostrud ipsum tincidunt commodo adipiscing aliquam ut wisi dolor dolor suscipit "},
new Object[]{"url_30", "euismod Lorem ex tincidunt amet enim minim suscipit exerci diam veniam amet nostrud ea ea "},
new Object[]{"url_31", "ex ipsum sit euismod euismod ullamcorper tincidunt ut wisi ea adipiscing sed diam tation ipsum dolor aliquam veniam nonummy aliquip aliquip Lorem ut minim nisl tation sit exerci ullamcorper Ut dolor euismod aliquam consectetuer ad nonummy commodo exerci "},
new Object[]{"url_32", "volutpat ipsum lobortis nisl veniam minim adipiscing dolor editors quis nostrud amet nostrud "},
new Object[]{"url_33", "commodo wisi aliquip ut aliquam sed nostrud ex diam ad nostrud enim ut amet enim ea ad sed tation nostrud suscipit ea magna magna Lorem amet lobortis ut quis nibh aliquam aliquam exerci aliquip lobortis consectetuer enim wisi ea nisl laoreet erat dolore "},
new Object[]{"url_34", "tincidunt adipiscing enim tation nibh Ut dolore tincidunt tation laoreet suscipit minim aliquam volutpat laoreet suscipit tincidunt nibh ut ut sit nostrud nonummy tincidunt exerci sit ad sed consectetuer minim dolor dolore laoreet nostrud nibh laoreet ea adipiscing exerci dolore ipsum "},
new Object[]{"url_35", "tation ut erat ut tation dolor Lorem laoreet Lorem elit adipiscing wisi aliquip nostrud elit Ut volutpat ea aliquam aliquip "},
new Object[]{"url_36", "lobortis enim ullamcorper adipiscing consectetuer aliquip wisi enim minim Ut minim elit elit aliquam exerci ullamcorper amet lobortis adipiscing diam laoreet consectetuer nostrud diam diam amet ut enim ullamcorper aliquip diam ut nostrud diam magna amet nonummy commodo wisi enim ullamcorper suscipit euismod dolore tincidunt magna suscipit elit "},
new Object[]{"url_37", "elit adipiscing nisl nisl ex aliquip nibh sed ut ad Lorem elit consectetuer ad volutpat lobortis amet veniam ipsum nibh ut consectetuer editors ad aliquam "},
new Object[]{"url_38", "elit quis nibh adipiscing sit consectetuer ut euismod quis tincidunt quis nisl consectetuer dolor diam suscipit quis dolore Lorem suscipit nonummy sed ex "},
new Object[]{"url_39", "nisl sit consectetuer elit oscillations enim ipsum enim nostrud adipiscing nostrud editors aliquam "},
new Object[]{"url_40", "sed wisi dolor diam commodo ullamcorper commodo nostrud ullamcorper laoreet minim dolore suscipit laoreet tation aliquip "},
new Object[]{"url_41", "ad consectetuer exerci nisl exerci amet enim diam lobortis Lorem ex volutpat volutpat nibh aliquam ut ullamcorper volutpat nostrud ut adipiscing ullamcorper "},
new Object[]{"url_42", "minim laoreet tation magna veniam ut ea sit ipsum tincidunt Ut amet ex aliquip ex euismod exerci wisi elit editors ad amet veniam ad editors "},
new Object[]{"url_43", "ut nisl ad ullamcorper nibh Ut editors exerci enim exerci ea laoreet veniam ea amet exerci volutpat amet ad "},
new Object[]{"url_44", "volutpat tincidunt enim amet sed tincidunt consectetuer ullamcorper nisl Ut adipiscing tation ad ad amet nonummy elit erat nibh Lorem erat elit laoreet consectetuer sed aliquip nostrud "},
new Object[]{"url_45", "sed aliquam ut ut consectetuer wisi euismod enim erat euismod quis exerci amet tation sit "},
new Object[]{"url_46", "lobortis oscillations tation aliquam dolore Lorem aliquip tation exerci ullamcorper aliquam aliquip lobortis ex tation dolor ut ut sed suscipit nisl ullamcorper sed editors laoreet aliquip enim dolor veniam tincidunt sed euismod tation "},
new Object[]{"url_47", "Lorem Lorem ut wisi ad ut tation consectetuer exerci convection tation ullamcorper sed dolore quis aliquam ipsum lobortis commodo nonummy "},
new Object[]{"url_48", "laoreet minim veniam nisl elit sit amet commodo ex ullamcorper suscipit aliquip laoreet convection Ut ex minim aliquam "},
new Object[]{"url_49", "lobortis nonummy minim amet sit veniam quis consectetuer tincidunt laoreet quis "},
new Object[]{"url_50", "lobortis nisl commodo dolor amet nibh editors enim magna minim elit euismod diam laoreet laoreet ad minim sed ut Ut lobortis adipiscing quis sed ut aliquam oscillations exerci tation consectetuer lobortis elit tincidunt consectetuer minim amet dolore quis aliquam Ut exerci sed aliquam quis quis ullamcorper Ut ex tincidunt "},
new Object[]{"url_51", "nostrud nisl ea erat ut suscipit Ut sit oscillations ullamcorper nonummy magna lobortis dolore editors tincidunt nostrud suscipit ex quis tation ut sit amet nostrud laoreet ex tincidunt "},
new Object[]{"url_52", "ea tation commodo elit sed ex sed quis enim nisl magna laoreet adipiscing amet sit nostrud consectetuer nibh tincidunt veniam ex veniam euismod exerci sed dolore suscipit nisl tincidunt euismod quis Ut enim euismod dolor diam exerci magna exerci ut exerci nisl "},
new Object[]{"url_53", "volutpat amet Ut lobortis dolor tation minim nonummy lobortis convection nostrud "},
new Object[]{"url_54", "ullamcorper commodo Ut amet sit nostrud aliquam ad amet wisi enim nostrud ipsum nisl veniam erat aliquam ex aliquam dolor dolor ut consectetuer euismod exerci elit exerci Ut ea minim enim consectetuer ad consectetuer nonummy convection adipiscing ad ullamcorper lobortis nonummy laoreet nonummy aliquam ullamcorper ad nostrud amet "},
new Object[]{"url_55", "wisi magna editors amet aliquam diam amet aliquip nisl consectetuer laoreet nonummy suscipit euismod diam enim tation elit ut lobortis quis euismod suscipit nostrud ea ea commodo lobortis dolore Ut nisl nostrud dolor laoreet euismod ea dolore aliquam ut Lorem exerci ex sit "},
new Object[]{"url_56", "ex dolor veniam wisi laoreet ut exerci diam ad ex ut ut laoreet ut nisl ullamcorper nisl "},
new Object[]{"url_57", "diam adipiscing Ut ut Lorem amet erat elit erat magna adipiscing euismod elit ullamcorper nostrud aliquam dolor ullamcorper sit tation tation "},
new Object[]{"url_58", "laoreet convection veniam lobortis dolore ut nonummy commodo erat lobortis veniam nostrud dolore minim commodo ut consectetuer magna erat ea dolore Lorem suscipit ex ipsum exerci sed enim ea tation suscipit enim adipiscing "},
new Object[]{"url_59", "amet ut ut Ut ad dolor quis ad magna exerci suscipit magna nibh commodo euismod amet euismod wisi diam suscipit dolore Lorem dolor ex amet exerci aliquip ut ut lobortis quis elit minim sed Lorem "},
new Object[]{"url_60", "ut ut amet ullamcorper amet euismod dolor amet elit exerci adipiscing sed suscipit sed exerci wisi diam veniam wisi suscipit ut quis nibh ullamcorper ex quis magna dolore volutpat editors minim ut sit aliquip oscillations nisl ipsum "},
new Object[]{"url_61", "nibh nostrud tincidunt lobortis adipiscing adipiscing ullamcorper ullamcorper ipsum nisl ullamcorper aliquip laoreet commodo ut tation wisi diam commodo aliquip commodo suscipit tincidunt volutpat elit enim laoreet ut nostrud ad nonummy ipsum "},
new Object[]{"url_62", "Ut ut minim enim amet euismod erat elit commodo consectetuer Ut quis dolor ex diam quis wisi tation tincidunt laoreet volutpat "},
new Object[]{"url_63", "ut erat volutpat euismod amet ea nonummy lobortis ut Ut ea veniam sed veniam nostrud "},
new Object[]{"url_64", "tation dolor suscipit minim nisl wisi consectetuer aliquip tation Ut commodo ut dolore consectetuer elit wisi nisl ipsum "},
new Object[]{"url_65", "ullamcorper nisl Lorem magna tation veniam aliquam diam amet euismod "},
new Object[]{"url_66", "euismod aliquam tincidunt Ut volutpat ea lobortis sit ut volutpat ut lobortis ut lobortis ut nisl amet dolor sed ipsum enim ullamcorper diam euismod nostrud wisi erat quis diam nibh Ut dolore sed amet tation enim diam "},
new Object[]{"url_67", "amet minim minim amet laoreet Lorem aliquam veniam elit volutpat magna adipiscing enim enim euismod laoreet sed ex sed aliquam ad ea ut adipiscing suscipit ex minim dolore minim ea laoreet nisl "},
new Object[]{"url_68", "aliquam ea volutpat ut wisi tation tation nibh nisl erat laoreet ea volutpat dolor dolor aliquam exerci quis ullamcorper aliquam ut quis suscipit "},
new Object[]{"url_69", "quis exerci ut aliquip wisi dolore magna nibh consectetuer magna tation ullamcorper lobortis sed amet adipiscing minim suscipit nibh nibh nostrud euismod enim "},
new Object[]{"url_70", "tation enim consectetuer adipiscing wisi laoreet diam aliquip nostrud elit nostrud aliquip ea minim amet diam dolore "},
new Object[]{"url_71", "consectetuer tincidunt nibh amet tation nonummy sit tation diam sed diam tation "},
new Object[]{"url_72", "Lorem ut nostrud nonummy minim quis euismod lobortis nostrud nonummy adipiscing tincidunt consectetuer ut nibh ad suscipit dolor ut elit dolore amet ut quis tation ullamcorper nonummy laoreet ullamcorper aliquam dolore convection dolor tincidunt ut ullamcorper ex dolor suscipit erat oscillations ad "},
new Object[]{"url_73", "elit Ut commodo ut ullamcorper ullamcorper ut euismod commodo diam aliquip suscipit consectetuer exerci tation nostrud ut wisi exerci sed ut elit sed volutpat Lorem nibh laoreet consectetuer ex Lorem elit aliquam commodo lobortis ad "},
new Object[]{"url_74", "quis magna laoreet commodo aliquam nisl ullamcorper veniam tation wisi consectetuer commodo consectetuer ad dolore aliquam dolor elit amet sit amet nibh commodo erat veniam aliquip dolore ad magna ad ipsum Ut exerci ea volutpat nisl amet nostrud sit "},
new Object[]{"url_75", "tincidunt suscipit sit aliquip aliquam adipiscing dolore exerci Ut suscipit ut sit laoreet suscipit wisi sit enim nonummy consectetuer dolore editors "},
new Object[]{"url_76", "veniam ullamcorper tation sit suscipit dolor suscipit veniam sit Lorem quis sed nostrud ad tincidunt elit adipiscing "},
new Object[]{"url_77", "volutpat sit amet veniam quis ipsum nibh elit enim commodo magna veniam magna convection "},
new Object[]{"url_78", "tation dolore minim elit nisl volutpat tation laoreet enim nostrud exerci dolore tincidunt aliquip Lorem ipsum nostrud quis adipiscing ullamcorper erat lobortis tation commodo Ut ipsum commodo magna ad ipsum ut enim "},
new Object[]{"url_79", "lobortis amet elit Lorem amet nonummy commodo tation ex ea amet Lorem ea nonummy commodo veniam volutpat nibh wisi ad ipsum euismod ea convection nostrud nisl erat veniam Ut aliquip ad aliquip editors wisi magna tation nostrud nonummy adipiscing ullamcorper aliquip "},
new Object[]{"url_80", "tincidunt nostrud nostrud magna ea euismod ea consectetuer nisl exerci ea dolor nisl commodo ex erat ipsum exerci suscipit ad nisl ea nonummy suscipit adipiscing laoreet sit euismod nibh adipiscing sed minim commodo amet "},
new Object[]{"url_81", "nostrud erat ut sed editors erat amet magna lobortis diam laoreet dolor amet nibh ut ipsum ipsum amet ut sed ut exerci elit suscipit wisi magna ut veniam nisl commodo enim adipiscing laoreet ad Lorem oscillations "},
new Object[]{"url_82", "quis commodo nibh nibh volutpat suscipit dolore magna tincidunt nibh ut ad ullamcorper ullamcorper quis enim ad ut tation minim laoreet veniam dolor sed tincidunt exerci exerci nostrud ullamcorper amet ut ut ullamcorper "},
new Object[]{"url_83", "sit suscipit volutpat elit tation elit sed sed dolor ex ex ipsum euismod laoreet magna lobortis ad "},
new Object[]{"url_84", "lobortis ipsum euismod enim ea tation veniam tation oscillations aliquip consectetuer euismod ut sed lobortis tation oscillations commodo euismod laoreet suscipit amet elit ullamcorper volutpat aliquam ea enim ullamcorper consectetuer laoreet tation quis ut commodo erat euismod dolor laoreet ullamcorper laoreet "},
new Object[]{"url_85", "adipiscing sit quis commodo consectetuer quis enim euismod exerci nonummy ea nostrud Ut veniam sit aliquip nisl enim "},
new Object[]{"url_86", "nostrud dolore veniam veniam wisi aliquip adipiscing diam sed quis ullamcorper "},
new Object[]{"url_87", "quis Lorem suscipit Ut nibh diam euismod consectetuer lobortis ipsum sed suscipit consectetuer euismod laoreet ut wisi nisl elit quis commodo adipiscing adipiscing suscipit aliquam nisl quis magna ipsum enim ad quis ea magna Lorem nibh ea "},
new Object[]{"url_88", "euismod commodo sed tincidunt Ut veniam consectetuer quis erat ex ea erat laoreet commodo nibh minim "},
new Object[]{"url_89", "tation diam editors Ut enim nibh Lorem volutpat quis diam suscipit exerci wisi ad "},
new Object[]{"url_90", "volutpat editors ea nibh wisi ad amet volutpat nisl ullamcorper nibh volutpat minim ex ut sit veniam Lorem consectetuer quis ad sit suscipit volutpat wisi diam sed tincidunt ipsum minim convection ea diam oscillations quis lobortis "},
new Object[]{"url_91", "enim minim nonummy ea minim euismod adipiscing editors volutpat magna sit magna ut ipsum ut "},
new Object[]{"url_92", "nisl Ut commodo amet euismod lobortis ea ea wisi commodo Lorem sit ipsum volutpat nonummy exerci erat elit exerci magna ad erat enim laoreet quis nostrud wisi ut veniam amet ullamcorper lobortis ad suscipit volutpat veniam nostrud nibh quis ipsum dolore consectetuer veniam ipsum aliquip dolore sed laoreet ipsum "},
new Object[]{"url_93", "nonummy aliquam ad lobortis Lorem erat ad tation Lorem exerci ex "},
new Object[]{"url_94", "nonummy dolore commodo exerci ex quis ut suscipit elit laoreet sit tation magna veniam ea sit nonummy veniam Lorem quis nibh aliquip exerci amet ullamcorper adipiscing erat nisl editors diam commodo ad euismod adipiscing ea suscipit exerci aliquip volutpat tation enim volutpat sit "},
new Object[]{"url_95", "sit suscipit oscillations ipsum nibh dolor ea dolore ea elit ipsum minim editors magna consectetuer ullamcorper commodo nonummy sit nostrud aliquip sit erat ullamcorper ullamcorper nibh veniam erat quis dolore nonummy "},
new Object[]{"url_96", "nostrud quis ut volutpat magna ad quis adipiscing Lorem commodo exerci laoreet magna adipiscing erat quis wisi ea ea laoreet enim convection ad dolor nisl amet nibh aliquam adipiscing tincidunt minim diam Lorem commodo adipiscing volutpat "},
new Object[]{"url_97", "laoreet laoreet suscipit nostrud dolore adipiscing volutpat Ut sed nisl diam ullamcorper ex ut ut dolor amet nostrud euismod dolore veniam veniam enim tation veniam ea minim minim volutpat tincidunt "},
new Object[]{"url_98", "quis lobortis amet wisi nostrud ipsum aliquam convection tincidunt dolore ullamcorper nibh lobortis volutpat ea nostrud oscillations minim nonummy enim ad lobortis exerci ipsum ullamcorper nibh nonummy diam amet enim veniam ut nostrud "},
new Object[]{"url_99", "aliquam wisi suscipit commodo diam amet amet magna nisl enim nostrud tation nisl nostrud nibh ut "}
};
/**
 * Default rank data set: each row is an {@code {Integer, String, Integer}}
 * triple, consumed by {@link #getRankDataSet} as a
 * {@code Tuple3<Integer, String, Integer>}. Presumably the fields are
 * (rank, url, average visit duration) — confirm against the WebLogAnalysis
 * example program that consumes this class.
 */
public static final Object[][] RANKS = {
new Object[]{30, "url_0", 43},
new Object[]{82, "url_1", 39},
new Object[]{56, "url_2", 31},
new Object[]{96, "url_3", 36},
new Object[]{31, "url_4", 36},
new Object[]{29, "url_5", 6},
new Object[]{33, "url_6", 48},
new Object[]{66, "url_7", 40},
new Object[]{28, "url_8", 51},
new Object[]{9, "url_9", 4},
new Object[]{49, "url_10", 24},
new Object[]{26, "url_11", 12},
new Object[]{39, "url_12", 46},
new Object[]{84, "url_13", 53},
new Object[]{29, "url_14", 50},
new Object[]{21, "url_15", 12},
new Object[]{69, "url_16", 34},
new Object[]{11, "url_17", 38},
new Object[]{96, "url_18", 13},
new Object[]{56, "url_19", 48},
new Object[]{18, "url_20", 36},
new Object[]{31, "url_21", 21},
new Object[]{29, "url_22", 11},
new Object[]{71, "url_23", 30},
new Object[]{85, "url_24", 48},
new Object[]{19, "url_25", 45},
new Object[]{69, "url_26", 9},
new Object[]{20, "url_27", 51},
new Object[]{33, "url_28", 46},
new Object[]{75, "url_29", 38},
new Object[]{96, "url_30", 51},
new Object[]{73, "url_31", 40},
new Object[]{67, "url_32", 16},
new Object[]{24, "url_33", 24},
new Object[]{27, "url_34", 35},
new Object[]{33, "url_35", 35},
new Object[]{7, "url_36", 22},
new Object[]{83, "url_37", 41},
new Object[]{23, "url_38", 49},
new Object[]{41, "url_39", 33},
new Object[]{66, "url_40", 38},
new Object[]{4, "url_41", 52},
new Object[]{34, "url_42", 4},
new Object[]{28, "url_43", 12},
new Object[]{14, "url_44", 14},
new Object[]{41, "url_45", 11},
new Object[]{48, "url_46", 37},
new Object[]{75, "url_47", 41},
new Object[]{78, "url_48", 3},
new Object[]{63, "url_49", 28}
};
/**
 * Default visit data set: each row is a {@code {url, date}} pair, where the
 * date is a plain {@code "yyyy-M-d"} string (not zero-padded). Consumed by
 * {@link #getVisitDataSet} as {@code Tuple2<String, String>}.
 */
public static final Object[][] VISITS = {
new Object[]{"url_2", "2003-12-17"},
new Object[]{"url_9", "2008-11-11"},
new Object[]{"url_14", "2003-11-5"},
new Object[]{"url_46", "2009-2-16"},
new Object[]{"url_14", "2004-11-9"},
new Object[]{"url_36", "2001-3-9"},
new Object[]{"url_35", "2006-8-13"},
new Object[]{"url_22", "2008-1-18"},
new Object[]{"url_36", "2002-3-9"},
new Object[]{"url_13", "2007-7-17"},
new Object[]{"url_23", "2009-6-16"},
new Object[]{"url_16", "2000-7-15"},
new Object[]{"url_41", "2002-5-10"},
new Object[]{"url_6", "2004-11-9"},
new Object[]{"url_5", "2003-6-7"},
new Object[]{"url_22", "2002-11-5"},
new Object[]{"url_11", "2007-7-21"},
new Object[]{"url_38", "2009-12-2"},
new Object[]{"url_6", "2004-11-2"},
new Object[]{"url_46", "2000-6-4"},
new Object[]{"url_34", "2003-9-2"},
new Object[]{"url_31", "2008-2-24"},
new Object[]{"url_0", "2003-2-2"},
new Object[]{"url_47", "2003-7-8"},
new Object[]{"url_49", "2009-9-13"},
new Object[]{"url_11", "2003-4-2"},
new Object[]{"url_20", "2000-6-18"},
new Object[]{"url_38", "2000-2-22"},
new Object[]{"url_44", "2009-2-17"},
new Object[]{"url_26", "2000-6-21"},
new Object[]{"url_13", "2000-11-25"},
new Object[]{"url_47", "2005-4-19"},
new Object[]{"url_46", "2008-1-7"},
new Object[]{"url_33", "2004-12-24"},
new Object[]{"url_32", "2009-2-8"},
new Object[]{"url_26", "2000-9-21"},
new Object[]{"url_9", "2002-8-18"},
new Object[]{"url_38", "2002-11-27"},
new Object[]{"url_37", "2008-2-26"},
new Object[]{"url_1", "2007-3-22"},
new Object[]{"url_37", "2002-3-20"},
new Object[]{"url_27", "2008-11-12"},
new Object[]{"url_30", "2000-12-16"},
new Object[]{"url_48", "2000-12-17"},
new Object[]{"url_46", "2008-4-16"},
new Object[]{"url_29", "2006-3-9"},
new Object[]{"url_0", "2007-7-26"},
new Object[]{"url_46", "2009-12-15"},
new Object[]{"url_34", "2002-2-13"},
new Object[]{"url_24", "2009-3-1"},
new Object[]{"url_43", "2007-11-4"},
new Object[]{"url_3", "2004-2-16"},
new Object[]{"url_26", "2000-10-26"},
new Object[]{"url_42", "2004-7-14"},
new Object[]{"url_13", "2004-9-10"},
new Object[]{"url_21", "2000-2-21"},
new Object[]{"url_9", "2006-6-5"},
new Object[]{"url_46", "2001-12-17"},
new Object[]{"url_24", "2006-12-8"},
new Object[]{"url_25", "2006-9-2"},
new Object[]{"url_37", "2002-6-26"},
new Object[]{"url_18", "2006-6-2"},
new Object[]{"url_46", "2003-5-24"},
new Object[]{"url_32", "2000-10-17"},
new Object[]{"url_45", "2002-1-12"},
new Object[]{"url_12", "2005-12-13"},
new Object[]{"url_49", "2009-3-9"},
new Object[]{"url_31", "2001-9-19"},
new Object[]{"url_22", "2002-7-9"},
new Object[]{"url_27", "2005-2-3"},
new Object[]{"url_43", "2008-7-15"},
new Object[]{"url_20", "2000-3-23"},
new Object[]{"url_25", "2002-5-8"},
new Object[]{"url_41", "2004-4-27"},
new Object[]{"url_17", "2008-7-17"},
new Object[]{"url_26", "2009-12-16"},
new Object[]{"url_34", "2006-2-10"},
new Object[]{"url_8", "2009-4-14"},
new Object[]{"url_16", "2000-2-24"},
new Object[]{"url_2", "2009-2-10"},
new Object[]{"url_35", "2003-2-24"},
new Object[]{"url_34", "2008-3-16"},
new Object[]{"url_27", "2005-1-5"},
new Object[]{"url_8", "2008-12-10"},
new Object[]{"url_38", "2009-2-11"},
new Object[]{"url_38", "2006-11-3"},
new Object[]{"url_47", "2003-2-13"},
new Object[]{"url_8", "2008-11-17"},
new Object[]{"url_26", "2009-5-11"},
new Object[]{"url_12", "2007-11-26"},
new Object[]{"url_10", "2003-1-13"},
new Object[]{"url_8", "2005-9-23"},
new Object[]{"url_42", "2001-4-5"},
new Object[]{"url_30", "2009-12-10"},
new Object[]{"url_2", "2003-1-3"},
new Object[]{"url_2", "2009-2-19"},
new Object[]{"url_7", "2000-6-25"},
new Object[]{"url_15", "2004-9-26"},
new Object[]{"url_25", "2009-10-5"},
new Object[]{"url_23", "2009-8-9"},
new Object[]{"url_27", "2004-4-3"},
new Object[]{"url_37", "2008-6-9"},
new Object[]{"url_9", "2002-5-25"},
new Object[]{"url_43", "2009-5-18"},
new Object[]{"url_21", "2008-4-19"},
new Object[]{"url_12", "2001-12-25"},
new Object[]{"url_16", "2006-9-25"},
new Object[]{"url_27", "2002-1-2"},
new Object[]{"url_2", "2009-1-21"},
new Object[]{"url_31", "2009-3-20"},
new Object[]{"url_42", "2002-3-1"},
new Object[]{"url_31", "2001-11-26"},
new Object[]{"url_20", "2003-5-15"},
new Object[]{"url_32", "2004-1-22"},
new Object[]{"url_28", "2008-9-16"},
new Object[]{"url_27", "2006-7-3"},
new Object[]{"url_11", "2008-12-26"},
new Object[]{"url_15", "2004-8-16"},
new Object[]{"url_34", "2002-10-5"},
new Object[]{"url_44", "2000-2-15"},
new Object[]{"url_9", "2000-10-23"},
new Object[]{"url_45", "2005-4-24"},
new Object[]{"url_0", "2006-8-7"},
new Object[]{"url_48", "2003-8-7"},
new Object[]{"url_8", "2007-12-13"},
new Object[]{"url_42", "2003-8-2"},
new Object[]{"url_25", "2008-3-5"},
new Object[]{"url_3", "2007-3-9"},
new Object[]{"url_49", "2003-10-7"},
new Object[]{"url_18", "2007-12-6"},
new Object[]{"url_3", "2006-7-5"},
new Object[]{"url_27", "2000-9-14"},
new Object[]{"url_42", "2002-10-20"},
new Object[]{"url_44", "2007-1-13"},
new Object[]{"url_6", "2003-1-21"},
new Object[]{"url_40", "2009-10-20"},
new Object[]{"url_28", "2009-6-17"},
new Object[]{"url_22", "2000-2-17"},
new Object[]{"url_3", "2005-1-15"},
new Object[]{"url_9", "2008-12-9"},
new Object[]{"url_9", "2005-2-19"},
new Object[]{"url_28", "2000-4-22"},
new Object[]{"url_44", "2001-9-9"},
new Object[]{"url_43", "2008-6-21"},
new Object[]{"url_39", "2008-5-9"},
new Object[]{"url_15", "2006-9-15"},
new Object[]{"url_23", "2001-12-18"},
new Object[]{"url_14", "2002-5-23"},
new Object[]{"url_11", "2007-7-11"},
new Object[]{"url_34", "2000-12-8"},
new Object[]{"url_47", "2005-7-3"},
new Object[]{"url_38", "2004-3-26"},
new Object[]{"url_19", "2003-9-14"},
new Object[]{"url_24", "2007-7-16"},
new Object[]{"url_40", "2008-8-21"},
new Object[]{"url_17", "2007-12-4"},
new Object[]{"url_25", "2006-6-24"},
new Object[]{"url_2", "2000-10-8"},
new Object[]{"url_12", "2008-6-10"},
new Object[]{"url_11", "2004-11-24"},
new Object[]{"url_13", "2005-11-3"},
new Object[]{"url_43", "2005-1-2"},
new Object[]{"url_14", "2008-6-12"},
new Object[]{"url_43", "2001-8-27"},
new Object[]{"url_45", "2000-3-3"},
new Object[]{"url_0", "2006-9-27"},
new Object[]{"url_22", "2007-12-18"},
new Object[]{"url_25", "2006-4-4"},
new Object[]{"url_32", "2001-6-25"},
new Object[]{"url_6", "2007-6-9"},
new Object[]{"url_8", "2009-10-3"},
new Object[]{"url_15", "2003-2-23"},
new Object[]{"url_37", "2000-5-6"},
new Object[]{"url_27", "2004-3-21"},
new Object[]{"url_17", "2005-6-20"},
new Object[]{"url_2", "2004-2-27"},
new Object[]{"url_36", "2005-3-16"},
new Object[]{"url_1", "2009-12-3"},
new Object[]{"url_9", "2004-4-27"},
new Object[]{"url_18", "2009-5-26"},
new Object[]{"url_31", "2000-9-21"},
new Object[]{"url_12", "2008-9-25"},
new Object[]{"url_2", "2004-2-16"},
new Object[]{"url_28", "2008-11-12"},
new Object[]{"url_28", "2001-6-26"},
new Object[]{"url_12", "2006-3-15"},
new Object[]{"url_0", "2009-3-1"},
new Object[]{"url_36", "2006-10-13"},
new Object[]{"url_15", "2004-11-5"},
new Object[]{"url_32", "2008-2-11"},
new Object[]{"url_19", "2009-8-3"},
new Object[]{"url_2", "2006-8-6"},
new Object[]{"url_11", "2009-10-13"},
new Object[]{"url_21", "2002-9-14"},
new Object[]{"url_18", "2000-11-2"},
new Object[]{"url_35", "2006-5-15"},
new Object[]{"url_11", "2006-2-18"},
new Object[]{"url_0", "2001-4-25"},
new Object[]{"url_14", "2009-4-8"},
new Object[]{"url_16", "2009-4-7"}
};
/**
 * Builds the default document data set from {@link #DOCUMENTS}.
 *
 * @param env the execution environment used to create the {@link DataSet}
 * @return a data set of (url, document-content) pairs
 */
public static DataSet<Tuple2<String, String>> getDocumentDataSet(ExecutionEnvironment env) {
    List<Tuple2<String, String>> rows = new ArrayList<Tuple2<String, String>>(100);
    for (int i = 0; i < DOCUMENTS.length; i++) {
        Object[] row = DOCUMENTS[i];
        rows.add(new Tuple2<String, String>((String) row[0], (String) row[1]));
    }
    return env.fromCollection(rows);
}
/**
 * Builds the default rank data set from {@link #RANKS}.
 *
 * @param env the execution environment used to create the {@link DataSet}
 * @return a data set of (rank, url, duration) triples
 */
public static DataSet<Tuple3<Integer, String, Integer>> getRankDataSet(ExecutionEnvironment env) {
    List<Tuple3<Integer, String, Integer>> rows = new ArrayList<Tuple3<Integer, String, Integer>>(100);
    for (int i = 0; i < RANKS.length; i++) {
        Object[] row = RANKS[i];
        rows.add(new Tuple3<Integer, String, Integer>((Integer) row[0], (String) row[1], (Integer) row[2]));
    }
    return env.fromCollection(rows);
}
/**
 * Builds the default visit data set from {@link #VISITS}.
 *
 * @param env the execution environment used to create the {@link DataSet}
 * @return a data set of (url, visit-date) pairs
 */
public static DataSet<Tuple2<String, String>> getVisitDataSet(ExecutionEnvironment env) {
    List<Tuple2<String, String>> rows = new ArrayList<Tuple2<String, String>>(100);
    for (int i = 0; i < VISITS.length; i++) {
        Object[] row = VISITS[i];
        rows.add(new Tuple2<String, String>((String) row[0], (String) row[1]));
    }
    return env.fromCollection(rows);
}
}
|
/**
* shopmobile for tpshop
* ============================================================================
* 版权所有 2015-2099 深圳搜豹网络科技有限公司,并保留所有权利。
* 网站地址: http://www.tp-shop.cn
* ——————————————————————————————————————
* 这不是一个自由软件!您只能在不用于商业目的的前提下对程序代码进行修改和使用 .
* 不允许对程序代码以任何形式任何目的的再发布。
* ============================================================================
* Author: 飞龙 wangqh01292@163.com
* Date: @date 2015年11月3日 下午10:04:49
 * Description: 钱包流水列表(余额/积分明细)。原注释“商品收藏列表”系复制粘贴错误。
*
* @version V1.0
*/
package com.soubao.tpshop.activity.person;
import android.os.Bundle;
import android.widget.ListView;
import com.chanven.lib.cptr.PtrClassicFrameLayout;
import com.chanven.lib.cptr.PtrDefaultHandler;
import com.chanven.lib.cptr.PtrFrameLayout;
import com.chanven.lib.cptr.loadmore.OnLoadMoreListener;
import com.soubao.tpshop.R;
import com.soubao.tpshop.activity.common.SPBaseActivity;
import com.soubao.tpshop.adapter.SPWalletLogAdapter;
import com.soubao.tpshop.http.base.SPFailuredListener;
import com.soubao.tpshop.http.base.SPSuccessListener;
import com.soubao.tpshop.http.person.SPPersonRequest;
import com.soubao.tpshop.model.person.SPWalletLog;
import org.androidannotations.annotations.AfterViews;
import org.androidannotations.annotations.EActivity;
import org.androidannotations.annotations.ViewById;
import java.util.List;
/**
* @author 飞龙
*/
@EActivity(R.layout.person_walletlog_list)
public class SPWalletLogtListActivity extends SPBaseActivity {
@ViewById(R.id.walletlog_listv)
ListView walletlogListv;
@ViewById(R.id.walletlog_list_view_frame)
PtrClassicFrameLayout ptrClassicFrameLayout;
SPWalletLogAdapter mAdapter;
List<SPWalletLog> mWalletLogs;
int pageIndex; //当前第几页:从1开始
/**
* 最大页数
*/
boolean maxIndex;
private String TAG = "SPWalletLogtListActivity";
@Override
protected void onCreate(Bundle bundle) {
setCustomerTitle(true, true, getString(R.string.title_balance_points));
super.onCreate(bundle);
}
@AfterViews
public void init() {
super.init();
}
@Override
public void initSubViews() {
ptrClassicFrameLayout.setPtrHandler(new PtrDefaultHandler() {
@Override
public void onRefreshBegin(PtrFrameLayout frame) {
//下拉刷新
refreshData();
}
});
ptrClassicFrameLayout.setOnLoadMoreListener(new OnLoadMoreListener() {
@Override
public void loadMore() {
//上拉加载更多
loadMoreData();
}
});
}
@Override
public void initData() {
mAdapter = new SPWalletLogAdapter(this);
walletlogListv.setAdapter(mAdapter);
refreshData();
}
public void refreshData() {
pageIndex = 1;
maxIndex = false;
showLoadingToast();
SPPersonRequest.getWalletLogsWithPage(pageIndex, new SPSuccessListener() {
@Override
public void onRespone(String msg, Object response) {
if (response != null) {
mWalletLogs = (List<SPWalletLog>) response;
//更新收藏数据
mAdapter.setData(mWalletLogs);
ptrClassicFrameLayout.setLoadMoreEnable(true);
} else {
maxIndex = true;
ptrClassicFrameLayout.setLoadMoreEnable(false);
}
ptrClassicFrameLayout.refreshComplete();
hideLoadingToast();
}
}, new SPFailuredListener(SPWalletLogtListActivity.this) {
@Override
public void onRespone(String msg, int errorCode) {
showToast(msg);
hideLoadingToast();
}
});
}
public void loadMoreData() {
if (maxIndex) {
return;
}
pageIndex++;
showLoadingToast();
SPPersonRequest.getWalletLogsWithPage(pageIndex, new SPSuccessListener() {
@Override
public void onRespone(String msg, Object response) {
if (response != null) {
List<SPWalletLog> tempWalletLog = (List<SPWalletLog>) response;
mWalletLogs.addAll(tempWalletLog);
//更新收藏数据
mAdapter.setData(mWalletLogs);
ptrClassicFrameLayout.setLoadMoreEnable(true);
} else {
pageIndex--;
maxIndex = true;
ptrClassicFrameLayout.setLoadMoreEnable(false);
}
ptrClassicFrameLayout.refreshComplete();
hideLoadingToast();
}
}, new SPFailuredListener() {
@Override
public void onRespone(String msg, int errorCode) {
hideLoadingToast();
showToast(msg);
pageIndex--;
}
});
}
@Override
public void initEvent() {
}
}
|
/*
This file is part of SQUIN and it falls under the
copyright as specified for the whole SQUIN package.
*/
package org.squin.common;
/**
 * A queue whose ordering is driven by per-element priorities rather than
 * pure insertion order.
 *
 * @author Olaf Hartig (hartig@informatik.hu-berlin.de)
 */
public interface PrioritizedQueue<T>
{
	/**
	 * Inserts the given object into this queue. Its position is determined
	 * by the given priority: the higher the priority, the closer to the
	 * front of the queue the object is placed.
	 */
	void queue ( T t, Priority p );

	/**
	 * Removes and returns an object from the front of this queue — one of
	 * the objects queued with the highest priority among all objects
	 * currently in the queue — or null if the queue is empty.
	 */
	T poll ();

	/**
	 * Tells whether this queue contains the given object.
	 */
	boolean contains ( T t );
}
|
/*
Copyright 2017 Digital Learning Sciences (DLS) at the
University Corporation for Atmospheric Research (UCAR),
P.O. Box 3000, Boulder, CO 80307
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.dlese.dpc.repository;
/**
 * Indicates a problem occurred when attempting to add, modify or delete a
 * collection in the repository. The machine-readable reason is exposed via
 * {@link #getErrorCode()} and is always one of the {@code ERROR_CODE_*}
 * constants declared here.
 *
 * @author John Weatherley
 * @see RepositoryManager
 */
public class PutCollectionException extends Exception {
	/** A collection with this content already exists in a different metadata format. */
	public static final String ERROR_CODE_COLLECTION_EXISTS_IN_ANOTHER_FORMAT = "COLLECTION_EXISTS_IN_ANOTHER_FORMAT";
	/** The supplied metadata-format specifier was invalid. */
	public static final String ERROR_CODE_BAD_FORMAT_SPECIFIER = "BAD_FORMAT_SPECIFIER";
	/** The supplied collection key was invalid. */
	public static final String ERROR_CODE_BAD_KEY = "BAD_KEY";
	/** The supplied collection title was invalid. */
	public static final String ERROR_CODE_BAD_TITLE = "BAD_TITLE";
	/** The supplied additional metadata was invalid. */
	public static final String ERROR_CODE_BAD_ADDITIONAL_METADATA = "BAD_ADDITIONAL_METADATA";
	/** An I/O error occurred while updating the repository. */
	public static final String ERROR_CODE_IO_ERROR = "IO_ERROR";
	/** An unexpected internal error occurred. */
	public static final String ERROR_CODE_INTERNAL_ERROR = "INTERNAL_ERROR";

	// Immutable: assigned exactly once in the constructor.
	private final String _errorCode;

	/**
	 * Constructs the exception with a human-readable message and a
	 * machine-readable error code (one of the {@code ERROR_CODE_*} constants).
	 *
	 * @param message   description of the problem
	 * @param errorCode one of the {@code ERROR_CODE_*} constants
	 */
	PutCollectionException(String message, String errorCode){
		super(message);
		_errorCode = errorCode;
	}

	/**
	 * Returns the machine-readable error code for this exception.
	 *
	 * @return one of the {@code ERROR_CODE_*} constants
	 */
	public String getErrorCode(){
		return _errorCode;
	}
}
|
/*******************************************************************************
* Copyright (c) 2001-2014 Mathew A. Nelson and Robocode contributors
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://robocode.sourceforge.net/license/epl-v10.html
*******************************************************************************/
package net.sf.robocode.settings;
import java.awt.*;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.Date;
/**
* @author Pavel Savara (original)
* @author Thales B.V. /Werner Struis (contributor naval)
*/
public interface ISettingsManager {
// Persists all current settings to the backing properties store.
void saveProperties();
// ---- View options (what is drawn on the battle view) ----
boolean getOptionsViewRobotNames();
void setOptionsViewRobotNames(boolean optionsViewRobotNames);
/* NAVAL */
boolean getOptionsViewNavalBoundingBox();
void setOptionsViewNavalBoundingBox(boolean optionsViewBoundingBox);
boolean getOptionsViewNavalBlindSpot();
void setOptionsViewNavalBlindSpot(boolean optionsViewNavalBlindSpot);
/*/NAVAL */
boolean getOptionsViewScanArcs();
void setOptionsViewScanArcs(boolean optionsViewScanArcs);
boolean getOptionsViewRobotEnergy();
void setOptionsViewRobotEnergy(boolean optionsViewRobotEnergy);
boolean getOptionsViewGround();
void setOptionsViewGround(boolean optionsViewGround);
boolean getOptionsViewTPS();
void setOptionsViewTPS(boolean optionsViewTPS);
boolean getOptionsViewFPS();
void setOptionsViewFPS(boolean optionsViewFPS);
boolean getOptionsViewExplosions();
void setOptionsViewExplosions(boolean optionsViewExplosions);
boolean getOptionsViewExplosionDebris();
void setOptionsViewExplosionDebris(boolean optionsViewExplosionDebris);
boolean getOptionsViewSentryBorder();
void setOptionsViewSentryBorder(boolean optionsViewSentryBorder);
boolean getOptionsViewPreventSpeedupWhenMinimized();
void setOptionsViewPreventSpeedupWhenMinimized(boolean preventSpeedupWhenMinimized);
// ---- Rendering options ----
int getOptionsRenderingAntialiasing();
void setOptionsRenderingAntialiasing(int optionsRenderingAntialiasing);
int getOptionsRenderingTextAntialiasing();
void setOptionsRenderingTextAntialiasing(int optionsRenderingTextAntialiasing);
int getOptionsRenderingMethod();
void setOptionsRenderingMethod(int optionsRenderingMethod);
// Rendering hints derived from the rendering options above.
RenderingHints getRenderingHints();
int getOptionsRenderingNoBuffers();
void setOptionsRenderingNoBuffers(int optionsRenderingNoBuffers);
boolean getOptionsRenderingBufferImages();
void setOptionsRenderingBufferImages(boolean optionsRenderingBufferImages);
boolean getOptionsRenderingForceBulletColor();
void setOptionsRenderingForceBulletColor(boolean optionsRenderingForceBulletColor);
// Desired turns-per-second for the battle engine.
int getOptionsBattleDesiredTPS();
void setOptionsBattleDesiredTPS(int optionsBattleDesiredTPS);
// ---- Sound options ----
boolean getOptionsSoundEnableSound();
void setOptionsSoundEnableSound(boolean optionsSoundEnableSound);
boolean getOptionsSoundEnableGunshot();
void setOptionsSoundEnableGunshot(boolean optionsSoundEnableGunshot);
boolean getOptionsSoundEnableBulletHit();
void setOptionsSoundEnableBulletHit(boolean optionsSoundEnableBulletHit);
boolean getOptionsSoundEnableRobotDeath();
void setOptionsSoundEnableRobotDeath(boolean optionsSoundEnableRobotDeath);
boolean getOptionsSoundEnableWallCollision();
void setOptionsSoundEnableWallCollision(boolean optionsSoundEnableWallCollision);
boolean getOptionsSoundEnableRobotCollision();
void setOptionsSoundEnableRobotCollision(boolean optionsSoundEnableRobotCollision);
boolean getOptionsSoundEnableMixerVolume();
void setOptionsSoundMixer(String optionsSoundMixer);
String getOptionsSoundMixer();
void setOptionsSoundEnableMixerVolume(boolean optionsSoundEnableMixerVolume);
boolean getOptionsSoundEnableMixerPan();
void setOptionsSoundEnableMixerPan(boolean optionsSoundEnableMixerPan);
// ---- Team options ----
boolean getOptionsTeamShowTeamRobots();
void setOptionsTeamShowTeamRobots(boolean optionsTeamShowTeamRobots);
// ---- Naval component configuration strings ----
// Config strings are serialized component descriptions; see the
// BATTLE_CONFIG_DEFAULT_* constants below for the expected shape.
void setShipConfig(String configString);
void setRadarConfig(String configString);
void setCannonConfig(String configString);
String getShipConfigString();
String getRadarConfigString();
String getCannonConfigString();
String getDefaultShipConfigString();
String getDefaultRadarConfigString();
String getDefaultCannonConfigString();
// ---- Music / sound-effect file locations ----
String getFileThemeMusic();
String getFileBackgroundMusic();
String getFileEndOfBattleMusic();
String getFileGunshotSfx();
String getBulletHitsRobotSfx();
String getBulletHitsBulletSfx();
String getRobotDeathSfx();
String getRobotCollisionSfx();
String getWallCollisionSfx();
// Timestamp of the last version check.
Date getVersionChecked();
void setVersionChecked(Date versionChecked);
// ---- Quotas and timing constants ----
long getRobotFilesystemQuota();
void setRobotFilesystemQuota(long robotFilesystemQuota);
long getConsoleQuota();
void setConsoleQuota(long consoleQuota);
long getCpuConstant();
void setCpuConstant(long cpuConstant);
// ---- Development paths (robot source directories) ----
Collection<String> getOptionsDevelopmentPaths();
void setOptionsDevelopmentPaths(Collection<String> paths);
Collection<String> getOptionsExcludedDevelopmentPaths();
void setOptionsExcludedDevelopmentPaths(Collection<String> excludedPaths);
Collection<String> getOptionsEnabledDevelopmentPaths();
// ---- Common options (results, recording, notifications) ----
boolean getOptionsCommonShowResults();
boolean getOptionsCommonDontHideRankings();
void setOptionsCommonDontHideRankings(boolean dontHide);
void setOptionsCommonAppendWhenSavingResults(boolean enable);
boolean getOptionsCommonAppendWhenSavingResults();
void setOptionsCommonShowResults(boolean enable);
boolean getOptionsCommonEnableReplayRecording();
boolean getOptionsCommonEnableAutoRecording();
boolean getOptionsCommonAutoRecordingXML();
void setOptionsCommonEnableReplayRecording(boolean enable);
void setOptionsCommonEnableAutoRecording(boolean enable);
void setOptionsCommonEnableAutoRecordingXML(boolean enable);
void setOptionsCommonNotifyAboutNewBetaVersions(boolean enable);
boolean getOptionsCommonNotifyAboutNewBetaVersions();
// ---- Battle setup (field size, rounds, cooling, timeouts) ----
int getBattlefieldWidth();
int getDefaultBattlefieldWidth();
void setBattlefieldWidth(int battlefieldWidth);
int getBattleFieldHeight();
int getDefaultBattlefieldHeight();
void setBattlefieldHeight(int battlefieldHeight);
double getBattleGunCoolingRate();
double getBattleDefaultGunCoolingRate();
void setBattleGunCoolingRate(double gunCoolingRate);
long getBattleInactivityTime();
long getBattleDefaultInactivityTime();
void setBattleInactivityTime(long inactivityTime);
int getBattleSentryBorderSize();
int getBattleDefaultSentryBorderSize();
void setBattleSentryBorderSize(int sentryBorderSize);
boolean getBattleHideEnemyNames();
boolean getBattleDefaultHideEnemyNames();
void setBattleHideEnemyNames(boolean hideEnemyNames);
int getBattleNumberOfRounds();
int getBattleDefaultNumberOfRounds();
void setBattleNumberOfRounds(int numberOfRounds);
// ---- Persistence ----
void store(FileOutputStream out, String desc) throws IOException;
void load(FileInputStream in) throws IOException;
String getLastRunVersion();
void setLastRunVersion(String lastRunVersion);
// ---- Change listeners ----
void addPropertyListener(ISettingsListener listener);
void removePropertyListener(ISettingsListener propertyListener);
// ---- Property keys and default values ----
// NOTE: this is a single comma-separated declaration; keys are persisted
// in user properties files, so existing values must never be renamed
// without a migration path.
public final static String
OPTIONS_VIEW_ROBOTNAMES = "robocode.options.view.robotNames",
OPTIONS_VIEW_SCANARCS = "robocode.options.view.scanArcs",
OPTIONS_VIEW_ROBOTENERGY = "robocode.options.view.robotEnergy",
OPTIONS_VIEW_GROUND = "robocode.options.view.ground",
OPTIONS_VIEW_TPS = "robocode.options.view.TPS",
OPTIONS_VIEW_FPS = "robocode.options.view.FPS",
OPTIONS_VIEW_EXPLOSIONS = "robocode.options.view.explosions",
OPTIONS_VIEW_EXPLOSION_DEBRIS = "robocode.options.view.explosionDebris",
OPTIONS_VIEW_SENTRY_BORDER = "robocode.options.view.sentryBorder",
OPTIONS_VIEW_NAVAL_BOUNDING_BOX = "robocode.options.view.naval.boundingBox",
OPTIONS_VIEW_NAVAL_BLIND_SPOT = "robocode.options.view.naval.blindSpot",
OPTIONS_BATTLE_DESIREDTPS = "robocode.options.battle.desiredTPS",
OPTIONS_VIEW_PREVENT_SPEEDUP_WHEN_MINIMIZED = "robocode.options.view.preventSpeedupWhenMinimized",
OPTIONS_RENDERING_ANTIALIASING = "robocode.options.rendering.antialiasing",
OPTIONS_RENDERING_TEXT_ANTIALIASING = "robocode.options.rendering.text.antialiasing",
OPTIONS_RENDERING_METHOD = "robocode.options.rendering.method",
OPTIONS_RENDERING_NO_BUFFERS = "robocode.options.rendering.noBuffers",
OPTIONS_RENDERING_BUFFER_IMAGES = "robocode.options.rendering.bufferImages",
OPTIONS_RENDERING_FORCE_BULLET_COLOR = "robocode.options.rendering.forceBulletColor",
OPTIONS_SOUND_ENABLESOUND = "robocode.options.sound.enableSound",
OPTIONS_SOUND_ENABLEGUNSHOT = "robocode.options.sound.enableGunshot",
OPTIONS_SOUND_ENABLEBULLETHIT = "robocode.options.sound.enableBulletHit",
OPTIONS_SOUND_ENABLEROBOTDEATH = "robocode.options.sound.enableRobotDeath",
OPTIONS_SOUND_ENABLEWALLCOLLISION = "robocode.options.sound.enableWallCollision",
OPTIONS_SOUND_ENABLEROBOTCOLLISION = "robocode.options.sound.enableRobotCollision",
OPTIONS_SOUND_MIXER = "robocode.options.sound.mixer",
OPTIONS_SOUND_ENABLEMIXERVOLUME = "robocode.options.sound.enableMixerVolume",
OPTIONS_SOUND_ENABLEMIXERPAN = "robocode.options.sound.enableMixerPan",
OPTIONS_COMMON_NOTIFY_ABOUT_NEW_BETA_VERSIONS = "robocode.options.common.notifyAboutNewBetaVersions",
OPTIONS_COMMON_SHOW_RESULTS = "robocode.options.common.showResults",
OPTIONS_COMMON_DONT_HIDE_RANKINGS = "robocode.options.common.dontHideRankings",
OPTIONS_COMMON_APPEND_WHEN_SAVING_RESULTS = "robocode.options.common.appendWhenSavingResults",
OPTIONS_COMMON_ENABLE_REPLAY_RECORDING = "robocode.options.common.enableReplayRecording",
OPTIONS_COMMON_ENABLE_AUTO_RECORDING = "robocode.options.common.enableAutoRecording",
OPTIONS_COMMON_AUTO_RECORDING_XML = "robocode.options.common.autoRecordingXML",
OPTIONS_TEAM_SHOWTEAMROBOTS = "robocode.options.team.showTeamRobots",
OPTIONS_DEVELOPMENT_PATH = "robocode.options.development.path",
OPTIONS_DEVELOPMENT_PATH_EXCLUDED = "robocode.options.development.path.excluded",
FILE_THEME_MUSIC = "robocode.file.music.theme",
FILE_BACKGROUND_MUSIC = "robocode.file.music.background",
FILE_END_OF_BATTLE_MUSIC = "robocode.file.music.endOfBattle",
FILE_GUNSHOT_SFX = "robocode.file.sfx.gunshot",
FILE_ROBOT_COLLISION_SFX = "robocode.file.sfx.robotCollision",
FILE_WALL_COLLISION_SFX = "robocode.file.sfx.wallCollision",
FILE_ROBOT_DEATH_SFX = "robocode.file.sfx.robotDeath",
FILE_BULLET_HITS_ROBOT_SFX = "robocode.file.sfx.bulletHitsRobot",
FILE_BULLET_HITS_BULLET_SFX = "robocode.file.sfx.bulletHitsBullet",
VERSIONCHECKED = "robocode.versionchecked",
ROBOT_FILESYSTEM_QUOTA = "robocode.robot.filesystem.quota",
CONSOLE_QUOTA = "robocode.console.quota",
CPU_CONSTANT = "robocode.cpu.constant",
LAST_RUN_VERSION = "robocode.version.lastrun",
// Default values (stored as strings like the property values they seed).
BATTLE_DEFAULT_BATTLEFIELD_WIDTH = "2000",
BATTLE_DEFAULT_BATTLEFIELD_HEIGHT = "2000",
BATTLE_DEFAULT_NUMBER_OF_ROUNDS = "10",
BATTLE_DEFAULT_GUN_COOLING_RATE = "0.1",
BATTLE_DEFAULT_INACTIVITY_TIME = "450",
BATTLE_DEFAULT_SENTRY_BORDER_SIZE = "100",
BATTLE_DEFAULT_HIDE_ENEMY_NAMES = "false",
BATTLE_BATTLEFIELD_WIDTH = "robocode.battle.battlefieldWidth",
BATTLE_BATTLEFIELD_HEIGHT = "robocode.battle.battlefieldHeight",
// NOTE(review): the key says "numberOfBattles" although the constant is
// about rounds — presumably historical; confirm before ever changing it.
BATTLE_NUMBER_OF_ROUNDS = "robocode.battle.numberOfBattles",
BATTLE_GUN_COOLING_RATE = "robocode.battle.gunCoolingRate",
BATTLE_INACTIVITY_TIME = "robocode.battle.inactivityTime",
BATTLE_SENTRY_BORDER_SIZE = "robocode.battle.sentryBorderSize",
BATTLE_HIDE_ENEMY_NAMES = "robocode.battle.hideEnemyNames",
BATTLE_CONFIG_RADAR = "robocode.battle.config.radar",
BATTLE_CONFIG_CANNON = "robocode.battle.config.cannon",
BATTLE_CONFIG_SHIP = "robocode.battle.config.ship",
BATTLE_CONFIG_DEFAULT_RADAR = "[100-10],[50-20]",
BATTLE_CONFIG_DEFAULT_CANNON = "[8.0-4.0-2.0-2.0-3.0-400.0],[16.0-4.0-2.0-1.0-3.0-700.0],[8-5-2-1-6-700]",
BATTLE_CONFIG_DEFAULT_SHIP = "[8-4-2-1-3],[16-4-2-2-3],[8-5-2-1-6]";
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator.stream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.samza.Partition;
import org.apache.samza.SamzaException;
import org.apache.samza.config.Config;
import org.apache.samza.config.MapConfig;
import org.apache.samza.coordinator.stream.messages.CoordinatorStreamMessage;
import org.apache.samza.coordinator.stream.messages.SetConfig;
import org.apache.samza.serializers.JsonSerde;
import org.apache.samza.serializers.Serde;
import org.apache.samza.system.IncomingMessageEnvelope;
import org.apache.samza.system.SystemAdmin;
import org.apache.samza.system.SystemConsumer;
import org.apache.samza.system.SystemStream;
import org.apache.samza.system.SystemStreamMetadata;
import org.apache.samza.system.SystemStreamMetadata.SystemStreamPartitionMetadata;
import org.apache.samza.system.SystemStreamPartition;
import org.apache.samza.system.SystemStreamPartitionIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A wrapper around a SystemConsumer that provides helpful methods for dealing
* with the coordinator stream.
*/
public class CoordinatorStreamSystemConsumer {
  private static final Logger log = LoggerFactory.getLogger(CoordinatorStreamSystemConsumer.class);

  /** Deserializes the key portion of a coordinator stream message. */
  private final Serde<List<?>> keySerde;
  /** Deserializes the value portion of a coordinator stream message. */
  private final Serde<Map<String, Object>> messageSerde;
  /** The coordinator stream always lives in a single partition: partition 0. */
  private final SystemStreamPartition coordinatorSystemStreamPartition;
  private final SystemConsumer systemConsumer;
  private final SystemAdmin systemAdmin;
  /** Latest configuration view, folded from SetConfig messages during bootstrap. */
  private final Map<String, String> configMap;
  private boolean isBootstrapped;
  private boolean isStarted;
  /** All messages read so far, in stream order (LinkedHashSet preserves it). */
  private Set<CoordinatorStreamMessage> bootstrappedStreamSet = new LinkedHashSet<CoordinatorStreamMessage>();

  /**
   * @param coordinatorSystemStream the coordinator stream (partition 0 is used)
   * @param systemConsumer the consumer used to read the stream
   * @param systemAdmin the admin used to look up stream metadata/offsets
   * @param keySerde deserializer for message keys
   * @param messageSerde deserializer for message values
   */
  public CoordinatorStreamSystemConsumer(SystemStream coordinatorSystemStream, SystemConsumer systemConsumer, SystemAdmin systemAdmin, Serde<List<?>> keySerde, Serde<Map<String, Object>> messageSerde) {
    this.coordinatorSystemStreamPartition = new SystemStreamPartition(coordinatorSystemStream, new Partition(0));
    this.systemConsumer = systemConsumer;
    this.systemAdmin = systemAdmin;
    this.configMap = new HashMap<String, String>();
    this.isBootstrapped = false;
    this.keySerde = keySerde;
    this.messageSerde = messageSerde;
  }

  /** Convenience constructor using JSON serdes for both keys and values. */
  public CoordinatorStreamSystemConsumer(SystemStream coordinatorSystemStream, SystemConsumer systemConsumer, SystemAdmin systemAdmin) {
    this(coordinatorSystemStream, systemConsumer, systemAdmin, new JsonSerde<List<?>>(), new JsonSerde<Map<String, Object>>());
  }

  /**
   * Retrieves the oldest offset in the coordinator stream, and registers the
   * coordinator stream with the SystemConsumer using the earliest offset.
   *
   * @throws SamzaException if stream or partition metadata cannot be found
   */
  public void register() {
    log.debug("Attempting to register: {}", coordinatorSystemStreamPartition);
    Set<String> streamNames = new HashSet<String>();
    String streamName = coordinatorSystemStreamPartition.getStream();
    streamNames.add(streamName);
    Map<String, SystemStreamMetadata> systemStreamMetadataMap = systemAdmin.getSystemStreamMetadata(streamNames);
    if (systemStreamMetadataMap == null) {
      throw new SamzaException("Received a null systemStreamMetadataMap from the systemAdmin. This is illegal.");
    }
    SystemStreamMetadata systemStreamMetadata = systemStreamMetadataMap.get(streamName);
    if (systemStreamMetadata == null) {
      throw new SamzaException("Expected " + streamName + " to be in system stream metadata.");
    }
    SystemStreamPartitionMetadata systemStreamPartitionMetadata = systemStreamMetadata.getSystemStreamPartitionMetadata().get(coordinatorSystemStreamPartition.getPartition());
    if (systemStreamPartitionMetadata == null) {
      throw new SamzaException("Expected metadata for " + coordinatorSystemStreamPartition + " to exist.");
    }
    String startingOffset = systemStreamPartitionMetadata.getOldestOffset();
    log.debug("Registering {} with offset {}", coordinatorSystemStreamPartition, startingOffset);
    systemConsumer.register(coordinatorSystemStreamPartition, startingOffset);
  }

  /**
   * Starts the underlying SystemConsumer. Idempotent: a second call is a no-op.
   */
  public void start() {
    if (isStarted) {
      log.info("Coordinator stream consumer already started");
      return;
    }
    log.info("Starting coordinator stream system consumer.");
    systemConsumer.start();
    isStarted = true;
  }

  /**
   * Stops the underlying SystemConsumer. Idempotent, mirroring {@link #start()}:
   * stopping a consumer that was never started (or already stopped) is a no-op.
   */
  public void stop() {
    if (!isStarted) {
      log.info("Coordinator stream consumer not started or already stopped.");
      return;
    }
    log.info("Stopping coordinator stream system consumer.");
    systemConsumer.stop();
    isStarted = false;
  }

  /**
   * Read all messages from the earliest offset, all the way to the latest.
   * Currently, this method only pays attention to config messages.
   *
   * @throws SamzaException if a message cannot be read or deserialized
   */
  public void bootstrap() {
    log.info("Bootstrapping configuration from coordinator stream.");
    SystemStreamPartitionIterator iterator = new SystemStreamPartitionIterator(systemConsumer, coordinatorSystemStreamPartition);
    try {
      while (iterator.hasNext()) {
        CoordinatorStreamMessage coordinatorStreamMessage = deserialize(iterator.next());
        log.debug("Received coordinator stream message: {}", coordinatorStreamMessage);
        bootstrappedStreamSet.add(coordinatorStreamMessage);
        if (SetConfig.TYPE.equals(coordinatorStreamMessage.getType())) {
          String configKey = coordinatorStreamMessage.getKey();
          if (coordinatorStreamMessage.isDelete()) {
            // A delete message removes the key from the config view.
            configMap.remove(configKey);
          } else {
            String configValue = new SetConfig(coordinatorStreamMessage).getConfigValue();
            configMap.put(configKey, configValue);
          }
        }
      }
      log.debug("Bootstrapped configuration: {}", configMap);
      isBootstrapped = true;
    } catch (Exception e) {
      throw new SamzaException(e);
    }
  }

  /**
   * Deserializes a raw envelope from the coordinator stream into a
   * CoordinatorStreamMessage. The envelope's message may be null (a delete),
   * in which case the value map is null.
   */
  private CoordinatorStreamMessage deserialize(IncomingMessageEnvelope envelope) {
    Object[] keyArray = keySerde.fromBytes((byte[]) envelope.getKey()).toArray();
    Map<String, Object> valueMap = null;
    if (envelope.getMessage() != null) {
      valueMap = messageSerde.fromBytes((byte[]) envelope.getMessage());
    }
    return new CoordinatorStreamMessage(keyArray, valueMap);
  }

  /**
   * Returns all messages read so far, bootstrapping first if necessary.
   *
   * @return the set of all coordinator stream messages, in stream order
   */
  public Set<CoordinatorStreamMessage> getBootstrappedStream() {
    log.info("Returning the bootstrapped data from the stream");
    if (!isBootstrapped)
      bootstrap();
    return bootstrappedStreamSet;
  }

  /**
   * @deprecated typo retained for backward compatibility; use
   *             {@link #getBootstrappedStream()} instead.
   */
  @Deprecated
  public Set<CoordinatorStreamMessage> getBoostrappedStream() {
    return getBootstrappedStream();
  }

  /**
   * Returns all messages of the given type read so far, bootstrapping first.
   *
   * @param type the message type to filter on (case-insensitive)
   * @return the matching messages, in stream order
   */
  public Set<CoordinatorStreamMessage> getBootstrappedStream(String type) {
    log.debug("Bootstrapping coordinator stream for messages of type {}", type);
    bootstrap();
    LinkedHashSet<CoordinatorStreamMessage> bootstrappedStream = new LinkedHashSet<CoordinatorStreamMessage>();
    for (CoordinatorStreamMessage coordinatorStreamMessage : bootstrappedStreamSet) {
      if (type.equalsIgnoreCase(coordinatorStreamMessage.getType())) {
        bootstrappedStream.add(coordinatorStreamMessage);
      }
    }
    return bootstrappedStream;
  }

  /**
   * @return The bootstrapped configuration that's been read after bootstrap has
   *         been invoked.
   * @throws SamzaException if {@link #bootstrap()} has not been called yet
   */
  public Config getConfig() {
    if (isBootstrapped) {
      return new MapConfig(configMap);
    } else {
      throw new SamzaException("Must call bootstrap before retrieving config.");
    }
  }

  /**
   * Gets an iterator on the coordinator stream, starting from the starting offset the consumer was registered with.
   *
   * @return an iterator on the coordinator stream pointing to the starting offset the consumer was registered with.
   */
  public SystemStreamPartitionIterator getStartIterator() {
    return new SystemStreamPartitionIterator(systemConsumer, coordinatorSystemStreamPartition);
  }

  /**
   * returns all unread messages after an iterator on the stream
   *
   * @param iterator the iterator pointing to an offset in the coordinator stream. All unread messages after this iterator are returned
   * @return a set of unread messages after a given iterator
   */
  public Set<CoordinatorStreamMessage> getUnreadMessages(SystemStreamPartitionIterator iterator) {
    return getUnreadMessages(iterator, null);
  }

  /**
   * returns all unread messages of a specific type, after an iterator on the stream
   *
   * @param iterator the iterator pointing to an offset in the coordinator stream. All unread messages after this iterator are returned
   * @param type the type of the messages to be returned, or null for all types
   * @return a set of unread messages of a given type, after a given iterator
   */
  public Set<CoordinatorStreamMessage> getUnreadMessages(SystemStreamPartitionIterator iterator, String type) {
    LinkedHashSet<CoordinatorStreamMessage> messages = new LinkedHashSet<CoordinatorStreamMessage>();
    while (iterator.hasNext()) {
      CoordinatorStreamMessage coordinatorStreamMessage = deserialize(iterator.next());
      if (type == null || type.equals(coordinatorStreamMessage.getType())) {
        messages.add(coordinatorStreamMessage);
      }
    }
    return messages;
  }

  /**
   * Checks whether or not there are any messages after a given iterator on the coordinator stream
   *
   * @param iterator The iterator to check if there are any new messages after this point
   * @return True if there are new messages after the iterator, false otherwise
   */
  public boolean hasNewMessages(SystemStreamPartitionIterator iterator) {
    if (iterator == null) {
      return false;
    }
    return iterator.hasNext();
  }
}
|
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.teamcode.OpticalFlowBoard;
import org.firstinspires.ftc.teamcode.DbgLog;
/**
 * Hardware test op mode for the OpticalFlowBoard: logs the board's product
 * IDs once, then — while the op mode is active — toggles an LED and streams
 * absolute position readings to the driver station telemetry.
 *
 * Created by Cherry Pi on 12/27/2018.
 */
@TeleOp(name = "OpticalFlowBoardTest", group = "Testing")
public class OpticalFlowBoardTest extends LinearOpMode {
    OpticalFlowBoard opticalFlowBoard;

    @Override
    public void runOpMode() throws InterruptedException {
        DbgLog.msg("Starting opMode; retrieving the OpticalFlowBoard class");
        opticalFlowBoard = hardwareMap.get(OpticalFlowBoard.class, "opticalFlowBoard");

        // Report both product IDs before the match starts.
        DbgLog.msg("Fetching product id");
        byte productId = opticalFlowBoard.getProductIDRaw();
        byte cherryPiProductId = opticalFlowBoard.getCPProductIDRaw();
        telemetry.addData("Product ID", productId);
        telemetry.addData("Cherry Pi Board Product ID", cherryPiProductId);
        DbgLog.msg("productId: %02x cpProductId: %02x", productId, cherryPiProductId);
        telemetry.update();

        waitForStart();

        while (opModeIsActive()) {
            // Blink: all LEDs off, then one on, half a second apart.
            opticalFlowBoard.setLedStatus(false, false, false, false);
            sleep(500);
            opticalFlowBoard.setLedStatus(false, true, false, false);
            sleep(500);

            // Read and report the latest absolute position, if available.
            OpticalFlowBoard.Absolute reading = opticalFlowBoard.readAbsolute();
            if (reading == null) {
                telemetry.addData("absolute not read", "");
            } else {
                telemetry.addData("x", reading.getX());
                telemetry.addData("y", reading.getY());
                telemetry.addData("timestamp", reading.getTimestamp());
            }
            telemetry.update();
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.springdata.examples;
import org.apache.ignite.Ignition;
import org.apache.ignite.client.IgniteClient;
import org.apache.ignite.configuration.ClientConfiguration;
import org.apache.ignite.springdata.repository.config.EnableIgniteRepositories;
import org.apache.ignite.springdata.repository.config.RepositoryConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static org.apache.ignite.configuration.ClientConnectorConfiguration.DFLT_PORT;
/**
* Example of Spring application configuration that represents beans required to configure Spring Data repository access
* to an Ignite cluster through the thin client.
*
* Note that both Ignite thin client and Ignite node approaches of Ignite cluster access configuration uses the same API.
* Ignite Spring Data integration automatically recognizes the type of provided bean and use the appropriate
* cluster connection.
*
* @see SpringApplicationConfiguration
*/
@Configuration
@EnableIgniteRepositories
public class IgniteClientSpringApplicationConfiguration {
    /**
     * Creates the Apache Ignite thin client bean used for accessing the Ignite cluster.
     * The client connects to a local node on the default client connector port.
     *
     * Note that the bean name must match the value of {@link RepositoryConfig#igniteInstance}
     * that {@link PersonRepository} is marked with; here the default value of that
     * property is relied upon.
     *
     * @return Ignite thin client instance.
     */
    @Bean
    public IgniteClient igniteInstance() {
        ClientConfiguration cfg = new ClientConfiguration().setAddresses("127.0.0.1:" + DFLT_PORT);
        return Ignition.startClient(cfg);
    }
}
|
package org.modelcatalogue.letter.annotator;
/**
 * Highlights the matched text from the letter using the matched term.
 */
public interface Highlighter {
/**
 * The highlighter which doesn't do anything: it returns each match unchanged.
 */
Highlighter NOOP = new AbstractHighlighter() {
@Override
public String getReplacement(String originalMatch, CandidateTerm matchedTerm) {
return originalMatch;
}
};
/**
 * The highlighter which highlights the text with HTML fragments.
 */
Highlighter HTML = new HtmlHighlighter();
/**
 * Returns the custom head for the highlighter which should be prepended to the highlighted text.
 * @return the custom head for the highlighter which should be prepended to the highlighted text
 */
String getHead();
/**
 * Returns the custom tail for the highlighter which should be appended to the highlighted text.
 * @return the custom tail for the highlighter which should be appended to the highlighted text
 */
String getTail();
/**
 * Returns the match from the fragments with more data encoded using the information from the matched term.
 * @param originalMatch matched text
 * @param matchedTerm related term
 * @return the match from the fragments with more data encoded using the information from the matched term
 */
String getReplacement(String originalMatch, CandidateTerm matchedTerm);
}
|
package im.actor.core.entity.encryption;
import java.io.IOException;
import java.util.Comparator;
import im.actor.runtime.bser.BserObject;
import im.actor.runtime.bser.BserValues;
import im.actor.runtime.bser.BserWriter;
import im.actor.runtime.crypto.primitives.util.ByteStrings;
import im.actor.runtime.function.Predicate;
/**
 * Persistable record of an encrypted session with a peer: the key-group and pre-key
 * identifiers on both sides plus the session's master key. Serialized with the Bser
 * binary format ({@link BserObject}).
 */
public class PeerSession extends BserObject {
    /** Predicate selecting sessions whose peer key group equals {@code theirKeyGroupId}. */
    public static Predicate<PeerSession> BY_THEIR_GROUP(final int theirKeyGroupId) {
        return new Predicate<PeerSession>() {
            @Override
            public boolean apply(PeerSession session) {
                return session.getTheirKeyGroupId() == theirKeyGroupId;
            }
        };
    }
    /** Predicate selecting sessions matching the peer key group and both pre-key ids. */
    public static Predicate<PeerSession> BY_IDS(final int theirKeyGroupId, final long ownPreKeyId, final long theirPreKeyId) {
        return new Predicate<PeerSession>() {
            @Override
            public boolean apply(PeerSession session) {
                return session.getTheirKeyGroupId() == theirKeyGroupId &&
                        session.getOwnPreKeyId() == ownPreKeyId &&
                        session.getTheirPreKeyId() == theirPreKeyId;
            }
        };
    }
    /** Orders sessions by byte-wise comparison of their master keys. */
    public static final Comparator<PeerSession> COMPARATOR = new Comparator<PeerSession>() {
        @Override
        public int compare(PeerSession lhs, PeerSession rhs) {
            return ByteStrings.compare(lhs.getMasterKey(), rhs.getMasterKey());
        }
    };
    // Session id
    private long sid;
    // Peer user id (presumably — TODO confirm against callers)
    private int uid;
    // Key group id on our side
    private int ownKeyGroupId;
    // Key group id on the peer's side
    private int theirKeyGroupId;
    // Pre-key id used on our side
    private long ownPreKeyId;
    // Pre-key id used on the peer's side
    private long theirPreKeyId;
    // Shared session master key
    private byte[] masterKey;
    /**
     * Creates a fully populated session.
     */
    public PeerSession(long sid, int uid, int ownKeyGroupId, int theirKeyGroupId,
                       long ownPreKeyId, long theirPreKeyId, byte[] masterKey) {
        this.sid = sid;
        this.uid = uid;
        this.ownKeyGroupId = ownKeyGroupId;
        this.theirKeyGroupId = theirKeyGroupId;
        this.ownPreKeyId = ownPreKeyId;
        this.theirPreKeyId = theirPreKeyId;
        this.masterKey = masterKey;
    }
    /**
     * Deserializes a session previously written by {@link #serialize(BserWriter)}.
     *
     * @throws IOException if the data cannot be parsed
     */
    public PeerSession(byte[] data) throws IOException {
        load(data);
    }
    // NOTE(review): returns the internal array without a defensive copy, so callers can
    // mutate session state; existing callers may rely on this — confirm before changing.
    public byte[] getMasterKey() {
        return masterKey;
    }
    public long getSid() {
        return sid;
    }
    public int getUid() {
        return uid;
    }
    public int getOwnKeyGroupId() {
        return ownKeyGroupId;
    }
    public int getTheirKeyGroupId() {
        return theirKeyGroupId;
    }
    public long getOwnPreKeyId() {
        return ownPreKeyId;
    }
    public long getTheirPreKeyId() {
        return theirPreKeyId;
    }
    // Field ids 1..7 below define the persisted wire format and must stay in sync
    // between parse() and serialize(); do not renumber.
    @Override
    public void parse(BserValues values) throws IOException {
        sid = values.getLong(1);
        uid = values.getInt(2);
        ownKeyGroupId = values.getInt(3);
        theirKeyGroupId = values.getInt(4);
        ownPreKeyId = values.getLong(5);
        theirPreKeyId = values.getLong(6);
        masterKey = values.getBytes(7);
    }
    @Override
    public void serialize(BserWriter writer) throws IOException {
        writer.writeLong(1, sid);
        writer.writeInt(2, uid);
        writer.writeInt(3, ownKeyGroupId);
        writer.writeInt(4, theirKeyGroupId);
        writer.writeLong(5, ownPreKeyId);
        writer.writeLong(6, theirPreKeyId);
        writer.writeBytes(7, masterKey);
    }
}
|
package org.ebes ;
/**
* Ebes.java
*
* @author Gustavo R. Zavala <grzavala@gmail.com> Antonio J. Nebro <antonio@lcc.uma.es> Juan J.
* Durillo <durillo@lcc.uma.es>
* @version 1.0
*/
import org.uma.jmetal.problem.doubleproblem.impl.AbstractDoubleProblem;
import org.uma.jmetal.solution.doublesolution.DoubleSolution;
import org.uma.jmetal.util.JMetalException;
import org.uma.jmetal.util.solutionattribute.impl.OverallConstraintViolation;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
/** Class representing problem Ebes Spatial Bars Structure (Estructuras de Barras Espaciales) */
@SuppressWarnings("serial")
public class Ebes extends AbstractDoubleProblem {
/**
* Constructor. Creates a default instance of the Ebes problem.
*
* @param solutionType The solution type must "Real" or "BinaryReal".
*/
/** Stores the number of Bar Groups */
protected int numberOfEval_;
/*
protected int maxEvaluations_ ;
public void setMaxEvaluations(int maxEvaluations) {
maxEvaluations_ = maxEvaluations;
} // setNumberOfElement
public int getMaxEvaluations() {
return maxEvaluations_;
} // setNumberOfElement
*/
/*
* Stores the number of Nodes of the problem
*/
protected int numberOfNodes;
public void setNumberOfNodes(int numberOfNodes) {
this.numberOfNodes = numberOfNodes;
} // setNumberOfNodes
public int getNumberOfNodes() {
return numberOfNodes;
} // getNumberOfNodes
/** Stores the number of Nodes of the problem */
protected int numberOfLibertyDegree_ = 6;
protected int numberOfNodesRestricts_;
public void numberOfNodesRestricts(int numberOfNodesRestricts) {
numberOfNodesRestricts_ = numberOfNodesRestricts;
} // set numberOfNodesRestricts
public int getNumberOfNodesRestricts() {
return numberOfNodesRestricts_;
} // get NumberOfNodes
/** Stores the number of Nodes of the problem */
protected double[][] nodeCheck_;
public double nodeCheck(int i, int j) {
return nodeCheck_[i][j];
} // get node check
protected int[][] geometryCheck_;
public int geometryCheck(int i, int j) {
return geometryCheck_[i][j];
} // get node check
/** Stores the number of Bar Groups */
protected int numberOfGroupElements_;
public void setnumberOfGroupElements(int i) {
numberOfGroupElements_ = i;
} // setNumberOfElement
public int getnumberOfGroupElements() {
return numberOfGroupElements_;
} // getNumberOfElements
/** Stores the number of Bar of the problem */
protected int numberOfElements_;
public void setNumberOfElements(int numberOfElements) {
numberOfElements_ = numberOfElements;
} // setNumberOfElement
public int getNumberOfElements() {
return numberOfElements_;
} // getNumberOfElements
public boolean lLoadsOwnWeight;
public boolean lSecondOrderGeometric;
public boolean lBuckling;
/** Stores the Elements Between Difference Greatest */
protected int elementsBetweenDiffGreat_;
public void setElementsBetweenDiffGreat(int elementsBetweenDiffGreat) {
elementsBetweenDiffGreat_ = elementsBetweenDiffGreat;
} // setNumberOfElement
public int getElementsBetweenDiffGreat() {
return elementsBetweenDiffGreat_;
} // getNumberOfElements
/** Stores the number of Load in Nodes of the problem */
protected int numberOfWeigthsNodes_;
public void setNumberOfWeigthsNodes(int numberOfWeigthsNodes) {
numberOfWeigthsNodes_ = numberOfWeigthsNodes;
} // setNumberOfWeigths
public int getNumberOfWeigthsNodes() {
return numberOfWeigthsNodes_;
} // getNumberOfWeigths
/** Stores the number of Load in ElementsNodes of the problem */
protected int numberOfWeigthsElements_;
public void setNumberOfWeigthsElements(int numberOfWeigthsElements) {
numberOfWeigthsElements_ = numberOfWeigthsElements;
} // setNumberOfWeigths
public int getNumberOfWeigthsElements() {
return numberOfWeigthsElements_;
} // getNumberOfWeigths
/** Stores the number a wide the diagonal matrix */
protected int matrixWidthBand_;
public void setMatrixWidthBand(int matrixWidthBand) {
matrixWidthBand_ = matrixWidthBand;
} // setMatrixWidtBand
public int getMatrixWidthBand() {
return matrixWidthBand_;
} // getMatrixWidtBand
protected int numberOfWeigthHypothesis_;
public void setNumberOfWeigthHypothesis(int numberOfWeigthHypothesis) {
numberOfWeigthHypothesis_ = numberOfWeigthHypothesis;
} // set numberOfLibertyDegree
public int getNumberOfWeigthHypothesis() {
return numberOfWeigthHypothesis_;
} // get numberOfLibertyDegree
public int numberOfConstraintsGeometric_;
public void setnumberOfConstraintsGeometric(int i) {
numberOfConstraintsGeometric_ = i;
} // set numberOfConstraintsGeometric_
public int getnumberOfConstraintsGeometric() {
return numberOfConstraintsGeometric_;
} // get numberOfConstraintsGeometric_
protected int numberOfConstraintsNodes_;
protected int numberOfGroupsToCheckGeometry_;
public void setNumberOfConstraintsNodes(int numberOfConstraintsNodes) {
numberOfConstraintsNodes_ = numberOfConstraintsNodes;
} // set numberOfConstraintsNodes
public int getNumberOfConstraintsNodes() {
return numberOfWeigthHypothesis_;
} // get numberOfRestrictionNodes
/** Stores the Node */
protected double[][] Node_;
public double getNode(int i, int j) {
return Node_[i][j];
} // getNodes
/** Stores the NodeRestrict */
protected double[][] NodeRestrict_;
public double getNodeRestrict(int i, int j) {
return NodeRestrict_[i][j];
} // getNodes
  /** Stores the per-group section data; rows are groups, columns are the index constants declared below (SHAPE, AREA, ...). */
  protected double[][] Groups_;
  // NOTE(review): despite its name, this returns only column MAX_COLUMN (= DESCRIPTION)
  // of row i, not the whole row — confirm this is intentional before relying on it.
  public double getGroups(int i) {
    return Groups_[i][MAX_COLUMN];
  } // getGroups
/** Stores the Element */
protected double[][] Element_;
public double getElement(int i, int j) {
return Element_[i][j];
} // getElement
/** Stores the Load on Nodes */
protected double[][] WeightNode_;
public double getWeightNode(int i, int j) {
return WeightNode_[i][j];
} // getWeight
/** Stores the OverLoad on Elements */
protected double[][] OverloadInElement_;
public double getWeightElement(int i, int j) {
return OverloadInElement_[i][j];
} // getWeight
/** Stores the Load on Elements Itself */
protected double[][] WeightElement_;
public double getWeightElementItself(int i, int j) {
return WeightElement_[i][j];
} // getWeight
/** Stores the k */
protected double[] MatrixStiffness_;
public double MatrixStiffness(int i) {
return MatrixStiffness_[i];
} // get Strain i
/** Stores the k displacement */
protected double[][] DisplacementNodes_;
public double DisplacementNodes(int node, int hi) {
return DisplacementNodes_[node][hi];
} // get DisplacementNodes i
/** Stores the Effort in node i */
protected double[][][] Efforti_;
public double Efforti(int i, int element, int hypothesis) {
return Efforti_[i][element][hypothesis];
} // get Effort i
/** Stores the Effort in node j */
protected double[][][] Effortj_;
public double Effortj(int i, int element, int hypothesis) {
return Effortj_[i][element][hypothesis];
} // get Effort j
/** Stores the Axial force in node i */
protected double[] AxialForcei_;
public double AxialForcei_(int element) {
return AxialForcei_[element];
} // get Axial Force i
/** Stores the Axial force in node j */
protected double[] AxialForcej_;
public double AxialForcej_(int element) {
return AxialForcej_[element];
} // get Axial Force j
protected int strainAdmissibleCut_;
public void setStrainAdmissibleCut(int strainAdmissibleCut) {
strainAdmissibleCut_ = strainAdmissibleCut;
} // setStrainAdmissibleCompress
public int getStrainAdmissibleCut() {
return strainAdmissibleCut_;
} // getStrainAdmissibleCut
/** Stores the Strain in node i */
protected double[][][] Straini_;
public double Straini(int i, int element, int hypothesis) {
return Straini_[i][element][hypothesis];
} // get Strain i
/** Stores the Strain in node j */
protected double[][][] Strainj_;
public double getStrainj(int i, int element, int hypothesis) {
// i=0: Compression, =1: Traction, =2: Tangential
return Strainj_[i][element][hypothesis];
} // get Strain j
/** Stores the max omega for groups */
protected double[][] omegaMax_;
public double getOmegaMax(int group, int hypothesis) {
return omegaMax_[group][hypothesis];
} // get
/** Stores the max Nxx for groups */
protected double[][] NxxMax_;
public double getNxxMax(int group, int hypothesis) {
// normal (+)
return NxxMax_[group][hypothesis];
} // get
/** Stores the min Nxx for groups */
protected double[][] NxxMin_;
public double getNxxMin(int group, int hypothesis) {
// normal (-)
return NxxMin_[group][hypothesis];
} // get
/** Stores the max Mxz for groups */
protected double[][] MxzMax_;
public double getMxzMax(int group, int hypothesis) {
// flexor moment (+)
return MxzMax_[group][hypothesis];
} // get
/** Stores the max Mxz for groups */
protected double[][] MxzMin_;
public double getMxzMin(int group, int hypothesis) {
// flexor moment (-)
return MxzMin_[group][hypothesis];
} // get
/** Stores the max Mxy for groups */
protected double[][] MxyMax_;
public double getMxyMax(int group, int hypothesis) {
// flexor moment (+)
return MxyMax_[group][hypothesis];
} // get
/** Stores the min Mxy for groups */
protected double[][] MxyMin_;
public double getMxyMin(int group, int hypothesis) {
// flexor moment (-)
return MxyMin_[group][hypothesis];
} // get
/** Stores the max Nxx Strain for groups */
protected double[][] StrainNxxMax_;
public double getStrainNxxMax(int group, int hypothesis) {
// normal (+)
return StrainNxxMax_[group][hypothesis];
} // get Strain
protected double[][] StrainNxxMin_;
public double getStrainNxxMin(int group, int hypothesis) {
// normal (-)
return StrainNxxMin_[group][hypothesis];
} // get Strain
/** Stores the max Mxz Strain for groups */
protected double[][] StrainMxzMax_;
public double getStrainMxzMax(int group, int hypothesis) {
// normal (+)
return StrainMxzMax_[group][hypothesis];
} // get Strain
/** Stores the max Mxz Strain for groups */
protected double[][] StrainMxzMin_;
public double getStrainMxzMin(int group, int hypothesis) {
// normal (-)
return StrainMxzMin_[group][hypothesis];
} // get Strain
/** Stores the max Mxz Strain for groups */
protected double[][] StrainMxyMax_;
public double getStrainMxyMax(int group, int hypothesis) {
// normal (+)
return StrainMxyMax_[group][hypothesis];
} // get Strain
/** Stores the max Mxz Strain for groups */
protected double[][] StrainMxyMin_;
public double getStrainMxyMin(int group, int hypothesis) {
// normal (-)
return StrainMxyMin_[group][hypothesis];
} // get Strain
/** Stores the max Strain for elements */
protected double[][] StrainMax_;
public double getStrainMax(int group, int hypothesis) {
// normal (+)
return StrainMax_[group][hypothesis];
} // get Strain j
protected double[][] StrainMin_;
public double getStrainMin(int group, int hypothesis) {
// normal (+)
return StrainMin_[group][hypothesis];
} // get Strain j
/** Stores the max Strain for elements */
protected double[][] OldStrainMax_;
public double getOldStrainMax(int group, int hypothesis) {
// normal (+)
return OldStrainMax_[group][hypothesis];
} // get Strain j
protected double[][] OldStrainMin_;
public double getOldStrainMin(int group, int hypothesis) {
// normal (+)
return OldStrainMin_[group][hypothesis];
} // get Strain j
/** Stores the max Strain for elements */
protected double[][] StrainCutMax_;
public double getStrainCutMax(int group, int hypothesis) {
// Tangential
return StrainCutMax_[group][hypothesis];
} // get Strain j
/** Stores the min Strain for elements */
protected double[] StrainResidualMin_;
public double getStrainResidualMin(int hypothesis) {
// stress negative
return StrainResidualMin_[hypothesis];
} // get Strain j
/** Stores the max Strain for elements */
protected double[] StrainResidualMax_;
public double getStrainResidualMax(int hypothesis) {
// stress positive
return StrainResidualMax_[hypothesis];
} // get Strain j
/** Stores the Cut Strain Residual for elements */
protected double[] StrainResidualCut_;
public double getStrainResidualCut(int hypothesis) {
// stress cut
return StrainResidualCut_[hypothesis];
} // get Strain j
// ---- ANTONIO -----//
public int getGroupShape(int groupId) {
return (int) Groups_[groupId][SHAPE];
}
public int getVariablePosition(int groupId) {
return (int) Groups_[groupId][VAR_POSITION];
}
// ---- ANTONIO -----//
// NEW 20/05/2016
String GravitationalAxis_;
// -----------------------
//
double g_ = 9.81; // acceleration of gravity
// variables load beams
double[][][] cbi;
double[][][] cbj;
double[] Qi =
new double[numberOfLibertyDegree_]; // carga equivalente en nudo i referida al eje global
double[] Qj =
new double[numberOfLibertyDegree_]; // carga equivalente en nudo j referida al eje global
double[] pi =
new double
[numberOfLibertyDegree_]; // variable auxiliar carga equivalente en nudo i referida al eje
// local
double[] pj =
new double
[numberOfLibertyDegree_]; // variable auciliar carga equivalente en nudo j referida al eje
// local
double[][] PQ;
double Reaction_[][];
double[][] Kii = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Kij = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Kji = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Kjj = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KGii = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KGij = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KGji = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KGjj = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Rij = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Rji = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] RTij = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] RTji = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Rpij = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] Rpji = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] RpTij = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] RpTji = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
// second order geometric
double[][] KiiSOG = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KijSOG = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KjiSOG = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] KjjSOG = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
// matrix indexes of groups elements
int INDEX_ = 0; // index for the asociation with elements group
int GROUP_ = 1; // groups classification
int SHAPE = 2; // section type
int BETA = 3; // principal angle
int AREA = 4; // angle of the principal axis of inertia
int Az_ = 5; // static moment in Z local principal axis
int Ay_ = 6; // static moment in Y local principal axis
int Iz_ = 7; // inertia moment in Z local principal axis
int Iy_ = 8; // inertia moment in Y local principal axis
int It_ = 9; // inertia polar in Y and Z local principal axis
int Iw_ = 10; // warp modulus (módlo de alabeo)
int TypeMaterial_ = 11; // lengthwise modulus of elasticity
int E_ = 12; // lengthwise modulus of elasticity
int G_ = 13; // transversal modulus of elasticity
int BLijY_ = 14; // buckling beta coefficient
int BLijZ_ = 15; // buckling beta coefficient
int Fyz_ = 16; // Fyz
  int Li_ = 17; // length of the rigid bar at node i
  int Lj_ = 18; // length of the rigid bar at node j
int VARIABLES = 19; // cantidad de vaiables de decision
int Y_ = 20; // variable height in Y axis local principal
int Z_ = 21; // variable width in Z axis local principal
int eY_ = 22; // variable tickness in Y axis or coefficient thickness of the axis Y -> Ay
int eZ_ = 23; // variable tickness in Z axis or coefficient thickness of the axis Z -> Az
int uY_ = 24; // baricentro a la fibra extrema superior (up)
int dY_ = 25; // baricentro a la fibra extrema inferior (down)
int lZ_ = 26; // baricentro a la fibra extrema izquierda (left)
int rZ_ = 27; // //baricentro a la fibra extrema derecha (right)
int CONSTRAINT = 28; // cantidad de restricciones
int RATIO_YZ = 29; // ratio with heigth and width
int SPECIFIC_WEIGHT = 30; // material density
int STRESS = 31; // strain positive in the extreme fiber
int COMPRESSION = 32; // strain negative in the extreme fiber
int STRESS_CUT = 33; // cut strain in the section
int ELONGATION_POS = 34; // elongation + in %
int ELONGATION_NEG = 35; // elongation - in %
int VAR_Y_LOWER_LIMIT = 36;
int VAR_Y_UPPER_LIMIT = 37;
int VAR_Z_LOWER_LIMIT = 38;
int VAR_Z_UPPER_LIMIT = 39;
int VAR_eY_LOWER_LIMIT = 40;
int VAR_eY_UPPER_LIMIT = 41;
int VAR_eZ_LOWER_LIMIT = 42;
int VAR_eZ_UPPER_LIMIT = 43;
int VAR_POSITION = 44;
int DESCRIPTION = 45;
int MAX_COLUMN = 45;
// matrix indexes of weight element
int CARGA_UNIFORME_TOTAL = 0;
int CARGA_PUNTUAL = 1;
int CARGA_UNIFORME_PARCIAL = 2;
int CARGA_TRIANGULAR_I = 3;
int CARGA__TRIANGULAR_J = 4;
int CARGA_PARABOLICA = 5;
int CARGA_MOMENTO_PUNTUAL = 8;
int CARGA_MOMENTO_DISTRIBUIDO = 6;
int CARGA_TEMPERATURA = 10;
// reference weight of elements in node
// axis reference
int aX_ = 0; // to axis X
int aY_ = 1; // to axis Y
int aZ_ = 2; // to axis Z
int gX_ = 3; // to axis X flexor moment
int gY_ = 4; // to axis Y flexor moment
int gZ_ = 5; // to axis Z flexor moment
// matrix indexes of shape
public static final int CIRCLE = 0; // section type, 1 variable (diámetro)
public static final int HOLE_CIRCLE =
1; // section type, 2 variable (diámetro externo y espesor)
public static final int RECTANGLE = 2; // section type, 2 variables (y=alto, z=ancho)
public static final int HOLE_RECTANGLE = 3; // section type, 4 variables (y, z, eY_, eZ_)
public static final int I_SINGLE = 4; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int I_DOUBLE = 5; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int H_SINGLE = 6; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int H_DOUBLE = 7; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int L_SINGLE = 8; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int L_DOUBLE = 9; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int T_SINGLE = 10; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
public static final int T_DOUBLE = 11; // section type, 4 variables (y(alma), z(ala), eY_, eZ_)
int RIG_RIG = 0;
int RIG_ART = 1;
int ART_RIG = 10;
int ART_ART = 11;
// matrix indexes of structure elements
// int INDEX_=0; // id elements groups
int i_ = 1; // i, minor number node
int j_ = 2; // j, mayor number node
int L_ = 3; // length of element
int Vij_ = 4; // linked between nodes i and j
int Ei_ = 5; // rigidez elástica en nudo i
int Ej_ = 6; // rigidez elástica en nudo j
// beams load index
int QH_ = 0; // hipótesis de cargas
int QE_ = 1; // barra aplicada
int QT_ = 2; // tipo de cargas
int QAx_ = 3; // intensidad en sentido del eje local x
int QAy_ = 4; // intensidad en sentido del eje local y
int QAz_ = 5; // intensidad en sentido del eje local z
int Qa_ = 6; // distancia de aplicación de la carga respecto al nudo i
int Qb_ = 7; // longitud de la carga aplicada
// strain matrix
int STRAIN_COMPRESS = 0;
int STRAIN_TRACTION = 1;
int STRAIN_CUT = 2;
// selected objetive functions
int selectedOF = 12;
String[] OF_;
public OverallConstraintViolation<DoubleSolution> overallConstraintViolationDegree;
  /**
   * Default constructor: obtains the problem name via {@code EBEsReadProblems()}
   * (definition not visible in this chunk — presumably it selects/reads a problem name),
   * then loads and initializes the problem from the corresponding ".ebe" file.
   *
   * @throws FileNotFoundException if the data file cannot be opened
   */
  public Ebes() throws FileNotFoundException {
    overallConstraintViolationDegree = new OverallConstraintViolation<DoubleSolution>();
    String file = EBEsReadProblems() + ".ebe";
    EBEsInitialize(file);
  }
  /**
   * Creates an instance from an explicit data file and list of objective codes.
   *
   * @param ebesFileName name of the ".ebe" data file describing the bar structure
   * @param objectiveList objective-function codes evaluated per solution
   *     (e.g. "W" = total weight, "D" = sum of node displacements; see {@code evaluate})
   * @throws FileNotFoundException if the data file cannot be opened
   */
  public Ebes(String ebesFileName, String[] objectiveList) throws FileNotFoundException {
    overallConstraintViolationDegree = new OverallConstraintViolation<DoubleSolution>();
    OF_ = objectiveList;
    EBEsInitialize(ebesFileName);
  }
public void EBEsInitialize(String file) throws FileNotFoundException {
// CALCULAR dd Y CA (CANTIDADES DE NUDOS COARTADOS) AL CARGAR EL ARCHIVO
// CON ESTO EVITO RECALCULARLOS CADA VEZ QUE SE BUSCA UNA SOLUCIóN
// CONTAR EN PENALIZACIóN DE LA MATRIZ CA Y NO CN, CON ESTO
// EVITO RECORRER INNECESARIAMENTE TODOS LOS NUDOS
setName("Ebes");
numberOfEval_ = 1;
try {
// read file topology structural
EBEsReadDataFile(file);
} catch (JMetalException ex) {
Logger.getLogger(org.uma.jmetal.problem.multiobjective.ebes.Ebes.class.getName()).log(Level.SEVERE, null, ex);
}
// variables and restrictions
// la forma de la sección determina las cantidades de variables
// y la cantidad inicial de restricciones
// numberOfVariables_=0;
int numberOfConstraints_ = 0;
/*
for(int gr=0;gr<numberOfGroupElements_;gr++){
numberOfVariables_+= Groups_[gr][VARIABLES];
numberOfConstraintsGeometric_+= Groups_[gr][CONSTRAINT];
}
*/
// variable position, amount variables and geometric constraints
setNumberOfVariables(Variable_Position());
// geomtric constraints for shape
numberOfConstraints_ = numberOfConstraintsGeometric_;
// constraint for stress
numberOfConstraints_ += numberOfGroupElements_ * 3;
// total restrictions
numberOfConstraints_ += numberOfConstraintsNodes_;
setNumberOfConstraints(numberOfConstraints_);
// amount objectives
setNumberOfObjectives(OF_.length);
// problem data print
System.out.println("Structure");
System.out.println(" file: " + file);
System.out.println(" Number of Nodes: " + numberOfNodes);
System.out.println(" Number of Bars: " + numberOfElements_);
System.out.println(" Number of Groups: " + numberOfGroupElements_);
System.out.println("Optimization multi-objective: ");
System.out.println(" Number of objective function: " + getNumberOfObjectives());
String txt = "";
for (int i = 0; i < getNumberOfObjectives(); i++) {
txt = txt + OF_[i] + " ";
}
System.out.println(" " + txt);
System.out.println(" Number of Variables: " + getNumberOfVariables());
System.out.println(" Number of constraints for Geometric: " + numberOfConstraintsGeometric_);
System.out.println(" Number of constraints for Stress: " + (numberOfGroupElements_ * 3));
System.out.println(" Number of constraints for Deflection: " + numberOfConstraintsNodes_);
System.out.println(" Number of Constraints: " + numberOfConstraints_);
System.out.println(" Number of groups to check geometry: " + numberOfGroupsToCheckGeometry_);
// objectives
// Weight, Deflections, stress squared absolute error;
System.out.println("Algorithm configuration: ");
// Fill lower and upper limits
Double[] lowerLimit_ = new Double[getNumberOfVariables()];
Double[] upperLimit_ = new Double[getNumberOfVariables()];
int var = 0;
for (int gr = 0; gr < numberOfGroupElements_; gr++) {
var += Groups_[gr][VARIABLES];
if (Groups_[gr][SHAPE] == CIRCLE) {
lowerLimit_[var - 1] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // diameter min
upperLimit_[var - 1] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // diameter max
} else if (Groups_[gr][SHAPE] == HOLE_CIRCLE) {
lowerLimit_[var - 2] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // diameter min
lowerLimit_[var - 1] = Groups_[gr][VAR_eY_LOWER_LIMIT]; // thickness min
upperLimit_[var - 2] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // diameter max
upperLimit_[var - 1] = Groups_[gr][VAR_eY_UPPER_LIMIT]; // thickness max
} else if (Groups_[gr][SHAPE] == RECTANGLE) {
lowerLimit_[var - 2] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // higth min for rectangle
lowerLimit_[var - 1] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // witdth min
upperLimit_[var - 2] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // higth max for rectangle
upperLimit_[var - 1] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // width max for rectangle
} else if (Groups_[gr][SHAPE] == HOLE_RECTANGLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] =
Groups_[gr][VAR_eY_LOWER_LIMIT]; // tickness min in Y principal local axis
lowerLimit_[var - 1] =
Groups_[gr][VAR_eZ_LOWER_LIMIT]; // tickness min in Z principal local axis
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max
upperLimit_[var - 2] =
Groups_[gr][VAR_eY_UPPER_LIMIT]; // tickness max in Y principal local axis
upperLimit_[var - 1] =
Groups_[gr][VAR_eZ_UPPER_LIMIT]; // tickness max in Z principal local axis
} else if (Groups_[gr][SHAPE] == I_SINGLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] =
Groups_[gr][VAR_eY_LOWER_LIMIT]; // tickness min in Y principal local axis
lowerLimit_[var - 1] =
Groups_[gr][VAR_eZ_LOWER_LIMIT]; // ticknees min in Z principal local axis
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max in y axis
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z axiz
upperLimit_[var - 2] =
Groups_[gr][VAR_eY_UPPER_LIMIT]; // tickness max in Y principal local axis
upperLimit_[var - 1] =
Groups_[gr][VAR_eZ_UPPER_LIMIT]; // tickness max in Z principal local axis
} else if (Groups_[gr][SHAPE] == I_DOUBLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] =
Groups_[gr][VAR_eY_LOWER_LIMIT]; // tickness min in Y principal local axis
lowerLimit_[var - 1] =
Groups_[gr][VAR_eZ_LOWER_LIMIT]; // tickness min in Z principal local axis
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z
upperLimit_[var - 2] =
Groups_[gr][VAR_eY_UPPER_LIMIT]; // tickness max in Y principal local axis
upperLimit_[var - 1] = Groups_[gr][VAR_eZ_UPPER_LIMIT]; // thickness max in plate z
} else if (Groups_[gr][SHAPE] == H_SINGLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] =
Groups_[gr][VAR_eY_LOWER_LIMIT]; // tickness min in Y principal local axis
lowerLimit_[var - 1] =
Groups_[gr][VAR_eZ_LOWER_LIMIT]; // ticknees min in Z principal local axis
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max in y axis
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z axiz
upperLimit_[var - 2] =
Groups_[gr][VAR_eY_UPPER_LIMIT]; // tickness max in Y principal local axis
upperLimit_[var - 1] =
Groups_[gr][VAR_eZ_UPPER_LIMIT]; // tickness max in Z principal local axis
} else if (Groups_[gr][SHAPE] == H_DOUBLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] =
Groups_[gr][VAR_eY_LOWER_LIMIT]; // tickness min in Y principal local axis
lowerLimit_[var - 1] =
Groups_[gr][VAR_eZ_LOWER_LIMIT]; // tickness min in Z principal local axis
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z
upperLimit_[var - 2] =
Groups_[gr][VAR_eY_UPPER_LIMIT]; // tickness max in Y principal local axis
upperLimit_[var - 1] = Groups_[gr][VAR_eZ_UPPER_LIMIT]; // thickness max in plate z
} else if (Groups_[gr][SHAPE] == L_SINGLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] = Groups_[gr][VAR_eY_LOWER_LIMIT]; // ticknees min in plate y
lowerLimit_[var - 1] = Groups_[gr][VAR_eZ_LOWER_LIMIT]; // ticknees min in plate z
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z
upperLimit_[var - 2] = Groups_[gr][VAR_eY_UPPER_LIMIT]; // thickness max in
upperLimit_[var - 1] = Groups_[gr][VAR_eZ_UPPER_LIMIT]; // thickness max in
} else if (Groups_[gr][SHAPE] == L_DOUBLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] = Groups_[gr][VAR_eY_LOWER_LIMIT]; // ticknees min in
lowerLimit_[var - 1] = Groups_[gr][VAR_eZ_LOWER_LIMIT]; // ticknees min in
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z
upperLimit_[var - 2] = Groups_[gr][VAR_eY_UPPER_LIMIT]; // thickness max in
upperLimit_[var - 1] = Groups_[gr][VAR_eZ_UPPER_LIMIT]; // thickness max in
} else if (Groups_[gr][SHAPE] == T_SINGLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] = Groups_[gr][VAR_eY_LOWER_LIMIT]; // ticknees min in plate y
lowerLimit_[var - 1] = Groups_[gr][VAR_eZ_LOWER_LIMIT]; // ticknees min in plate z
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z
upperLimit_[var - 2] = Groups_[gr][VAR_eY_UPPER_LIMIT]; // thickness max in
upperLimit_[var - 1] = Groups_[gr][VAR_eZ_UPPER_LIMIT]; // thickness max in
} else if (Groups_[gr][SHAPE] == T_DOUBLE) {
lowerLimit_[var - 4] = Groups_[gr][VAR_Y_LOWER_LIMIT]; // height min
lowerLimit_[var - 3] = Groups_[gr][VAR_Z_LOWER_LIMIT]; // wide min
lowerLimit_[var - 2] = Groups_[gr][VAR_eY_LOWER_LIMIT]; // ticknees min in
lowerLimit_[var - 1] = Groups_[gr][VAR_eZ_LOWER_LIMIT]; // ticknees min in
upperLimit_[var - 4] = Groups_[gr][VAR_Y_UPPER_LIMIT]; // height max
upperLimit_[var - 3] = Groups_[gr][VAR_Z_UPPER_LIMIT]; // wide max in z
upperLimit_[var - 2] = Groups_[gr][VAR_eY_UPPER_LIMIT]; // thickness max in
upperLimit_[var - 1] = Groups_[gr][VAR_eZ_UPPER_LIMIT]; // thickness max in
} else {
System.out.println(
"Error in LIMITES LOWER/UPPER: transversal section not considerated for: "
+ gr
+ " group");
} // end if
} // gr
setVariableBounds(
new ArrayList<Double>(Arrays.<Double>asList(lowerLimit_)),
new ArrayList<Double>(Arrays.<Double>asList(upperLimit_)));
// greates difference between nodes
elementsBetweenDiffGreat_ = 0;
for (int ba = 0; ba < numberOfElements_; ba++) {
int i = (int) Element_[ba][i_];
int j = (int) Element_[ba][j_];
if (Math.abs(j - i) > elementsBetweenDiffGreat_) {
elementsBetweenDiffGreat_ = Math.abs(j - i);
}
}
matrixWidthBand_ = (elementsBetweenDiffGreat_ + 1) * numberOfLibertyDegree_;
} // end InitializeEBEs
// @Override
// public DoubleSolution createSolution() {
// return new DefaultDoubleSolution(this) ;
// }
/**
 * Evaluates a solution: maps the decision variables onto the element cross sections
 * ({@link #EBEsElementsTopology}), runs the 3D matrix-stiffness analysis
 * ({@link #EBEsCalculus}), computes every configured objective function value and
 * finally evaluates the constraints.
 *
 * <p>Supported objective codes in {@code OF_}: "W" total structural weight, "D" sum of
 * displacements of the checked nodes, "SSAE" sum of residual stresses, "ENS"
 * Nash-Sutcliffe efficiency, "MDV" Mahalanobis distance with variance.
 *
 * @param solution The solution to evaluate
 */
@Override
public void evaluate(DoubleSolution solution) {
  int hi = 0; // load-hypothesis index used when reading displacements/residuals
  double[] fx = new double[getNumberOfObjectives()]; // objective function values
  EBEsElementsTopology(solution); // transforms geometry into mechanical characteristics
  EBEsCalculus(); // matrix stiffness method for spatial (3D) structures
  // START OBJECTIVES FUNCTION
  for (int j = 0; j < getNumberOfObjectives(); j++) {
    // total weight
    if (OF_[j].equals("W")) {
      // START structure total weight ---------------------
      fx[j] = 0.0;
      for (int ba = 0; ba < numberOfElements_; ba++) {
        // weight = area * length * specific weight, taken from the element's group
        int idx = (int) Element_[ba][INDEX_];
        fx[j] += Groups_[idx][AREA] * Element_[ba][L_] * Groups_[idx][SPECIFIC_WEIGHT];
      }
      solution.setObjective(j, fx[j]);
      // END minimizing structure total weight ------------------------
    }
    // summation of deformations
    else if (OF_[j].equals("D")) {
      // START sum of displacement norms of the checked nodes ---------------------------
      fx[j] = 0.0;
      for (int i = 0; i < nodeCheck_.length; i++) {
        // Euclidean norm of the translational displacement of each checked node
        double xn = DisplacementNodes_[numberOfLibertyDegree_ * (int) nodeCheck_[i][0] + aX_][hi];
        double yn = DisplacementNodes_[numberOfLibertyDegree_ * (int) nodeCheck_[i][0] + aY_][hi];
        double zn = DisplacementNodes_[numberOfLibertyDegree_ * (int) nodeCheck_[i][0] + aZ_][hi];
        fx[j] += Math.sqrt(Math.pow(xn, 2.0) + Math.pow(yn, 2.0) + Math.pow(zn, 2.0));
      }
      solution.setObjective(j, fx[j]);
      // END minimizing sum of displacements in nodes -----------------------------------
    }
    // stress square absolute error
    else if (OF_[j].equals("SSAE")) {
      // START minimum strain residual ---------------------------------------------
      // global strain residual: tension residual plus compression residual
      fx[j] = StrainResidualMin_[hi] + StrainResidualMax_[hi];
      solution.setObjective(j, fx[j]);
      // END minimum strain residual -----------------------------------------------
    }
    // Efficiency of Nash-Sutcliffe for tension and compression
    else if (OF_[j].equals("ENS")) {
      fx[j] = FunctionENS(0);
      solution.setObjective(j, fx[j]);
    } else if (OF_[j].equals("MDV")) {
      fx[j] = FunctionsMahalanobis_Distance_With_Variance(0);
      solution.setObjective(j, fx[j]);
    } else {
      // objective code not recognized; runtime message kept verbatim
      System.out.println("Error: not considerate START OBJECTIVES FUNCTION ");
    }
  }
  numberOfEval_++;
  // if((numberOfEval_ % 1000) == 0) System.out.println(numberOfEval_);
  // END OBJECTIVES FUNCTION
  // maximizing the objective function ------------------------
  // fx[1] *= -1.0;
  // NOT USED -----------------------------------
  /*
  double l=0; // total length of all the elements
  // total deflection of structure
  fx[1]=0;
  for(int ba=0; ba<numberOfElements_; ba++){
  l+=Element_[ba][L_];
  int ni = (int)Element_[ba][i_];
  int nj = (int)Element_[ba][j_];
  double dxi=DisplacementNodes_[numberOfLibertyDegree_*ni+aX_][hi];
  double dyi=DisplacementNodes_[numberOfLibertyDegree_*ni+aY_][hi];
  double dzi=DisplacementNodes_[numberOfLibertyDegree_*ni+aZ_][hi];
  double dxj=DisplacementNodes_[numberOfLibertyDegree_*nj+aX_][hi];
  double dyj=DisplacementNodes_[numberOfLibertyDegree_*nj+aY_][hi];
  double dzj=DisplacementNodes_[numberOfLibertyDegree_*nj+aZ_][hi];
  // fx[1]+=Math.sqrt(Math.pow((dxi-dxj), 2.0)+Math.pow((dyi-dyj), 2.0)+Math.pow((dzi-dzj), 2.0))/l;
  fx[1]+=(-dxi+dxj)/l;
  }
  */
  // END NOT USED ------------------------------------------------------------------------------
  this.evaluateConstraints(solution);
} // evaluate
/**
 * Evaluates the constraint overhead of a solution.
 *
 * <p>Three families of constraints are written into {@code constraint[]} (a value
 * &gt;= 0 means satisfied): (1) per-group geometric shape-ratio and wall-slenderness
 * constraints, indexed backwards from the running counter {@code con}; (2) stress
 * constraints (tension, compression, shear) per group and load hypothesis; (3)
 * displacement constraints for the checked nodes per load hypothesis.
 *
 * @param solution The solution
 * @throws JMetalException propagated from the analysis routines
 */
public void evaluateConstraints(DoubleSolution solution) {
  double[] constraint = new double[this.getNumberOfConstraints()];
  double[] x = new double[getNumberOfVariables()];
  for (int i = 0; i < getNumberOfVariables(); i++) {
    x[i] = solution.getVariable(i);
  }
  double x1, x2, x3, x4;
  int var = 0; // running index past the last variable of the current group
  int con = 0; // running index past the last constraint of the current group
  // shape-ratio constraints on the section walls b/t
  for (int gr = 0; gr < numberOfGroupElements_; gr++) {
    var += Groups_[gr][VARIABLES];
    con += Groups_[gr][CONSTRAINT];
    if (Groups_[gr][SHAPE] == CIRCLE) {
      x1 = x[var - 1]; // diameter (no geometric constraint for this shape)
    } else if (Groups_[gr][SHAPE] == HOLE_CIRCLE) {
      x1 = x[var - 2]; // diameter
      x2 = x[var - 1]; // plate thickness
      double ratio = x1 / x2;
      if (ratio < x2 / x1) ratio = x2 / x1; // use the larger of the two ratios
      constraint[con - 1] = -ratio + Groups_[gr][RATIO_YZ]; // height/base ratio
    } else if (Groups_[gr][SHAPE] == RECTANGLE) {
      x1 = x[var - 2]; // height (y axis)
      x2 = x[var - 1]; // width (z axis)
      double ratio = x1 / x2;
      constraint[con - 2] =
          +ratio - Groups_[gr][RATIO_YZ] * 0.75; // maximum height/base ratio
      constraint[con - 1] =
          -ratio + Groups_[gr][RATIO_YZ] * 1.5; // minimum height/base ratio
    } else if (Groups_[gr][SHAPE] == HOLE_RECTANGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          +ratio - Groups_[gr][RATIO_YZ] * 0.75; // maximum height/base ratio
      constraint[con - 3] =
          -ratio + Groups_[gr][RATIO_YZ] * 1.25; // minimum height/base ratio
      // wall slenderness bounds (plate thickness versus side length)
      double tb1 = -x3 * 15 + x1;
      double tb2 = +x3 * 30 - x1;
      double ta1 = -x4 * 10 + x2;
      double ta2 = +x4 * 20 - x2;
      // alternative (disabled): -x2/x4+27; // 0.3*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]))
      constraint[con - 2] =
          Math.min(
              tb1,
              tb2); // plate slenderness; was -x1/x3+1.12*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]*2))
      constraint[con - 1] =
          Math.min(
              ta1,
              ta2); // plate slenderness; was -x2/x4+1.12*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]*2.0))
    } else if (Groups_[gr][SHAPE] == I_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          +ratio - Groups_[gr][RATIO_YZ] * 0.75; // maximum height/base ratio
      constraint[con - 3] =
          -ratio + Groups_[gr][RATIO_YZ] * 1.25; // minimum height/base ratio
      // wall slenderness bounds (plate thickness versus side length)
      double tb1 = -x3 * 15 + x1;
      double tb2 = +x3 * 30 - x1;
      double ta1 = -x4 * 10 + x2;
      double ta2 = +x4 * 20 - x2;
      // alternatives (disabled):
      // double tb1=-x3*20+x1;
      // double tb2=-x1/x3+35; // 0.6*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]))
      // double ta1=-x4*15+x2;
      // double ta2=-x2/x4+27; // 0.3*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]))
      constraint[con - 2] =
          Math.min(
              tb1,
              tb2); // plate slenderness; was -x1/x3+1.12*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]*2))
      constraint[con - 1] =
          Math.min(
              ta1,
              ta2); // plate slenderness; was -x2/x4+1.12*Math.sqrt(Groups_[gr][E_]/(Groups_[gr][STRESS]*2.0))
    } else if (Groups_[gr][SHAPE] == I_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          -ratio + Groups_[gr][RATIO_YZ]; // maximum height/base ratio
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ] * 0.85; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][
                              STRESS])); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 0.35
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS_CUT]
                              * 2.0)); // plate thickness versus side length
    } else if (Groups_[gr][SHAPE] == H_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          -ratio + Groups_[gr][RATIO_YZ] * 0.85; // maximum height/base ratio
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ]; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 0.35
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS_CUT]
                              * 2.0)); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
    } else if (Groups_[gr][SHAPE] == H_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          -ratio + Groups_[gr][RATIO_YZ] * 0.1; // maximum height/base ratio
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ]; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS_CUT]
                              * 2.0)); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
    } else if (Groups_[gr][SHAPE] == L_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] = -ratio + Groups_[gr][RATIO_YZ]; // ratio between height and base
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ]; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
    } else if (Groups_[gr][SHAPE] == L_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          -ratio + Groups_[gr][RATIO_YZ]; // maximum height/base ratio
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ]; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
    } else if (Groups_[gr][SHAPE] == T_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          -ratio + Groups_[gr][RATIO_YZ]; // maximum height/base ratio
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ] * 0.9; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
    } else if (Groups_[gr][SHAPE] == T_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // thickness along Y axis => thickness of width plate
      x4 = x[var - 1]; // thickness along Z axis => thickness of height plate
      double ratio = x1 / x2;
      constraint[con - 4] =
          -ratio + Groups_[gr][RATIO_YZ]; // maximum height/base ratio
      constraint[con - 3] =
          +ratio - Groups_[gr][RATIO_YZ] * 0.9; // minimum height/base ratio
      constraint[con - 2] =
          -x1 / x3
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
      constraint[con - 1] =
          -x2 / x4
              + 1.12
                  * Math.sqrt(
                      Groups_[gr][E_]
                          / (Groups_[gr][STRESS]
                              * 2.0)); // plate thickness versus side length
    } else {
      System.out.println(
          "Error in constraint: transverse section not considerated for: " + gr + " group");
    }
  } // next gr
  /*
  // TOTAL LENGTH OF THE BEAM
  double l=0;
  for(int ba=0; ba<numberOfElements_; ba++){
  l+=Element_[ba][2];
  }
  */
  // STRESS CONSTRAINTS
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    for (int gr = 0; gr < numberOfGroupElements_; gr++) {
      // constraint due to the tensile stress
      constraint[con] =
          (-StrainMax_[gr][hi]
              + Groups_[gr][STRESS]); // -StrainMax_[gr][hi]*1.001+Groups_[gr][STRESS]
      con += 1;
      // constraint[con]=(-StrainMax_[gr][hi]+Groups_[gr][STRESS]*0.50);
      // con += 1;
      // constraint due to the compressive stress
      constraint[con] =
          (+StrainMin_[gr][hi]
              - Groups_[gr][COMPRESSION]); // +StrainMin_[gr][hi]*0.999-Groups_[gr][COMPRESSION]
      con += 1;
      // constraint[con]=(+StrainMin_[gr][hi]+Groups_[gr][COMPRESSION]*0.50);
      // con += 1;
      // constraint due to the shear stress
      constraint[con] =
          (-StrainCutMax_[gr][hi]
              + Groups_[gr][STRESS_CUT]); // -StrainCutMax_[gr][hi]*1.001+Groups_[gr][STRESS_CUT]
      con += 1;
    }
  }
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    // constraint of node displacement structure
    double deltaN = 0;
    for (int i = 0; i < nodeCheck_.length; i++) {
      // Euclidean norm of the checked node's displacement must not exceed its limit
      double xn = DisplacementNodes_[numberOfLibertyDegree_ * (int) nodeCheck_[i][0] + aX_][hi];
      double yn = DisplacementNodes_[numberOfLibertyDegree_ * (int) nodeCheck_[i][0] + aY_][hi];
      double zn = DisplacementNodes_[numberOfLibertyDegree_ * (int) nodeCheck_[i][0] + aZ_][hi];
      deltaN = Math.sqrt(Math.pow(xn, 2) + Math.pow(yn, 2) + Math.pow(zn, 2));
      constraint[con] = (-deltaN + nodeCheck_[i][1]);
      con += 1;
    }
  }
  for (int i = 0; i < getNumberOfConstraints(); i++) {
    solution.setConstraint(i, constraint[i]);
  }
}
/**
 * Assigns the decision variables of {@code solution} to each element group and
 * derives the mechanical characteristics of its transversal section according to
 * the group's shape, delegating to the matching EBEsTransversalSection* routine.
 *
 * @param solution solution whose variables encode the section dimensions per group
 * @throws JMetalException propagated from the section-calculation routines
 */
public void EBEsElementsTopology(DoubleSolution solution) throws JMetalException {
  // copy the decision variables into a plain array
  double[] x = new double[solution.getNumberOfVariables()];
  for (int i = 0; i < solution.getNumberOfVariables(); i++) {
    x[i] = solution.getVariable(i);
  }
  double x1, x2, x3, x4;
  int var = 0; // running index past the last variable of the current group
  for (int gr = 0; gr < numberOfGroupElements_; gr++) {
    var += Groups_[gr][VARIABLES];
    if (Groups_[gr][SHAPE] == CIRCLE) {
      x1 = x[var - 1]; // diameter
      EBEsTransversalSectionCircular(gr, x1);
    } else if (Groups_[gr][SHAPE] == HOLE_CIRCLE) {
      x1 = x[var - 2]; // diameter
      x2 = x[var - 1]; // plate thickness
      EBEsTransversalSectionHoleCircular(gr, x1, x2);
    } else if (Groups_[gr][SHAPE] == RECTANGLE) {
      x1 = x[var - 2]; // height (y axis)
      x2 = x[var - 1]; // width (z axis)
      EBEsTransversalSectionRectangle(gr, x1, x2);
    } else if (Groups_[gr][SHAPE] == HOLE_RECTANGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (z axis)
      x3 = x[var - 2]; // horizontal plate thickness (up and down)
      x4 = x[var - 1]; // vertical plate thickness (left and right)
      EBEsTransversalSectionHoleRectangle(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == I_SINGLE) {
      // EbesMutation(gr, 0, x);
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // plate thickness top and bottom
      x4 = x[var - 1]; // vertical plate thickness (centre)
      EBEsTransversalSection_I_Single(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == I_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_I_Double(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == H_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_H_Single(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == H_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_H_Double(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == L_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_L_Single(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == L_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_L_Double(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == T_SINGLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_T_Single(gr, x1, x2, x3, x4);
    } else if (Groups_[gr][SHAPE] == T_DOUBLE) {
      x1 = x[var - 4]; // height (y axis)
      x2 = x[var - 3]; // width (Z axis)
      x3 = x[var - 2]; // height plate thickness
      x4 = x[var - 1]; // width plate thickness
      EBEsTransversalSection_T_Double(gr, x1, x2, x3, x4);
    } else {
      System.out.println(
          "Error in VARIABLES: transversal section not considerated for: " + gr + " group");
    }
  } // next gr
}
/**
 * Applies each element's self weight as a uniformly distributed load and accumulates
 * the resulting equivalent loads on the element's end nodes, both in GLOBAL
 * coordinates ({@code PQ}) and in LOCAL coordinates ({@code cbi}/{@code cbj}).
 * Self weight is always assigned to load hypothesis 0.
 *
 * @throws JMetalException propagated from the load-distribution routine
 */
public void EBEsWeigthElement() throws JMetalException {
  // load due to the self weight of the element
  for (int el = 0; el < numberOfElements_; el++) {
    int idx = (int) Element_[el][INDEX_];
    // BUG FIX: compare strings with equals(), not ==. Reference comparison only
    // worked by accident when both sides happened to be the interned literal "Z";
    // it fails whenever GravitationalAxis_ comes from user input or a file.
    if ("Z".equals(GravitationalAxis_)) {
      // gravity acting along the Z axis
      WeightElement_[el][QH_] = 0;
      WeightElement_[el][QE_] = el;
      WeightElement_[el][QT_] = CARGA_UNIFORME_TOTAL;
      WeightElement_[el][QAx_] = 0.0;
      WeightElement_[el][QAy_] = 0.0;
      WeightElement_[el][QAz_] = -Groups_[idx][AREA] * Groups_[idx][SPECIFIC_WEIGHT];
      WeightElement_[el][Qa_] = 0.0;
      WeightElement_[el][Qb_] = 0.0;
    } else {
      // gravity acting along the Y axis
      WeightElement_[el][QH_] = 0;
      WeightElement_[el][QE_] = el;
      WeightElement_[el][QT_] = CARGA_UNIFORME_TOTAL;
      WeightElement_[el][QAx_] = 0.0;
      WeightElement_[el][QAy_] = -Groups_[idx][AREA] * Groups_[idx][SPECIFIC_WEIGHT];
      WeightElement_[el][QAz_] = 0.0;
      WeightElement_[el][Qa_] = 0.0;
      WeightElement_[el][Qb_] = 0.0;
    }
    // fresh accumulators filled by EBEsWeightDistributedUniformly
    Qi = new double[numberOfLibertyDegree_];
    Qj = new double[numberOfLibertyDegree_];
    pi = new double[numberOfLibertyDegree_];
    pj = new double[numberOfLibertyDegree_];
    EBEsWeightDistributedUniformly(el, WeightElement_[el]);
    int hi = 0; // self weight always belongs to hypothesis 0
    int ni = (int) Element_[el][i_];
    int nj = (int) Element_[el][j_];
    // node i: accumulate equivalent loads in GLOBAL coordinates
    PQ[numberOfLibertyDegree_ * ni + aX_][hi] += Qi[aX_];
    PQ[numberOfLibertyDegree_ * ni + aY_][hi] += Qi[aY_];
    PQ[numberOfLibertyDegree_ * ni + aZ_][hi] += Qi[aZ_];
    PQ[numberOfLibertyDegree_ * ni + gX_][hi] += Qi[gX_];
    PQ[numberOfLibertyDegree_ * ni + gY_][hi] += Qi[gY_];
    PQ[numberOfLibertyDegree_ * ni + gZ_][hi] += Qi[gZ_];
    // node j
    PQ[numberOfLibertyDegree_ * nj + aX_][hi] += Qj[aX_];
    PQ[numberOfLibertyDegree_ * nj + aY_][hi] += Qj[aY_];
    PQ[numberOfLibertyDegree_ * nj + aZ_][hi] += Qj[aZ_];
    PQ[numberOfLibertyDegree_ * nj + gX_][hi] += Qj[gX_];
    PQ[numberOfLibertyDegree_ * nj + gY_][hi] += Qj[gY_];
    PQ[numberOfLibertyDegree_ * nj + gZ_][hi] += Qj[gZ_];
    // accumulate equivalent end loads in LOCAL coordinates for the same element
    // and hypothesis
    cbi[aX_][el][hi] += pi[aX_];
    cbi[aY_][el][hi] += pi[aY_];
    cbi[aZ_][el][hi] += pi[aZ_];
    cbi[gX_][el][hi] += pi[gX_];
    cbi[gY_][el][hi] += pi[gY_];
    cbi[gZ_][el][hi] += pi[gZ_];
    cbj[aX_][el][hi] += pj[aX_];
    cbj[aY_][el][hi] += pj[aY_];
    cbj[aZ_][el][hi] += pj[aZ_];
    cbj[gX_][el][hi] += pj[gX_];
    cbj[gY_][el][hi] += pj[gY_];
    cbj[gZ_][el][hi] += pj[gZ_];
  }
}
/**
 * Main calculation module: (re)allocates all analysis arrays, assembles the nodal
 * loads, solves the banded stiffness system for every load hypothesis (optionally
 * with a second geometric-order iteration) and finally computes member stresses
 * and their residuals against the admissible values.
 *
 * @throws JMetalException propagated from the assembly/solution routines
 */
public void EBEsCalculus() throws JMetalException {
  // Effort at both element ends, per liberty degree, element and hypothesis
  Efforti_ = new double[numberOfLibertyDegree_][numberOfElements_][numberOfWeigthHypothesis_];
  Effortj_ = new double[numberOfLibertyDegree_][numberOfElements_][numberOfWeigthHypothesis_];
  // node displacements and rotations per load hypothesis
  DisplacementNodes_ =
      new double[numberOfLibertyDegree_ * numberOfNodes][numberOfWeigthHypothesis_];
  // strain at end i per load hypothesis
  Straini_ = new double[3][numberOfElements_][numberOfWeigthHypothesis_];
  // strain at end j per load hypothesis
  Strainj_ = new double[3][numberOfElements_][numberOfWeigthHypothesis_];
  // minimum strain per load hypothesis (previous values kept for comparison)
  // StrainMin_ = new double[2][numberOfElements_][numberOfWeigthHypothesis_];
  OldStrainMin_ = StrainMin_;
  StrainMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
  // maximum strain per load hypothesis (previous values kept for comparison)
  // StrainMax_ = new double[2][numberOfElements_][numberOfWeigthHypothesis_];
  OldStrainMax_ = StrainMax_;
  StrainMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
  // maximum tangential (shear) stress
  StrainCutMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
  // negative strain residual per load hypothesis
  StrainResidualMin_ = new double[numberOfWeigthHypothesis_];
  // positive strain residual per load hypothesis
  StrainResidualMax_ = new double[numberOfWeigthHypothesis_];
  // shear strain residual per load hypothesis
  StrainResidualCut_ = new double[numberOfWeigthHypothesis_];
  // banded stiffness matrix, stored flat (band width derives from node numbering)
  MatrixStiffness_ =
      new double
          [numberOfLibertyDegree_
              * numberOfLibertyDegree_
              * numberOfNodes
              * (elementsBetweenDiffGreat_ + 1)];
  // self-weight load record of each element
  WeightElement_ = new double[numberOfElements_][8];
  // equivalent end loads of the elements, in local coordinates
  cbi = new double[numberOfLibertyDegree_][numberOfElements_][numberOfWeigthHypothesis_];
  cbj = new double[numberOfLibertyDegree_][numberOfElements_][numberOfWeigthHypothesis_];
  // total equivalent nodal loads of the structure (element loads plus node loads)
  PQ = new double[numberOfLibertyDegree_ * numberOfNodes][numberOfWeigthHypothesis_];
  Reaction_ = new double[numberOfLibertyDegree_ * numberOfNodes][numberOfWeigthHypothesis_];
  EBEsWeightNodes();
  if (lLoadsOwnWeight) EBEsWeigthElement();
  EBEsOverloadWeightElement();
  // one extra iteration when second geometric order calculus is requested
  int NumIter = 0;
  if (lSecondOrderGeometric) NumIter = 1;
  // load hypotheses
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    for (int countIter = 0; countIter <= NumIter; countIter++) {
      EBEsMatrixWeight(hi);
      EBEsMatrixGlobalFactory(countIter);
      // print the stiffness matrix
      // EBEsPrintArchTxtMKG("1", hi);
      EBEsMatrixGlobalPenalization();
      // penalized matrix
      // EBEsPrintArchTxtMKG("2", hi);
      EBEsEcuationSolution(hi);
      EBEsEffortsElements3D(hi, countIter, DisplacementNodes_);
      EBEsEffortsTotal3D(hi);
      // archiving of the stability and mechanical solution
      // EBEsPrintArchTxtElements();
      // EBEsPrintArchTxtDesp(hi);
      // EBEsPrintArchTxtEfforts(hi);
      // EBEsPrintArchTxtReaction(hi);
      if (lSecondOrderGeometric && countIter == 0) {
        // keep axial forces from the 1st-order pass, then reset state for the 2nd pass
        EBEsAssignAxialForces(hi);
        EBEsSteelingResults(hi);
      }
    } // next numIter
    // optional for the optimization process
    // EBEsNodesEquilibrium3D(hi);
    // optional for the optimization process
    // EBEsReactions3D(hi);
  } // next hi
  // stress calculation at both element ends (spans are not sampled; the maximum
  // along the span is not computed here)
  // stresses at end i
  Straini_ = EBEsStrainNode(Efforti_);
  // stresses at end j
  Strainj_ = EBEsStrainNode(Effortj_);
  // maximum stresses per group
  // EBEsStrainMaxWhitElement();
  EBEsStrainMaxWhitGroup();
  // minimum stresses per group
  // EBEsStrainMinWhitElement();
  EBEsStrainMinWhitGroup();
  // verification of the maximum stresses against the admissible stresses
  EBEsStrainResidualVerication();
  // archiving of the stresses
  // EBEsPrintArchTxtStrain();
} // end EBEsCalculus
/**
 * Stores the axial (local x) end forces of every element for hypothesis {@code hi},
 * reallocating the destination arrays.
 *
 * @param hi load-hypothesis index to copy from
 */
public void EBEsAssignAxialForces(int hi) {
  final int elementCount = numberOfElements_;
  AxialForcei_ = new double[elementCount];
  AxialForcej_ = new double[elementCount];
  for (int member = 0; member < elementCount; member++) {
    AxialForcei_[member] = Efforti_[aX_][member][hi];
    AxialForcej_[member] = Effortj_[aX_][member][hi];
  }
}
/**
 * Resets the analysis state before the second-order iteration: zeroes the banded
 * stiffness matrix, the node displacements/rotations of hypothesis {@code hi} and
 * the end forces of every element for that hypothesis.
 *
 * @param hi load-hypothesis index whose results are cleared
 */
public void EBEsSteelingResults(int hi) {
  // zero the flat banded stiffness matrix
  final int stiffnessSize =
      numberOfLibertyDegree_
          * numberOfLibertyDegree_
          * numberOfNodes
          * (elementsBetweenDiffGreat_ + 1);
  Arrays.fill(MatrixStiffness_, 0, stiffnessSize, 0.0);
  // zero the node displacements and rotations of this hypothesis
  final int dofCount = numberOfLibertyDegree_ * numberOfNodes;
  for (int dof = 0; dof < dofCount; dof++) {
    DisplacementNodes_[dof][hi] = 0.0;
  }
  // zero the end forces (ends i and j) of every element for this hypothesis
  final int[] components = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int el = 0; el < Element_.length; el++) {
    for (int c : components) {
      Efforti_[c][el][hi] = 0.0;
      Effortj_[c][el][hi] = 0.0;
    }
  }
}
/**
 * Builds the right-hand-side force vector for hypothesis {@code hi}: copies the
 * assembled equivalent nodal loads {@code PQ} into {@code DisplacementNodes_},
 * which is later overwritten in place by the equation solver.
 *
 * @param hi load-hypothesis index
 */
public void EBEsMatrixWeight(int hi) {
  final int[] components = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int node = 0; node < Node_.length; node++) {
    final int base = numberOfLibertyDegree_ * node;
    for (int c : components) {
      DisplacementNodes_[base + c][hi] = PQ[base + c][hi];
    }
  }
}
/**
 * Assembles the global stiffness matrix: for every element, builds the local
 * stiffness matrix according to its end-link type, optionally adds the
 * second-order geometric terms on the second iteration, rotates it to global
 * coordinates and accumulates it into the structure matrix.
 *
 * @param countIter iteration counter; 1 enables the second-order geometric terms
 * @throws JMetalException propagated from the matrix routines
 */
public void EBEsMatrixGlobalFactory(int countIter) throws JMetalException {
  // select link between elements
  for (int el = 0; el < numberOfElements_; el++) {
    // Element_[el][Vij_] encodes the two end links as a two-digit decimal code:
    // first digit = end i, second digit = end j (0 = rigid, 1 = articulated).
    // BUG-PRONE LITERALS FIXED: the original used `case 00:` and `case 01:`,
    // which are OCTAL literals in Java. They happen to equal 0 and 1, so
    // behavior is unchanged, but decimal literals make the intent explicit
    // next to the decimal cases 10 and 11.
    switch ((int) Element_[el][Vij_]) {
      case 0: // "00": i rigid - j rigid
        EBEsMat3DL_iRig_jRig(el);
        break;
      case 1: // "01": i rigid - j articulated
        EBEsMat3DL_iRig_jArt(el);
        break;
      case 10: // i articulated - j rigid
        EBEsMat3DL_iArt_jRig(el);
        break;
      case 11: // i articulated - j articulated
        EBEsMat3DL_iArt_jArt(el);
        break;
      default:
        System.out.println("invalid link");
        return;
    } // end switch
    if (lSecondOrderGeometric && countIter == 1) {
      // add the second-order geometric contributions to the local sub-matrices
      EBEsMat3DL_SOG(el);
      Kii = EBEsMatrixAdd(Kii, KiiSOG);
      Kij = EBEsMatrixAdd(Kij, KijSOG);
      Kji = EBEsMatrixAdd(Kji, KjiSOG);
      Kjj = EBEsMatrixAdd(Kjj, KjjSOG);
    }
    // rotation matrix from the section's principal axes to local axes (xp,yp)
    EBEsMatRot3DLpSaL(el);
    // rotation matrix from local to global axes (x,y)
    EBEsMatRot3DLaG(el);
    // build the 3D element stiffness matrix in global coordinates
    EBEsMat3DGij();
    // accumulate into the global stiffness matrix of the structure
    EBEsMat3DG(el);
  } // next el
}
/**
 * Applies the node restraints (supports) to the stiffness matrix by penalization:
 * for every restrained degree of freedom, the corresponding diagonal term of the
 * banded matrix is set to a very large value (1.0E+35), effectively fixing it.
 */
public void EBEsMatrixGlobalPenalization() {
  // degrees of freedom in the order they appear in the restraint code string
  final int[] components = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int i = 0; i < numberOfNodesRestricts_; i++) {
    final int no = (int) NodeRestrict_[i][0];
    // turn the numeric restraint code into a zero-padded digit string,
    // one character per degree of freedom
    String strCxyz = String.valueOf((int) NodeRestrict_[i][1]);
    final StringBuilder padding = new StringBuilder();
    for (int j = numberOfLibertyDegree_; j > strCxyz.length(); j--) {
      padding.append('0');
    }
    strCxyz = padding + strCxyz;
    // '1' in position d means DOF d is rigidly restrained: penalize its diagonal term
    for (int d : components) {
      if (strCxyz.charAt(d) == '1') {
        MatrixStiffness_[matrixWidthBand_ * (numberOfLibertyDegree_ * no + d)] = 1.0E+35;
      }
    }
  } // next i
}
/**
 * Finalizes the 3D end forces in LOCAL coordinates for hypothesis {@code hi}:
 * subtracts the equivalent (fixed-end) load actions {@code cbi}/{@code cbj} from
 * the forces already accumulated at both ends of every element.
 *
 * @param hi load-hypothesis index
 */
public void EBEsEffortsTotal3D(int hi) {
  final int[] components = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int el = 0; el < Element_.length; el++) {
    for (int c : components) {
      // end i: forces referred to the section's principal system
      Efforti_[c][el][hi] -= cbi[c][el][hi];
      // end j: forces referred to the section's principal system
      Effortj_[c][el][hi] -= cbj[c][el][hi];
    }
  }
}
/**
 * Applies the loads defined directly on nodes: each {@code WeightNode_} record
 * holds [hypothesis, node, six load components in DOF order]. The components are
 * written both into the right-hand-side seed ({@code DisplacementNodes_}) and
 * into the total equivalent nodal loads ({@code PQ}).
 */
public void EBEsWeightNodes() {
  // DOF offsets matching columns 2..7 of each WeightNode_ record, in order
  final int[] components = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int j = 0; j < numberOfWeigthsNodes_; j++) {
    final int hi = (int) WeightNode_[j][0];
    final int no = (int) WeightNode_[j][1];
    final int base = numberOfLibertyDegree_ * no;
    for (int k = 0; k < components.length; k++) {
      final double load = WeightNode_[j][k + 2];
      DisplacementNodes_[base + components[k]][hi] = load;
      PQ[base + components[k]][hi] = load;
    }
  }
}
/**
 * Transfers the loads applied on elements (overloads) to equivalent nodal loads:
 * for every loaded element, computes the equivalent end loads for its load type
 * and accumulates them in GLOBAL coordinates ({@code PQ}) at the element's end
 * nodes and in LOCAL coordinates ({@code cbi}/{@code cbj}) per element and
 * hypothesis.
 *
 * @throws JMetalException propagated from the load-distribution routine
 */
public void EBEsOverloadWeightElement() throws JMetalException {
  final int[] components = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  // loop over all loaded elements
  for (int i = 0; i < numberOfWeigthsElements_; i++) {
    // fresh accumulators filled by the load-distribution routine
    Qi = new double[numberOfLibertyDegree_];
    Qj = new double[numberOfLibertyDegree_];
    pi = new double[numberOfLibertyDegree_];
    pj = new double[numberOfLibertyDegree_];
    // loaded element
    int el = (int) OverloadInElement_[i][QE_];
    // dispatch on the load type
    switch ((int) OverloadInElement_[i][QT_]) {
      case 0:
        EBEsWeightDistributedUniformly(el, OverloadInElement_[i]);
        break;
      default:
        // BUG FIX: the message previously read "invalid link" (copy-pasted from
        // the link-type switch); this branch actually diagnoses the load type.
        System.out.println("invalid load type");
        return;
    }
    // hypothesis this load is assigned to
    int hi = (int) OverloadInElement_[i][QH_];
    // end nodes of the loaded element
    int ni = (int) Element_[el][i_];
    int nj = (int) Element_[el][j_];
    // accumulate equivalent nodal loads in GLOBAL coordinates at nodes i and j
    for (int c : components) {
      PQ[numberOfLibertyDegree_ * ni + c][hi] += Qi[c];
      PQ[numberOfLibertyDegree_ * nj + c][hi] += Qj[c];
    }
    // accumulate equivalent end loads in LOCAL coordinates for the same element
    // and hypothesis
    for (int c : components) {
      cbi[c][el][hi] += pi[c];
      cbj[c][el][hi] += pj[c];
    }
  }
}
public void EBEsWeightDistributedUniformly(int el, double[] LoadInElement_)
    throws JMetalException {
  // Converts a uniformly distributed load on element 'el' into equivalent
  // end loads, according to the end fixity of the element.
  // LoadInElement_ holds the load record: components QAx_/QAy_/QAz_ are the
  // distributed load referred to the GLOBAL axes X, Y, Z.
  // Results (class fields):
  //   Qi[] / Qj[]  equivalent loads at ends i / j in the GLOBAL system
  //   pi[] / pj[]  equivalent loads at ends i / j in the LOCAL system of
  //                the element (obtained by rotation at the end)
  // FIXES vs. previous revision:
  //   - ART-ART case, global-Z force: wrote Qi[aY_]/Qj[aY_] instead of
  //     Qi[aZ_]/Qj[aZ_] (copy-paste), clobbering the Y component.
  //   - ART-EMP case: magic index Qi[2] replaced by Qi[aZ_].
  //   - EMP-ART case, global-Z force: spurious "/ 2.0" removed so the end
  //     reactions (5/8 + 3/8) sum to the total applied load, matching the
  //     mirrored ART-EMP case and the Y component of the same case.
  int vi, vj;
  double xi, xj, yi, yj, zi, zj;
  double[][] R = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
  // decode the end-fixity code Vij_: tens digit = end i, units digit = end j
  // (0 = rigid/EMP, 1 = hinged/ART)
  switch ((int) Element_[el][Vij_]) {
    case 00:
      {
        vi = 0;
        vj = 0;
        break;
      }
    case 01:
      {
        vi = 0;
        vj = 1;
        break;
      }
    case 10:
      {
        vi = 1;
        vj = 0;
        break;
      }
    case 11:
      {
        vi = 1;
        vj = 1;
        break;
      }
    default:
      System.out.println("invalid link");
      return;
  } // end switch
  int ni = (int) Element_[el][i_];
  int nj = (int) Element_[el][j_];
  // coordinates of the element ends
  xi = Node_[ni][aX_];
  yi = Node_[ni][aY_];
  zi = Node_[ni][aZ_];
  xj = Node_[nj][aX_];
  yj = Node_[nj][aY_];
  zj = Node_[nj][aZ_];
  // projected lengths of the element on the global axes
  double A1 = Math.asin((xi - xj) / Element_[el][L_]);
  double lx = Element_[el][L_] * Math.cos(A1);
  double B1 = Math.asin((yi - yj) / Element_[el][L_]);
  double ly = Element_[el][L_] * Math.cos(B1);
  double G1 = Math.asin((zi - zj) / Element_[el][L_]);
  double lz = Element_[el][L_] * Math.cos(G1);
  if (vi == 0 && vj == 0) {
    // FIXED-FIXED (EMP-EMP); later multiplied by the rotation matrix
    // force along global X (special-cased when the member is normal to X)
    if (Math.abs(lx) < 0.0000001 && ly != 0 && lz != 0.0) {
      Qi[aX_] = LoadInElement_[QAx_] * Math.abs((xi - xj)) / 2.0;
      Qj[aX_] = Qi[aX_];
    } else {
      Qi[aX_] = LoadInElement_[QAx_] * lx / 2.0;
      Qj[aX_] = Qi[aX_];
    }
    // force along global Y
    if ((xi - xj) == 0 && (zi - zj) == 0.0) {
      Qi[aY_] = LoadInElement_[QAy_] * Math.abs((yi - yj)) / 2.0;
      Qj[aY_] = Qi[aY_];
    } else {
      Qi[aY_] = LoadInElement_[QAy_] * ly / 2.0;
      Qj[aY_] = Qi[aY_];
    }
    // force along global Z
    if (Math.abs(lz) < 0.0000001 && lx != 0 && ly != 0.0) {
      Qi[aZ_] = LoadInElement_[QAz_] * Math.abs((zi - zj)) / 2.0;
      Qj[aZ_] = Qi[aZ_];
    } else {
      Qi[aZ_] = LoadInElement_[QAz_] * lz / 2.0;
      Qj[aZ_] = Qi[aZ_];
    }
    // fixed-end moment about global X (q*l^2/12 pattern)
    Qi[gX_] =
        (LoadInElement_[QAy_] * ly * (zi - zj) - LoadInElement_[QAz_] * lz * (yi - yj)) / 12.0;
    Qj[gX_] = -Qi[gX_];
    // fixed-end moment about global Y
    Qi[gY_] =
        (LoadInElement_[QAz_] * lz * (xi - xj) - LoadInElement_[QAx_] * lx * (zi - zj)) / 12.0;
    Qj[gY_] = -Qi[gY_];
    // fixed-end moment about global Z
    Qi[gZ_] =
        (LoadInElement_[QAx_] * lx * (yi - yj) - LoadInElement_[QAy_] * ly * (xi - xj)) / 12.0;
    Qj[gZ_] = -Qi[gZ_];
  } else if (vi == 1 && vj == 1) {
    // HINGED-HINGED (ART-ART)
    // force along global X
    if (Math.abs(lx) < 0.0000001 && ly != 0 && lz != 0.0) {
      Qi[aX_] = LoadInElement_[QAx_] * Math.abs((xi - xj)) / 2.0;
      Qj[aX_] = Qi[aX_];
    } else {
      Qi[aX_] = LoadInElement_[QAx_] * lx / 2.0;
      Qj[aX_] = Qi[aX_];
    }
    // force along global Y
    if ((xi - xj) == 0 && (zi - zj) == 0.0) {
      Qi[aY_] = LoadInElement_[QAy_] * Math.abs((yi - yj)) / 2.0;
      Qj[aY_] = Qi[aY_];
    } else {
      Qi[aY_] = LoadInElement_[QAy_] * ly / 2.0;
      Qj[aY_] = Qi[aY_];
    }
    // force along global Z
    // FIX: this branch previously wrote Qi[aY_]/Qj[aY_] (copy-paste from the
    // Y block), overwriting the Y reactions and leaving the Z ones unset.
    if (Math.abs(lz) < 0.0000001 && lx != 0 && ly != 0.0) {
      Qi[aZ_] = LoadInElement_[QAz_] * Math.abs((zi - zj)) / 2.0;
      Qj[aZ_] = Qi[aZ_];
    } else {
      Qi[aZ_] = LoadInElement_[QAz_] * lz / 2.0;
      Qj[aZ_] = Qi[aZ_];
    }
    // hinged ends carry no end moments
    Qi[gX_] = 0.0;
    Qj[gX_] = 0.0;
    Qi[gY_] = 0.0;
    Qj[gY_] = 0.0;
    Qi[gZ_] = 0.0;
    Qj[gZ_] = 0.0;
  } else if (vi == 1 && vj == 0) {
    // HINGED-FIXED (ART-EMP): propped-cantilever split 3/8 at the hinge,
    // 5/8 at the fixed end, with the moment (q*l^2/8) at the fixed end only
    // force along global X
    if (Math.abs(lx) < 0.0000001 && ly != 0 && lz != 0) {
      Qi[aX_] = LoadInElement_[QAx_] * Math.abs((xi - xj)) / 2.0;
      Qj[aX_] = Qi[aX_];
    } else {
      Qi[aX_] = LoadInElement_[QAx_] * lx / 2.0;
      Qj[aX_] = Qi[aX_];
    }
    // force along global Y
    if ((xi - xj) == 0.0 && (zi - zj) == 0.0) {
      Qi[aY_] = 3.0 / 8.0 * LoadInElement_[QAy_] * Math.abs((yi - yj));
      Qj[aY_] = 5.0 / 8.0 * LoadInElement_[QAy_] * Math.abs((yi - yj));
    } else {
      Qi[aY_] = 3.0 / 8.0 * LoadInElement_[QAy_] * ly;
      Qj[aY_] = 5.0 / 8.0 * LoadInElement_[QAy_] * ly;
    }
    // force along global Z
    // NOTE(review): the vertical-member special case uses a half/half split
    // instead of 3/8-5/8 — kept as-is, confirm the intended behavior.
    if (Math.abs(lz) < 0.0000001 && lx != 0 && ly != 0.0) {
      Qi[aZ_] = LoadInElement_[QAz_] * Math.abs((zi - zj)) / 2.0;
      Qj[aZ_] = Qi[aZ_]; // FIX: was the magic index Qi[2]
    } else {
      Qi[aZ_] = 3.0 / 8.0 * LoadInElement_[QAz_] * lz;
      Qj[aZ_] = 5.0 / 8.0 * LoadInElement_[QAz_] * lz;
    }
    // end moment about global X (fixed end j only)
    Qi[gX_] = 0.0;
    Qj[gX_] =
        -(LoadInElement_[QAy_] * ly * (zi - zj) - LoadInElement_[QAz_] * lz * (yi - yj)) / 8.0;
    // end moment about global Y (fixed end j only)
    Qi[gY_] = 0.0;
    Qj[gY_] =
        -(LoadInElement_[QAz_] * lz * (xi - xj) - LoadInElement_[QAx_] * lx * (zi - zj)) / 8.0;
    // end moment about global Z (fixed end j only)
    Qi[gZ_] = 0.0;
    Qj[gZ_] =
        -(LoadInElement_[QAx_] * lx * (yi - yj) - LoadInElement_[QAy_] * ly * (xi - xj)) / 8.0;
  } else if (vi == 0 && vj == 1) {
    // FIXED-HINGED (EMP-ART): mirror of the previous case, 5/8 at the fixed
    // end i and 3/8 at the hinged end j
    // force along global X
    if (Math.abs(lx) < 0.0000001 && ly != 0 && lz != 0) {
      Qi[aX_] = LoadInElement_[QAx_] * Math.abs((xi - xj)) / 2.0;
      Qj[aX_] = Qi[aX_];
    } else {
      Qi[aX_] = LoadInElement_[QAx_] * lx / 2.0;
      Qj[aX_] = Qi[aX_];
    }
    // force along global Y
    if ((xi - xj) == 0.0 && (zi - zj) == 0.0) {
      Qi[aY_] = 5.0 / 8.0 * LoadInElement_[QAy_] * Math.abs((yi - yj));
      Qj[aY_] = 3.0 / 8.0 * LoadInElement_[QAy_] * Math.abs((yi - yj));
    } else {
      Qi[aY_] = 5.0 / 8.0 * LoadInElement_[QAy_] * ly;
      Qj[aY_] = 3.0 / 8.0 * LoadInElement_[QAy_] * ly;
    }
    // force along global Z
    if (Math.abs(lz) < 0.0000001 && lx != 0.0 && ly != 0.0) {
      Qi[aZ_] = LoadInElement_[QAz_] * Math.abs((zi - zj)) / 2.0;
      Qj[aZ_] = Qi[aZ_];
    } else {
      // FIX: removed a spurious "/ 2.0" — the reactions must sum to the
      // total applied load (5/8 + 3/8 = 1), as in the ART-EMP case above
      Qi[aZ_] = 5.0 / 8.0 * LoadInElement_[QAz_] * lz;
      Qj[aZ_] = 3.0 / 8.0 * LoadInElement_[QAz_] * lz;
    }
    // end moment about global X (fixed end i only)
    Qi[gX_] =
        (LoadInElement_[QAy_] * ly * (zi - zj) - LoadInElement_[QAz_] * lz * (yi - yj)) / 8.0;
    Qj[gX_] = 0.0;
    // end moment about global Y (fixed end i only)
    Qi[gY_] =
        (LoadInElement_[QAz_] * lz * (xi - xj) - LoadInElement_[QAx_] * lx * (zi - zj)) / 8.0;
    Qj[gY_] = 0.0;
    // end moment about global Z (fixed end i only)
    Qi[gZ_] =
        (LoadInElement_[QAx_] * lx * (yi - yj) - LoadInElement_[QAy_] * ly * (xi - xj)) / 8.0;
    Qj[gZ_] = 0.0;
  } else if (vi == 0 && vj == 9) {
    // FIXED-FREE cantilever (EMP-LIB).
    // NOTE(review): unreachable with the current switch (vi/vj are only ever
    // 0 or 1); kept for future fixity codes.
    if (Math.abs(lx) < 0.0000001 && ly != 0.0 && lz != 0.0) {
      Qi[aX_] = LoadInElement_[QAx_] * Math.abs((xi - xj)) / 2.0;
      Qj[aX_] = Qi[aX_];
    } else {
      Qi[aX_] = LoadInElement_[QAx_] * lx;
      Qj[aX_] = 0.0;
    }
    // force along global Y
    if ((xi - xj) == 0.0 && (zi - zj) == 0.0) {
      Qi[aY_] = LoadInElement_[QAy_] * Math.abs((yi - yj));
      Qj[aY_] = 0.0;
    } else {
      Qi[aY_] = LoadInElement_[QAy_] * ly;
      Qj[aY_] = 0.0;
    }
    // force along global Z
    if (Math.abs(lz) < 0.0000001 && lx != 0.0 && ly != 0.0) {
      Qi[aZ_] = LoadInElement_[QAz_] * Math.abs((zi - zj)) / 2.0;
      Qj[aZ_] = Qi[aZ_];
    } else {
      Qi[aZ_] = LoadInElement_[QAz_] * lz;
      Qj[aZ_] = 0.0;
    }
    // cantilever moments at the fixed end i
    Qi[gX_] =
        (LoadInElement_[QAy_] * ly * (zi - zj) - LoadInElement_[QAz_] * lz * (yi - yj)) / 2.0;
    Qj[gX_] = 0.0;
    Qi[gY_] =
        (LoadInElement_[QAz_] * lz * (xi - xj) - LoadInElement_[QAx_] * lx * (zi - zj)) / 2.0;
    Qj[gY_] = 0.0;
    Qi[gZ_] =
        (LoadInElement_[QAx_] * lx * (yi - yj) - LoadInElement_[QAy_] * ly * (xi - xj)) / 2.0;
    Qj[gZ_] = 0.0;
  } else if (vi == 9 && vj == 0) {
    // FREE-FIXED cantilever (LIB-EMP), mirror of the previous case.
    // NOTE(review): also unreachable with the current switch.
    if (Math.abs(lx) < 0.0000001 && ly != 0.0 && lz != 0) {
      Qi[aX_] = LoadInElement_[QAx_] * Math.abs((xi - xj)) / 2.0;
      Qj[aX_] = Qi[aX_];
    } else {
      Qi[aX_] = 0;
      Qj[aX_] = LoadInElement_[QAx_] * lx;
    }
    // force along global Y
    if ((xi - xj) == 0.0 && (zi - zj) == 0.0) {
      Qi[aY_] = 0;
      Qj[aY_] = LoadInElement_[QAy_] * Math.abs((yi - yj));
    } else {
      Qi[aY_] = 0.0;
      Qj[aY_] = LoadInElement_[QAy_] * ly;
    }
    // force along global Z
    if (Math.abs(lz) < 0.0000001 && lx != 0.0 && ly != 0.0) {
      Qi[aZ_] = LoadInElement_[QAz_] * Math.abs((zi - zj)) / 2.0;
      Qj[aZ_] = Qi[aZ_];
    } else {
      Qi[aZ_] = 0.0;
      Qj[aZ_] = LoadInElement_[QAz_] * lz;
    }
    // cantilever moments at the fixed end j
    Qi[gX_] = 0.0;
    Qj[gX_] =
        -(LoadInElement_[QAy_] * ly * (zi - zj) - LoadInElement_[QAz_] * lz * (yi - yj)) / 2.0;
    Qi[gY_] = 0.0;
    Qj[gY_] =
        -(LoadInElement_[QAz_] * lz * (xi - xj) - LoadInElement_[QAx_] * lx * (zi - zj)) / 2.0;
    Qi[gZ_] = 0.0;
    Qj[gZ_] =
        -(LoadInElement_[QAx_] * lx * (yi - yj) - LoadInElement_[QAy_] * ly * (xi - xj)) / 2.0;
  } else if ((vi == 0 && vj == 2) || (vi == 2 && vj == 0)) {
    System.out.println("invalid link");
  } else if ((vi == 1 && vj == 2) || (vi == 2 && vj == 1)) {
    System.out.println("invalid link");
  } else if ((vi == 2 && vj == 3) || (vi == 3 && vj == 2)) {
    System.out.println("invalid link");
  }
  // rotation matrix from the principal axes of the section to the local axes
  EBEsMatRot3DLpSaL(el);
  // rotation matrix from the local axes to the global axes
  EBEsMatRot3DLaG(el);
  // end i: pi = (Rpij * Rij) * Qi
  R = EBEsMatrizMultiplicar(Rpij, Rij);
  pi = EBEsMatrizVectorMultiplicar(R, Qi);
  // end j: pj = (Rpji * Rji) * Qj
  R = EBEsMatrizMultiplicar(Rpji, Rji);
  pj = EBEsMatrizVectorMultiplicar(R, Qj);
}
public void EBEsMatRot3DLpSaL(int e) {
  // Builds the 6x6 rotation matrices Rpij / Rpji (and their transposes
  // RpTij / RpTji) that take the principal axes "yp,zp" of the section,
  // rotated an angle Beta about the element axis, to the LOCAL axes "y,z".
  // The local x axis (the element axis) is unchanged, so each matrix is a
  // block-diagonal pair of 3x3 rotations about x: by +Beta for end i and
  // by -Beta for end j.
  int grp = (int) Element_[e][INDEX_];
  double rad = Groups_[grp][BETA] * Math.PI / 180.0; // Beta in radians
  double cosB = Math.cos(rad);
  double sinB = Math.sin(rad);
  // clear both matrices, then write only the two 3x3 diagonal blocks
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      Rpij[r][c] = 0.0;
      Rpji[r][c] = 0.0;
    }
  }
  // the same 3x3 rotation is applied to the translation block (rows 0-2)
  // and to the rotation block (rows 3-5)
  for (int k = 0; k < 6; k += 3) {
    // end i of element ij: rotation by +Beta about local x
    Rpij[k][k] = 1.0;
    Rpij[k + 1][k + 1] = cosB;
    Rpij[k + 1][k + 2] = sinB;
    Rpij[k + 2][k + 1] = -sinB;
    Rpij[k + 2][k + 2] = cosB;
    // end j of element ij: rotation by -Beta about local x
    Rpji[k][k] = 1.0;
    Rpji[k + 1][k + 1] = cosB;
    Rpji[k + 1][k + 2] = -sinB;
    Rpji[k + 2][k + 1] = sinB;
    Rpji[k + 2][k + 2] = cosB;
  }
  // transposed rotation matrices
  RpTij = EBEsMatrizTraspuesta(Rpij);
  RpTji = EBEsMatrizTraspuesta(Rpji);
}
public double[][] EBEsMatrizTraspuesta(double m[][]) {
  // Returns the transpose of matrix m (m must be rectangular, non-empty).
  // FIX: the result must be allocated as [col][row]; the previous
  // [row][col] allocation threw ArrayIndexOutOfBoundsException for any
  // non-square input (it only worked because callers pass square matrices).
  int row = m.length;    // number of rows of m
  int col = m[0].length; // number of columns of m
  double[][] mt = new double[col][row];
  for (int i = 0; i < row; i++) {
    for (int j = 0; j < col; j++) {
      mt[j][i] = m[i][j];
    }
  }
  return mt;
}
public void EBEsEcuationSolution(int hi) throws JMetalException {
  // Solves the banded system of equilibrium equations for load hypothesis
  // 'hi' by Gaussian elimination followed by back-substitution.
  // MatrixStiffness_ stores the stiffness matrix in banded form (one row of
  // width matrixWidthBand_ per DOF, 1-based arithmetic throughout);
  // DisplacementNodes_[*][hi] enters holding the load vector and leaves
  // holding the solved displacements.
  // Adapted from the book "La Estructura Metalica Hoy, Programacion, Tomo
  // III", Ramon Arguelles Alvarez.
  int i, j;
  int s1 = 1; // 1-based index of the current diagonal entry in the band storage
  int s2, l5, l6, ln, r;
  double det = 1.0; // scaled determinant accumulator (diagnostic only)
  double ff = 0.0, t;
  // total number of equations = DOFs per node * number of nodes
  int n2 = numberOfLibertyDegree_ * numberOfNodes;
  // ---- forward elimination ----
  for (i = 1; i < n2; i++) {
    // a huge diagonal value (>= 1.0E+25) marks a constrained DOF: skip it
    if (MatrixStiffness_[s1 - 1] >= 1.0E+25) {
      s1 = s1 + matrixWidthBand_;
      continue; // originally "Salto1"
    }
    ln = i + 1;
    l5 = s1 + 1;
    for (j = 2; j < matrixWidthBand_ + 1; j++) {
      if (ln - n2 > 0) {
        break; // past the last equation (originally "Salto2")
      }
      // NOTE(review): this tests the pivot s1, not the off-diagonal l5 —
      // looks suspicious but is kept as in the original source; confirm.
      if (MatrixStiffness_[s1 - 1] == 0) {
        ln = ln + 1;
        l5 = l5 + 1;
        continue; // originally "Salto3"
      }
      // elimination factor for row ln
      t = MatrixStiffness_[l5 - 1] / MatrixStiffness_[s1 - 1];
      l6 = (ln - 1) * matrixWidthBand_ + 1;
      s2 = s1 + j - 1;
      // subtract t * (pivot row) from row ln, within the band
      for (r = j; r < matrixWidthBand_ + 1; r++) {
        MatrixStiffness_[l6 - 1] = MatrixStiffness_[l6 - 1] - t * MatrixStiffness_[s2 - 1];
        l6 = l6 + 1;
        s2 = s2 + 1;
      } // next r
      // update the right-hand side accordingly
      DisplacementNodes_[ln - 1][hi] =
          DisplacementNodes_[ln - 1][hi] - t * DisplacementNodes_[i - 1][hi];
      ln = ln + 1;
      l5 = l5 + 1;
      // Salto3:
    } // next j
    // Salto2:
    s1 = s1 + matrixWidthBand_;
    // Salto1:
  } // next i
  // ---- back-substitution ----
  // s1 now points at the diagonal of the last equation; walk backwards
  i = n2 + 1;
  for (int s3 = 1; s3 < n2 + 1; s3++) {
    i = i - 1;
    ff = 0.0; // accumulates the already-solved terms of equation i
    ln = i + 1;
    l5 = s1 + 1;
    for (j = 2; j < matrixWidthBand_ + 1; j++) {
      if (ln - n2 > 0) {
        break; // originally "Salto4"
      }
      ff = ff + DisplacementNodes_[ln - 1][hi] * MatrixStiffness_[l5 - 1];
      ln = ln + 1;
      l5 = l5 + 1;
    } // Next j
    // Salto4:
    // near-zero pivot: clamp the displacement instead of dividing by ~0
    if (Math.abs(MatrixStiffness_[s1 - 1]) <= 1.0E-35) {
      DisplacementNodes_[i - 1][hi] = 1.0E-35;
    } else {
      DisplacementNodes_[i - 1][hi] =
          (DisplacementNodes_[i - 1][hi] - ff) / MatrixStiffness_[s1 - 1];
    }
    // accumulate the (scaled) determinant, skipping constrained-DOF pivots
    if (MatrixStiffness_[s1 - 1] < 9.899999E+15) {
      det = det * MatrixStiffness_[s1 - 1] / 100000.0;
    }
    s1 = s1 - matrixWidthBand_;
  } // Next s3
} // end EcuationSolution
public void EBEsMat3DL_iRig_jRig(int e) throws JMetalException {
  // Fills the four 6x6 local stiffness sub-matrices (Kii, Kij, Kji, Kjj) of
  // 3D beam element 'e' with BOTH ends rigidly connected (fixed-fixed).
  // DOF order per end: ux, uy, uz, rx, ry, rz.
  double len = Element_[e][L_]; // element length
  int grp = (int) Element_[e][INDEX_]; // section/group index
  double area = Groups_[grp][AREA]; // cross-section area
  double inZ = Groups_[grp][Iz_]; // inertia about local z
  double inY = Groups_[grp][Iy_]; // inertia about local y
  double inT = Groups_[grp][It_]; // torsional inertia
  double young = Groups_[grp][E_]; // Young's modulus
  double shear = Groups_[grp][G_]; // transverse (shear) modulus
  // recurring stiffness terms
  double axial = young * area / len; // E*S/L
  double torsion = shear * inT / len; // G*Ip/L
  double bz3 = 12 * young * inZ / Math.pow(len, 3); // 12*E*Iz/L^3
  double bz2 = 6 * young * inZ / Math.pow(len, 2); // 6*E*Iz/L^2
  double by3 = 12 * young * inY / Math.pow(len, 3); // 12*E*Iy/L^3
  double by2 = 6 * young * inY / Math.pow(len, 2); // 6*E*Iy/L^2
  // start from all-zero blocks, then write only the non-zero entries
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      Kii[r][c] = 0;
      Kij[r][c] = 0;
    }
  }
  // Kii: forces at end i caused by displacements at end i
  Kii[0][0] = axial;
  Kii[1][1] = bz3;
  Kii[1][5] = -bz2;
  Kii[2][2] = by3;
  Kii[2][4] = by2;
  Kii[3][3] = torsion;
  Kii[4][2] = by2;
  Kii[4][4] = 4 * young * inY / len;
  Kii[5][1] = -bz2;
  Kii[5][5] = 4 * young * inZ / len;
  // Kij: forces at end i caused by displacements at end j
  Kij[0][0] = axial;
  Kij[1][1] = bz3;
  Kij[1][5] = -bz2;
  Kij[2][2] = -by3;
  Kij[2][4] = -by2;
  Kij[3][3] = torsion;
  Kij[4][2] = -by2;
  Kij[4][4] = -2 * young * inY / len;
  Kij[5][1] = -bz2;
  Kij[5][5] = 2 * young * inZ / len;
  // for the fixed-fixed member this formulation uses Kji = Kij and Kjj = Kii
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      Kji[r][c] = Kij[r][c];
      Kjj[r][c] = Kii[r][c];
    }
  }
  // PrintArchTxtMKLB(e);
}
public void EBEsMat3DL_iArt_jRig(int e) throws JMetalException {
  // Fills the four 6x6 local stiffness sub-matrices (Kii, Kij, Kji, Kjj) of
  // 3D beam element 'e' with end i HINGED and end j RIGID.
  // DOF order per end: ux, uy, uz, rx, ry, rz. The hinge removes all moment
  // stiffness at end i, leaving the 3*E*I propped-member coefficients.
  double len = Element_[e][L_]; // element length
  int grp = (int) Element_[e][INDEX_]; // section/group index
  double area = Groups_[grp][AREA]; // cross-section area
  double inZ = Groups_[grp][Iz_]; // inertia about local z
  double inY = Groups_[grp][Iy_]; // inertia about local y
  double young = Groups_[grp][E_]; // Young's modulus
  // recurring stiffness terms
  double axial = young * area / len; // E*S/L
  double sz3 = 3 * young * inZ / Math.pow(len, 3); // 3*E*Iz/L^3
  double sz2 = 3 * young * inZ / Math.pow(len, 2); // 3*E*Iz/L^2
  double sy3 = 3 * young * inY / Math.pow(len, 3); // 3*E*Iy/L^3
  double sy2 = 3 * young * inY / Math.pow(len, 2); // 3*E*Iy/L^2
  // start from all-zero blocks, then write only the non-zero entries
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      Kii[r][c] = 0;
      Kij[r][c] = 0;
      Kji[r][c] = 0;
      Kjj[r][c] = 0;
    }
  }
  // Kii: forces at end i caused by displacements at end i
  Kii[0][0] = axial;
  Kii[1][1] = sz3;
  Kii[2][2] = sy3;
  // Kij: forces at end i caused by displacements at end j
  Kij[0][0] = axial;
  Kij[1][1] = sz3;
  Kij[1][5] = -sz2;
  Kij[2][2] = -sy3;
  Kij[2][4] = -sy2;
  // Kji: forces at end j caused by displacements at end i
  Kji[0][0] = axial;
  Kji[1][1] = sz3;
  Kji[2][2] = -sy3;
  Kji[4][2] = -sy2;
  Kji[5][1] = -sz2;
  // Kjj: forces at end j caused by displacements at end j
  Kjj[0][0] = axial;
  Kjj[1][1] = sz3;
  Kjj[1][5] = -sz2;
  Kjj[2][2] = sy3;
  Kjj[2][4] = sy2;
  Kjj[4][2] = sy2;
  Kjj[4][4] = 3 * young * inY / len;
  Kjj[5][1] = -sz2;
  Kjj[5][5] = 3 * young * inZ / len;
  // PrintArchTxtMKLB(e);
}
public void EBEsMat3DL_iRig_jArt(int e) throws JMetalException {
  // Fills the four 6x6 local stiffness sub-matrices (Kii, Kij, Kji, Kjj) of
  // 3D beam element 'e' with end i RIGID and end j HINGED.
  // DOF order per end: ux, uy, uz, rx, ry, rz. The hinge removes all moment
  // stiffness at end j, leaving the 3*E*I propped-member coefficients.
  double len = Element_[e][L_]; // element length
  int grp = (int) Element_[e][INDEX_]; // section/group index
  double area = Groups_[grp][AREA]; // cross-section area
  double inZ = Groups_[grp][Iz_]; // inertia about local z
  double inY = Groups_[grp][Iy_]; // inertia about local y
  double young = Groups_[grp][E_]; // Young's modulus
  // recurring stiffness terms
  double axial = young * area / len; // E*S/L
  double sz3 = 3 * young * inZ / Math.pow(len, 3); // 3*E*Iz/L^3
  double sz2 = 3 * young * inZ / Math.pow(len, 2); // 3*E*Iz/L^2
  double sy3 = 3 * young * inY / Math.pow(len, 3); // 3*E*Iy/L^3
  double sy2 = 3 * young * inY / Math.pow(len, 2); // 3*E*Iy/L^2
  // start from all-zero blocks, then write only the non-zero entries
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      Kii[r][c] = 0;
      Kij[r][c] = 0;
      Kji[r][c] = 0;
      Kjj[r][c] = 0;
    }
  }
  // Kii: forces at end i caused by displacements at end i
  Kii[0][0] = axial;
  Kii[1][1] = sz3;
  Kii[1][5] = -sz2;
  Kii[2][2] = sy3;
  Kii[2][4] = sy2;
  Kii[4][2] = sy2;
  Kii[4][4] = 3 * young * inY / len;
  Kii[5][1] = -sz2;
  Kii[5][5] = 3 * young * inZ / len;
  // Kij: forces at end i caused by displacements at end j
  Kij[0][0] = axial;
  Kij[1][1] = sz3;
  Kij[2][2] = -sy3;
  Kij[4][2] = -sy2;
  Kij[5][1] = -sz2;
  // Kji: forces at end j caused by displacements at end i
  Kji[0][0] = axial;
  Kji[1][1] = sz3;
  Kji[1][5] = -sz2;
  Kji[2][2] = -sy3;
  Kji[2][4] = -sy2;
  // Kjj: forces at end j caused by displacements at end j
  Kjj[0][0] = axial;
  Kjj[1][1] = sz3;
  Kjj[2][2] = sy3;
  // PrintArchTxtMKLB(e);
}
public void EBEsMat3DL_iArt_jArt(int e) throws JMetalException {
  // Fills the four 6x6 local stiffness sub-matrices (Kii, Kij, Kji, Kjj) of
  // 3D element 'e' with BOTH ends hinged (truss member): the only stiffness
  // is the axial term E*S/L at entry [0][0] of every block.
  double len = Element_[e][L_]; // element length
  int grp = (int) Element_[e][INDEX_]; // section/group index
  double area = Groups_[grp][AREA]; // cross-section area
  double young = Groups_[grp][E_]; // Young's modulus
  double axial = young * area / len; // E*S/L
  // zero everything, then set the single axial entry of each block
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      Kii[r][c] = 0;
      Kij[r][c] = 0;
      Kji[r][c] = 0;
      Kjj[r][c] = 0;
    }
  }
  Kii[0][0] = axial;
  Kij[0][0] = axial;
  Kji[0][0] = axial;
  Kjj[0][0] = axial;
  // PrintArchTxtMKLB(e);
}
/**
 * Second-order geometric (SOG) stiffness of element {@code e} in LOCAL coordinates.
 *
 * Fills the 6x6 instance matrices {@code KiiSOG}, {@code KijSOG}, {@code KjiSOG}
 * and {@code KjjSOG} from the element length and the axial forces at both ends;
 * EBEsEffortsElements3D adds them to the first-order stiffness on the first
 * iteration when {@code lSecondOrderGeometric} is enabled.
 *
 * NOTE(review): Kii/Kij use the end-i axial force while Kji/Kjj use end-j's —
 * verify this is the intended treatment of a varying axial force.
 *
 * @param e element index into {@code Element_}
 */
public void EBEsMat3DL_SOG(int e) throws JMetalException {
// element length
double l = Element_[e][L_];
// axial forces at end i and end j (Nxx < 0 is treated as compression
// elsewhere, see BucklingOmega)
double Ni = AxialForcei_[e];
double Nj = AxialForcej_[e];
// forces at node i due to unit displacements at node i
KiiSOG[0][0] = 0.0;
KiiSOG[0][1] = 0.0;
KiiSOG[0][2] = 0.0;
KiiSOG[0][3] = 0.0;
KiiSOG[0][4] = 0.0;
KiiSOG[0][5] = 0.0;
KiiSOG[1][0] = 0.0;
KiiSOG[1][1] = Ni * 6.0 / (5.0 * l);
KiiSOG[1][2] = 0.0;
KiiSOG[1][3] = 0.0;
KiiSOG[1][4] = 0.0;
KiiSOG[1][5] = -Ni / 10.0;
KiiSOG[2][0] = 0.0;
KiiSOG[2][1] = 0.0;
KiiSOG[2][2] = Ni * 6.0 / (5.0 * l);
KiiSOG[2][3] = 0.0;
KiiSOG[2][4] = Ni / 10.0;
KiiSOG[2][5] = 0.0;
KiiSOG[3][0] = 0.0;
KiiSOG[3][1] = 0.0;
KiiSOG[3][2] = 0.0;
KiiSOG[3][3] = 0.0;
KiiSOG[3][4] = 0.0;
KiiSOG[3][5] = 0.0;
KiiSOG[4][0] = 0.0;
KiiSOG[4][1] = 0.0;
KiiSOG[4][2] = Ni / 10.0;
KiiSOG[4][3] = 0.0;
KiiSOG[4][4] = Ni * 2.0 * l / 15.0;
KiiSOG[4][5] = 0.0;
KiiSOG[5][0] = 0.0;
KiiSOG[5][1] = -Ni / 10.0;
KiiSOG[5][2] = 0.0;
KiiSOG[5][3] = 0.0;
KiiSOG[5][4] = 0.0;
KiiSOG[5][5] = Ni * 2.0 * l / 15.0;
// forces at node i due to unit displacements at node j
KijSOG[0][0] = 0.0;
KijSOG[0][1] = 0.0;
KijSOG[0][2] = 0.0;
KijSOG[0][3] = 0.0;
KijSOG[0][4] = 0.0;
KijSOG[0][5] = 0.0;
KijSOG[1][0] = 0.0;
KijSOG[1][1] = Ni * 6.0 / (5.0 * l);
KijSOG[1][2] = 0.0;
KijSOG[1][3] = 0.0;
KijSOG[1][4] = 0.0;
KijSOG[1][5] = -Ni / 10.0; // - original
KijSOG[2][0] = 0.0;
KijSOG[2][1] = 0.0;
KijSOG[2][2] = -Ni * 6.0 / (5.0 * l);
KijSOG[2][3] = 0.0;
KijSOG[2][4] = -Ni / 10.0; // - original
KijSOG[2][5] = 0.0;
KijSOG[3][0] = 0.0;
KijSOG[3][1] = 0.0;
KijSOG[3][2] = 0.0;
KijSOG[3][3] = 0.0;
KijSOG[3][4] = 0.0;
KijSOG[3][5] = 0.0;
KijSOG[4][0] = 0.0;
KijSOG[4][1] = 0.0;
// NOTE(review): neighbouring coupling terms are +/-N/10; "-Ni / l" here
// looks inconsistent — confirm against the beam geometric stiffness matrix.
KijSOG[4][2] = -Ni / l;
KijSOG[4][3] = 0.0;
KijSOG[4][4] = Ni * l / 30.0; // - original
KijSOG[4][5] = 0.0;
KijSOG[5][0] = 0.0;
KijSOG[5][1] = -Ni / 10.0;
KijSOG[5][2] = 0.0;
KijSOG[5][3] = 0.0;
KijSOG[5][4] = 0.0;
KijSOG[5][5] = -Ni * l / 30.0; // + original
// forces at node j due to unit displacements at node i
KjiSOG[0][0] = 0.0;
KjiSOG[0][1] = 0.0;
KjiSOG[0][2] = 0.0;
KjiSOG[0][3] = 0.0;
KjiSOG[0][4] = 0.0;
KjiSOG[0][5] = 0.0;
KjiSOG[1][0] = 0.0;
KjiSOG[1][1] = Nj * 6.0 / (5.0 * l);
KjiSOG[1][2] = 0.0;
KjiSOG[1][3] = 0.0;
KjiSOG[1][4] = 0.0;
KjiSOG[1][5] = -Nj / 10.0;
KjiSOG[2][0] = 0.0;
KjiSOG[2][1] = 0.0;
KjiSOG[2][2] = -Nj * 6.0 / (5.0 * l);
KjiSOG[2][3] = 0.0;
KjiSOG[2][4] = -Nj / 10.0;
KjiSOG[2][5] = 0.0;
KjiSOG[3][0] = 0.0;
KjiSOG[3][1] = 0.0;
KjiSOG[3][2] = 0.0;
KjiSOG[3][3] = 0.0;
KjiSOG[3][4] = 0.0;
KjiSOG[3][5] = 0.0;
KjiSOG[4][0] = 0.0;
KjiSOG[4][1] = 0.0;
KjiSOG[4][2] = -Nj / 10.0; // - original
KjiSOG[4][3] = 0.0;
KjiSOG[4][4] = Nj * l / 30.0; // - original
KjiSOG[4][5] = 0.0;
KjiSOG[5][0] = 0.0;
KjiSOG[5][1] = -Nj / 10.0; // - original
KjiSOG[5][2] = 0.0;
KjiSOG[5][3] = 0.0;
KjiSOG[5][4] = 0.0;
KjiSOG[5][5] = -Nj * l / 30.0; // + original
// forces at node j due to unit displacements at node j
KjjSOG[0][0] = 0.0;
KjjSOG[0][1] = 0.0;
KjjSOG[0][2] = 0.0;
KjjSOG[0][3] = 0.0;
KjjSOG[0][4] = 0.0;
KjjSOG[0][5] = 0.0;
KjjSOG[1][0] = 0.0;
KjjSOG[1][1] = Nj * 6.0 / (5.0 * l);
KjjSOG[1][2] = 0.0;
KjjSOG[1][3] = 0.0;
KjjSOG[1][4] = 0.0;
KjjSOG[1][5] = -Nj / 10.0;
KjjSOG[2][0] = 0.0;
KjjSOG[2][1] = 0.0;
KjjSOG[2][2] = Nj * 6.0 / (5.0 * l);
KjjSOG[2][3] = 0.0;
KjjSOG[2][4] = Nj / 10.0;
KjjSOG[2][5] = 0.0;
KjjSOG[3][0] = 0.0;
KjjSOG[3][1] = 0.0;
KjjSOG[3][2] = 0.0;
KjjSOG[3][3] = 0.0;
KjjSOG[3][4] = 0.0;
KjjSOG[3][5] = 0.0;
KjjSOG[4][0] = 0.0;
KjjSOG[4][1] = 0.0;
KjjSOG[4][2] = Nj / 10.0;
KjjSOG[4][3] = 0.0;
KjjSOG[4][4] = Nj * 2.0 * l / 15.0;
KjjSOG[4][5] = 0.0;
KjjSOG[5][0] = 0.0;
KjjSOG[5][1] = -Nj / 10.0;
KjjSOG[5][2] = 0.0;
KjjSOG[5][3] = 0.0;
KjjSOG[5][4] = 0.0;
KjjSOG[5][5] = Nj * 2.0 * l / 15.0;
}
/**
 * Rotation matrices for element {@code e}: displacements from LOCAL axes to
 * GLOBAL axes (3D).  Fills the 6x6 fields {@code Rij} (end i) and {@code Rji}
 * (end j) plus their transposes {@code RTij} and {@code RTji}.
 *
 * @param e element index into {@code Element_}
 */
public void EBEsMatRot3DLaG(int e) throws JMetalException {
int i, j;
// Direction cosines of the local axes w.r.t. the global X, Y, Z axes:
// (lx, mx, nx) -> local x (the bar axis),
// (ly, my, ny) -> local y,
// (lz, mz, nz) -> local z.
double lx;
double mx;
double nx;
// D = length of the bar-axis projection onto the global XY plane
double D;
double ly;
double my;
double ny;
double lz;
double mz;
double nz;
int sgn;
// direction cosines of local x w.r.t. the global XYZ system
// NOTE(review): lx, mx, nx are computed from node i MINUS node j, i.e. the
// unit vector points from j towards i — confirm this matches the sign
// convention used where Rij/Rji are applied.
int ni = (int) Element_[e][i_];
int nj = (int) Element_[e][j_];
lx = (Node_[ni][aX_] - Node_[nj][aX_]) / Element_[e][L_];
mx = (Node_[ni][aY_] - Node_[nj][aY_]) / Element_[e][L_];
nx = (Node_[ni][aZ_] - Node_[nj][aZ_]) / Element_[e][L_];
D = Math.sqrt(Math.pow(lx, 2.0) + Math.pow(mx, 2.0));
if (lx == 0 && mx == 0) {
// Degenerate case: the bar axis is parallel to the global Z axis, so the
// general construction below (division by D) is undefined; use a fixed
// triad whose sign follows the bar direction.
sgn = (int) Math.signum(Node_[ni][aZ_] - Node_[nj][aZ_]);
lx = 0;
mx = 0;
nx = sgn;
// direction cosines of local y w.r.t. the global XYZ system
ly = 0;
my = sgn;
ny = 0;
// direction cosines of local z w.r.t. the global XYZ system
lz = -1;
mz = 0;
nz = 0;
D = 0;
} else {
// General case: the bar axis is not parallel to the global Z axis.
// direction cosines of local y w.r.t. the global XYZ system
ly = -mx / D;
my = lx / D;
ny = 0;
// direction cosines of local z w.r.t. the global XYZ system
lz = -lx * nx / D;
mz = -mx * nx / D;
nz = D;
}
// 6x6 rotation of local displacements to global XYZ for end i of bar ij,
// assuming the principal section axes "yp,zp" coincide with the local
// "y,z" axes of the bar.  Layout: two 3x3 diagonal blocks (translations,
// rotations), off-diagonal blocks zeroed below.
Rij[0][0] = lx;
Rij[0][1] = mx;
Rij[0][2] = nx;
Rij[1][0] = ly;
Rij[1][1] = my;
Rij[1][2] = ny;
Rij[2][0] = lz;
Rij[2][1] = mz;
Rij[2][2] = nz;
for (i = 0; i < 3; i++) {
for (j = 3; j < 6; j++) {
Rij[i][j] = 0.0;
} // next j
} // next i
for (i = 3; i < 6; i++) {
for (j = 0; j < 3; j++) {
Rij[i][j] = 0.0;
} // next j
} // next i
Rij[3][3] = lx;
Rij[3][4] = mx;
Rij[3][5] = nx;
Rij[4][3] = ly;
Rij[4][4] = my;
Rij[4][5] = ny;
Rij[5][3] = lz;
Rij[5][4] = mz;
Rij[5][5] = nz;
// transpose of the rotation matrix (global -> local)
RTij = EBEsMatrizTraspuesta(Rij);
// Same construction for end j of bar ij, with the x and y rows negated.
// NOTE(review): the ny terms (rows [1][2] and [4][5]) are NOT negated,
// unlike the rest of their rows — verify this asymmetry is intentional.
Rji[0][0] = -lx;
Rji[0][1] = -mx;
Rji[0][2] = -nx;
Rji[1][0] = -ly;
Rji[1][1] = -my;
Rji[1][2] = ny;
Rji[2][0] = lz;
Rji[2][1] = mz;
Rji[2][2] = nz;
for (i = 0; i < 3; i++) {
for (j = 3; j < 6; j++) {
Rji[i][j] = 0.0;
} // next j
} // next i
for (i = 3; i < 6; i++) {
for (j = 0; j < 3; j++) {
Rji[i][j] = 0.0;
} // next j
} // next i
Rji[3][3] = -lx;
Rji[3][4] = -mx;
Rji[3][5] = -nx;
Rji[4][3] = -ly;
Rji[4][4] = -my;
Rji[4][5] = ny;
Rji[5][3] = lz;
Rji[5][4] = mz;
Rji[5][5] = nz;
// transpose of the rotation matrix (global -> local)
RTji = EBEsMatrizTraspuesta(Rji);
}
/**
 * Builds the four 6x6 stiffness sub-matrices of one bar in GLOBAL
 * coordinates (KGii, KGij, KGji, KGjj) by rotating the local sub-matrices
 * (Kii, Kij, Kji, Kjj) with the principal-axis rotations (Rpij/Rpji and
 * transposes) and the local-to-global rotations (Rij/Rji and transposes),
 * all of which must have been computed beforehand.
 *
 * Fix: removed three dead array allocations that were immediately
 * overwritten by the first products, and corrected the garbled
 * "KjjSOGSOGSOG" comment (the first product really uses Kii).
 */
public void EBEsMat3DGij() throws JMetalException {
  // end ii: KGii = RTij * RpTij * Kii * Rpij * Rij
  double[][] r = EBEsMatrizMultiplicar(Rpij, Rij);
  double[][] s = EBEsMatrizMultiplicar(Kii, r);
  double[][] t = EBEsMatrizMultiplicar(RpTij, s);
  KGii = EBEsMatrizMultiplicar(RTij, t);
  // end ij: KGij = RTij * RpTij * Kij * Rpji * Rji
  r = EBEsMatrizMultiplicar(Rpji, Rji);
  s = EBEsMatrizMultiplicar(Kij, r);
  t = EBEsMatrizMultiplicar(RpTij, s);
  KGij = EBEsMatrizMultiplicar(RTij, t);
  // end ji: KGji = RTji * RpTji * Kji * Rpij * Rij
  r = EBEsMatrizMultiplicar(Rpij, Rij);
  s = EBEsMatrizMultiplicar(Kji, r);
  t = EBEsMatrizMultiplicar(RpTji, s);
  KGji = EBEsMatrizMultiplicar(RTji, t);
  // end jj: KGjj = RTji * RpTji * Kjj * Rpji * Rji
  r = EBEsMatrizMultiplicar(Rpji, Rji);
  s = EBEsMatrizMultiplicar(Kjj, r);
  t = EBEsMatrizMultiplicar(RpTji, s);
  KGjj = EBEsMatrizMultiplicar(RTji, t);
} // end module
/**
 * Assembles the global stiffness sub-matrices of element {@code e}
 * (KGii, KGij, KGjj — built beforehand by EBEsMat3DGij) into the global
 * stiffness matrix, stored as the 1-D banded upper triangle
 * {@code MatrixStiffness_} with band width {@code matrixWidthBand_}.
 *
 * Only the upper triangle of each row is stored: each row k starts at
 * offset matrixWidthBand_ * k and begins at the diagonal.  Diagonal
 * blocks (KGii, KGjj) are accumulated with +=; the i-j coupling block
 * (KGij) is written with plain assignment.
 * NOTE(review): KGij entries are assigned, not accumulated — if two
 * elements ever connect the same node pair the second write would
 * overwrite the first; confirm the model forbids duplicate bars.
 * NOTE(review): KGji is never used here — presumably redundant because
 * only the upper band is stored; verify symmetry is guaranteed upstream.
 *
 * @param e element index into {@code Element_}
 */
public void EBEsMat3DG(int e) throws JMetalException {
// bar element assembly: both ends rigid
int ni, nj;
// p, r: first global DOF index of node i and node j
int p;
// p1..p6: start offset of rows p .. p+5 in the banded storage
int p1;
int p2;
int p3;
int p4;
int p5;
int p6;
int r;
// r1..r6: start offset of rows r .. r+5 in the banded storage
int r1;
int r2;
int r3;
int r4;
int r5;
int r6;
// row indices for node i
ni = (int) Element_[e][i_];
nj = (int) Element_[e][j_];
p = numberOfLibertyDegree_ * ni;
r = numberOfLibertyDegree_ * nj;
p1 = matrixWidthBand_ * p;
p2 = matrixWidthBand_ * (p + 1);
p3 = matrixWidthBand_ * (p + 2);
p4 = matrixWidthBand_ * (p + 3);
p5 = matrixWidthBand_ * (p + 4);
p6 = matrixWidthBand_ * (p + 5);
// row indices for node j
r1 = matrixWidthBand_ * r;
r2 = matrixWidthBand_ * (r + 1);
r3 = matrixWidthBand_ * (r + 2);
r4 = matrixWidthBand_ * (r + 3);
r5 = matrixWidthBand_ * (r + 4);
r6 = matrixWidthBand_ * (r + 5);
// rows of node i: diagonal block KGii (accumulated) then coupling KGij
// row 0
MatrixStiffness_[p1] = MatrixStiffness_[p1] + KGii[0][0]; // 0
MatrixStiffness_[p1 + 1] = MatrixStiffness_[p1 + 1] + KGii[0][1]; // 1
MatrixStiffness_[p1 + 2] = MatrixStiffness_[p1 + 2] + KGii[0][2]; // 2
MatrixStiffness_[p1 + 3] = MatrixStiffness_[p1 + 3] + KGii[0][3]; // 3
MatrixStiffness_[p1 + 4] = MatrixStiffness_[p1 + 4] + KGii[0][4]; // 4
MatrixStiffness_[p1 + 5] = MatrixStiffness_[p1 + 5] + KGii[0][5]; // 5
MatrixStiffness_[p1 + r - p] = KGij[0][0]; // 6
MatrixStiffness_[p1 + 1 + r - p] = KGij[0][1]; // 7
MatrixStiffness_[p1 + 2 + r - p] = KGij[0][2]; // 8
MatrixStiffness_[p1 + 3 + r - p] = KGij[0][3]; // 9
MatrixStiffness_[p1 + 4 + r - p] = KGij[0][4]; // 10
MatrixStiffness_[p1 + 5 + r - p] = KGij[0][5]; // 11
// row 1 (starts at the diagonal, hence KGii[1][1])
MatrixStiffness_[p2] = MatrixStiffness_[p2] + KGii[1][1]; // 12
MatrixStiffness_[p2 + 1] = MatrixStiffness_[p2 + 1] + KGii[1][2]; // 13
MatrixStiffness_[p2 + 2] = MatrixStiffness_[p2 + 2] + KGii[1][3]; // 14
MatrixStiffness_[p2 + 3] = MatrixStiffness_[p2 + 3] + KGii[1][4]; // 15
MatrixStiffness_[p2 + 4] = MatrixStiffness_[p2 + 4] + KGii[1][5]; // 16
MatrixStiffness_[p2 + r - p - 1] = KGij[1][0]; // 17
MatrixStiffness_[p2 + r - p] = KGij[1][1]; // 18
MatrixStiffness_[p2 + r - p + 1] = KGij[1][2]; // 19
MatrixStiffness_[p2 + r - p + 2] = KGij[1][3]; // 20
MatrixStiffness_[p2 + r - p + 3] = KGij[1][4]; // 21
MatrixStiffness_[p2 + r - p + 4] = KGij[1][5]; // 22
// row 2
MatrixStiffness_[p3] = MatrixStiffness_[p3] + KGii[2][2]; // 24
MatrixStiffness_[p3 + 1] = MatrixStiffness_[p3 + 1] + KGii[2][3]; // 25
MatrixStiffness_[p3 + 2] = MatrixStiffness_[p3 + 2] + KGii[2][4]; // 26
MatrixStiffness_[p3 + 3] = MatrixStiffness_[p3 + 3] + KGii[2][5]; // 27
MatrixStiffness_[p3 + r - p - 2] = KGij[2][0]; // 28
MatrixStiffness_[p3 + r - p - 1] = KGij[2][1]; // 29
MatrixStiffness_[p3 + r - p] = KGij[2][2]; // 30
MatrixStiffness_[p3 + r - p + 1] = KGij[2][3]; // 31
MatrixStiffness_[p3 + r - p + 2] = KGij[2][4]; // 32
MatrixStiffness_[p3 + r - p + 3] = KGij[2][5]; // 33
// row 3
MatrixStiffness_[p4] = MatrixStiffness_[p4] + KGii[3][3]; // 36
MatrixStiffness_[p4 + 1] = MatrixStiffness_[p4 + 1] + KGii[3][4]; // 37
MatrixStiffness_[p4 + 2] = MatrixStiffness_[p4 + 2] + KGii[3][5]; // 38
MatrixStiffness_[p4 + r - p - 3] = KGij[3][0]; // 39
MatrixStiffness_[p4 + r - p - 2] = KGij[3][1]; // 40
MatrixStiffness_[p4 + r - p - 1] = KGij[3][2]; // 41
MatrixStiffness_[p4 + r - p] = KGij[3][3]; // 42
MatrixStiffness_[p4 + r - p + 1] = KGij[3][4]; // 43
MatrixStiffness_[p4 + r - p + 2] = KGij[3][5]; // 44
// row 4
MatrixStiffness_[p5] = MatrixStiffness_[p5] + KGii[4][4]; // 48
MatrixStiffness_[p5 + 1] = MatrixStiffness_[p5 + 1] + KGii[4][5]; // 49
MatrixStiffness_[p5 + r - p - 4] = KGij[4][0]; // 50
MatrixStiffness_[p5 + r - p - 3] = KGij[4][1]; // 51
MatrixStiffness_[p5 + r - p - 2] = KGij[4][2]; // 52
MatrixStiffness_[p5 + r - p - 1] = KGij[4][3]; // 53
MatrixStiffness_[p5 + r - p] = KGij[4][4]; // 54
MatrixStiffness_[p5 + r - p + 1] = KGij[4][5]; // 55
// row 5
MatrixStiffness_[p6] = MatrixStiffness_[p6] + KGii[5][5]; // 60
MatrixStiffness_[p6 + r - p - 5] = KGij[5][0]; // 61
MatrixStiffness_[p6 + r - p - 4] = KGij[5][1]; // 62
MatrixStiffness_[p6 + r - p - 3] = KGij[5][2]; // 63
MatrixStiffness_[p6 + r - p - 2] = KGij[5][3]; // 64
MatrixStiffness_[p6 + r - p - 1] = KGij[5][4]; // 65
MatrixStiffness_[p6 + r - p] = KGij[5][5]; // 66
// rows of node j: diagonal block KGjj (accumulated), upper triangle only
// row 6
MatrixStiffness_[r1] = MatrixStiffness_[r1] + KGjj[0][0]; // 72
MatrixStiffness_[r1 + 1] = MatrixStiffness_[r1 + 1] + KGjj[0][1]; // 73
MatrixStiffness_[r1 + 2] = MatrixStiffness_[r1 + 2] + KGjj[0][2]; // 74
MatrixStiffness_[r1 + 3] = MatrixStiffness_[r1 + 3] + KGjj[0][3]; // 75
MatrixStiffness_[r1 + 4] = MatrixStiffness_[r1 + 4] + KGjj[0][4]; // 76
MatrixStiffness_[r1 + 5] = MatrixStiffness_[r1 + 5] + KGjj[0][5]; // 77
// row 7
MatrixStiffness_[r2] = MatrixStiffness_[r2] + KGjj[1][1]; // 84
MatrixStiffness_[r2 + 1] = MatrixStiffness_[r2 + 1] + KGjj[1][2]; // 85
MatrixStiffness_[r2 + 2] = MatrixStiffness_[r2 + 2] + KGjj[1][3]; // 86
MatrixStiffness_[r2 + 3] = MatrixStiffness_[r2 + 3] + KGjj[1][4]; // 87
MatrixStiffness_[r2 + 4] = MatrixStiffness_[r2 + 4] + KGjj[1][5]; // 88
// row 8
MatrixStiffness_[r3] = MatrixStiffness_[r3] + KGjj[2][2]; // 96
MatrixStiffness_[r3 + 1] = MatrixStiffness_[r3 + 1] + KGjj[2][3]; // 97
MatrixStiffness_[r3 + 2] = MatrixStiffness_[r3 + 2] + KGjj[2][4]; // 98
MatrixStiffness_[r3 + 3] = MatrixStiffness_[r3 + 3] + KGjj[2][5]; // 99
// row 9
MatrixStiffness_[r4] = MatrixStiffness_[r4] + KGjj[3][3]; // 108
MatrixStiffness_[r4 + 1] = MatrixStiffness_[r4 + 1] + KGjj[3][4]; // 109
MatrixStiffness_[r4 + 2] = MatrixStiffness_[r4 + 2] + KGjj[3][5]; // 110
// row 10
MatrixStiffness_[r5] = MatrixStiffness_[r5] + KGjj[4][4]; // 120
MatrixStiffness_[r5 + 1] = MatrixStiffness_[r5 + 1] + KGjj[4][5]; // 121
// row 11
MatrixStiffness_[r6] = MatrixStiffness_[r6] + KGjj[5][5]; // 132
}
/**
 * Matrix-vector product r = s * t.
 *
 * Fix: the result array was sized {@code t.length} (the vector length)
 * instead of {@code s.length} (the row count of s), which is wrong — and
 * can throw ArrayIndexOutOfBoundsException — for non-square matrices.
 * For the square (6x6) matrices used elsewhere in this class the behavior
 * is unchanged.
 *
 * @param s matrix, dimensions m x n
 * @param t vector of length n (must equal the column count of s)
 * @return vector of length m with r[f] = sum_c s[f][c] * t[c]
 */
public double[] EBEsMatrizVectorMultiplicar(double[][] s, double[] t) throws JMetalException {
  double[] r = new double[s.length];
  for (int f = 0; f < s.length; f++) { // rows of s
    double acc = 0.0;
    for (int c = 0; c < t.length; c++) { // columns of s / entries of t
      acc += s[f][c] * t[c];
    }
    r[f] = acc;
  }
  return r;
} // end module
/**
 * Matrix product r = s * t.
 *
 * Fix: the column loop previously iterated over {@code s[f].length} (the
 * inner dimension) instead of {@code t[0].length} (the column count of t),
 * so for non-square operands the result was either partially filled or the
 * inner product indexed out of bounds.  Dimensions must conform:
 * columns(s) == rows(t).  Behavior is unchanged for the square (6x6)
 * matrices used throughout this class.
 *
 * @param s left operand, m x n
 * @param t right operand, n x p
 * @return m x p product matrix
 */
public double[][] EBEsMatrizMultiplicar(double[][] s, double[][] t) throws JMetalException {
  double[][] r = new double[s.length][t[0].length];
  for (int f = 0; f < s.length; f++) { // rows of s
    for (int c = 0; c < t[0].length; c++) { // columns of t
      double acc = 0.0;
      for (int q = 0; q < s[f].length; q++) { // inner dimension
        acc += s[f][q] * t[q][c];
      }
      r[f][c] = acc;
    }
  }
  return r;
} // end module
/**
 * Element-wise matrix addition: returns s + t.
 *
 * Fix: the inner loop previously ran over {@code t.length} (the ROW count
 * of t) instead of the column count, mis-sizing the traversal for any
 * non-square input.  Both operands must have identical dimensions;
 * behavior is unchanged for the square (6x6) matrices used here.
 *
 * @param s left operand
 * @param t right operand, same dimensions as s
 * @return new matrix with r[f][c] = s[f][c] + t[f][c]
 */
public double[][] EBEsMatrixAdd(double[][] s, double[][] t) throws JMetalException {
  if (s.length == 0) {
    return new double[0][0]; // empty input: nothing to add
  }
  double[][] r = new double[s.length][s[0].length];
  for (int f = 0; f < s.length; f++) { // rows
    for (int c = 0; c < s[f].length; c++) { // columns
      r[f][c] = s[f][c] + t[f][c];
    }
  }
  return r;
} // end module
/**
 * Element-wise matrix subtraction: returns s - t.
 *
 * Fix: the inner loop previously ran over {@code t.length} (the ROW count
 * of t) instead of the column count, mis-sizing the traversal for any
 * non-square input.  Both operands must have identical dimensions;
 * behavior is unchanged for the square (6x6) matrices used here.
 *
 * @param s left operand
 * @param t right operand, same dimensions as s
 * @return new matrix with r[f][c] = s[f][c] - t[f][c]
 */
public double[][] EBEsMatrixSubtractions(double[][] s, double[][] t) throws JMetalException {
  if (s.length == 0) {
    return new double[0][0]; // empty input: nothing to subtract
  }
  double[][] r = new double[s.length][s[0].length];
  for (int f = 0; f < s.length; f++) { // rows
    for (int c = 0; c < s[f].length; c++) { // columns
      r[f][c] = s[f][c] - t[f][c];
    }
  }
  return r;
} // end module
/**
 * Accumulates, for load hypothesis {@code hi}, the member-end forces of
 * every element projected onto the global axes into {@code Reaction_}.
 *
 * @param hi index of the weight hypothesis
 */
public void EBEsNodesEquilibrium3D(int hi) throws JMetalException {
  // the six degrees of freedom handled at each node, in assembly order
  final int[] dof = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int el = 0; el < Element_.length; el++) {
    // member-end forces at node i and node j of element `el`
    double[] fi = new double[numberOfLibertyDegree_];
    double[] fj = new double[numberOfLibertyDegree_];
    for (int d : dof) {
      fi[d] = Efforti_[d][el][hi];
      fj[d] = Effortj_[d][el][hi];
    }
    // Project the bar forces onto the global axes.
    // NOTE(review): RTij/RpTij/RTji/RpTji are instance fields and are not
    // recomputed per element inside this loop — verify they hold the
    // rotation of element `el` when this method is called.
    double[][] projI = EBEsMatrizMultiplicar(RTij, RpTij);
    double[] globalI = EBEsMatrizVectorMultiplicar(projI, fi);
    double[][] projJ = EBEsMatrizMultiplicar(RTji, RpTji);
    double[] globalJ = EBEsMatrizVectorMultiplicar(projJ, fj);
    // sum the forces converging on node i
    int ni = (int) Element_[el][i_];
    for (int d : dof) {
      Reaction_[numberOfLibertyDegree_ * ni + d][hi] += globalI[d];
    }
    // sum the forces converging on node j
    int nj = (int) Element_[el][j_];
    for (int d : dof) {
      Reaction_[numberOfLibertyDegree_ * nj + d][hi] += globalJ[d];
    }
  } // el
}
/**
 * Member-end forces of every bar in LOCAL coordinates for load hypothesis
 * {@code hi}, computed from the nodal displacement vector {@code Slip}.
 *
 * For each element the local stiffness sub-matrices are rebuilt according
 * to its end-link code {@code Element_[ba][Vij_]} (0 rigid-rigid,
 * 1 rigid-hinged, 10 hinged-rigid, 11 hinged-hinged); on the first
 * iteration of a second-order analysis the geometric stiffness is added.
 * Results go to {@code Efforti_} / {@code Effortj_}.
 *
 * @param hi        weight-hypothesis index
 * @param countIter iteration counter of the (optional) second-order loop
 * @param Slip      displacements indexed [global DOF][hypothesis]
 */
public void EBEsEffortsElements3D(int hi, int countIter, double Slip[][]) throws JMetalException {
// end links: i rigid / j rigid etc., selected below
int i, ni, nj;
for (int ba = 0; ba < numberOfElements_; ba++) {
// NOTE: "case 00" and "case 01" are octal literals (values 0 and 1);
// they match because the link codes are the decimal values 0, 1, 10, 11.
switch ((int) Element_[ba][Vij_]) {
case 00:
EBEsMat3DL_iRig_jRig(ba);
break;
case 01:
EBEsMat3DL_iRig_jArt(ba);
break;
case 10:
EBEsMat3DL_iArt_jRig(ba);
break;
case 11:
EBEsMat3DL_iArt_jArt(ba);
break;
default:
System.out.println("invalid link");
return;
}
// second-order analysis: superpose the geometric stiffness once
if (lSecondOrderGeometric && countIter == 1) {
EBEsMat3DL_SOG(ba);
Kii = EBEsMatrixAdd(Kii, KiiSOG);
Kij = EBEsMatrixAdd(Kij, KijSOG);
Kji = EBEsMatrixAdd(Kji, KjiSOG);
Kjj = EBEsMatrixAdd(Kjj, KjjSOG);
}
double[][] r = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
double[][] s = new double[numberOfLibertyDegree_][numberOfLibertyDegree_];
// di, dj: global displacements of the two end nodes
double[] di = new double[numberOfLibertyDegree_];
double[] dj = new double[numberOfLibertyDegree_];
// eii..ejj: force contributions at each end from each end's displacements
double[] eii = new double[numberOfLibertyDegree_];
double[] eij = new double[numberOfLibertyDegree_];
double[] eji = new double[numberOfLibertyDegree_];
double[] ejj = new double[numberOfLibertyDegree_];
// rotation of the bar from the principal-section system to the local one
EBEsMatRot3DLpSaL(ba);
// rotation of the bar from local to global axes
EBEsMatRot3DLaG(ba);
// gather the computed displacements of both end nodes
for (i = 0; i < numberOfLibertyDegree_; i++) {
ni = (int) Element_[ba][i_];
nj = (int) Element_[ba][j_];
di[i] = Slip[numberOfLibertyDegree_ * ni + i][hi];
dj[i] = Slip[numberOfLibertyDegree_ * nj + i][hi];
} // Next i
// end ii: eii = (Kii * Rpij * Rij) * Di
r = EBEsMatrizMultiplicar(Rpij, Rij);
s = EBEsMatrizMultiplicar(Kii, r);
eii = EBEsMatrizVectorMultiplicar(s, di);
// end ij: eij = (Kij * Rpji * Rji) * Dj
r = EBEsMatrizMultiplicar(Rpji, Rji);
s = EBEsMatrizMultiplicar(Kij, r);
eij = EBEsMatrizVectorMultiplicar(s, dj);
// end ji: eji = (Kji * Rpij * Rij) * Di
r = EBEsMatrizMultiplicar(Rpij, Rij);
s = EBEsMatrizMultiplicar(Kji, r);
eji = EBEsMatrizVectorMultiplicar(s, di);
// end jj: ejj = (Kjj * Rpji * Rji) * Dj
r = EBEsMatrizMultiplicar(Rpji, Rji);
s = EBEsMatrizMultiplicar(Kjj, r);
ejj = EBEsMatrizVectorMultiplicar(s, dj);
// total end forces: superposition of both ends' displacement effects
for (i = 0; i < numberOfLibertyDegree_; i++) {
Efforti_[i][ba][hi] = eii[i] + eij[i];
Effortj_[i][ba][hi] = eji[i] + ejj[i];
} // i
} // ba
} // end module
/**
 * Adds the nodal loads applied directly on restrained degrees of freedom
 * (with sign reversed) into the reactions for load hypothesis {@code hi}.
 *
 * @param hi index of the weight hypothesis
 */
public void EBEsReactions3D(int hi) {
  // the six degrees of freedom, checked in this fixed order
  final int[] dof = {aX_, aY_, aZ_, gX_, gY_, gZ_};
  for (int k = 0; k < numberOfNodesRestricts_; k++) {
    int node = (int) NodeRestrict_[k][0];
    // The restraint code is stored as a number; rebuild it as a string of
    // numberOfLibertyDegree_ digits by left-padding with '0' ('1' at a
    // position means that DOF is restrained).
    StringBuilder code = new StringBuilder(String.valueOf((int) NodeRestrict_[k][1]));
    while (code.length() < numberOfLibertyDegree_) {
      code.insert(0, '0');
    }
    for (int d : dof) {
      if (code.charAt(d) == '1') {
        // load applied directly on a restrained DOF goes into the reaction
        Reaction_[numberOfLibertyDegree_ * node + d][hi] +=
            -PQ[numberOfLibertyDegree_ * node + d][hi];
      }
    }
  } // next k
}
/**
 * Nodal stresses for every element and weight hypothesis, derived from the
 * member-end forces {@code E} (layout [DOF][element][hypothesis], same as
 * {@code Efforti_}).
 *
 * Also (re)initializes and refreshes the per-group extremum trackers
 * (omegaMax_, Nxx*_, Mxz*_, Mxy*_, Strain*_).
 *
 * @param E member-end forces in MN
 * @return Strain[3][element][hypothesis]:
 *         [STRAIN_COMPRESS] normal compressive stress,
 *         [STRAIN_TRACTION] normal tensile stress,
 *         [STRAIN_CUT] tangential (shear + torsion) stress, in MPa
 */
public double[][][] EBEsStrainNode(double[][][] E) throws JMetalException {
double[][][] Strain = new double[3][numberOfElements_][numberOfWeigthHypothesis_];
// z, y: section bounding dimensions along the principal axes
double z, y;
// A: area; Iz, Iy: second moments; It: torsion constant
double A, Iz, Iy, It;
// member-end forces: axial, shears, torsor, bending moments
double Nxx, Qxy, Qxz, Mxx, Mxy, Mxz;
// stresses: normal (axial/bending) and tangential (shear/torsion)
double Sxx, Sxzu, Sxzd, TQxy, TQxz, TTx, TTxy, TTxz;
double Sxyl, Sxyr;
double y1, z1, S1, S2, S3, S4;
// Ay, Az: static moments used by the shear-stress formula
double Ay, Az;
omegaMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
NxxMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
NxxMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
MxzMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
MxzMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
MxyMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
MxyMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
StrainNxxMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
StrainNxxMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
StrainMxzMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
StrainMxzMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
StrainMxyMin_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
StrainMxyMax_ = new double[numberOfGroupElements_][numberOfWeigthHypothesis_];
// see "Calculo de estructuras" VIII.35
for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
for (int ba = 0; ba < numberOfElements_; ba++) {
// group index: section properties are shared per group
int idx = (int) Element_[ba][INDEX_];
y = Groups_[idx][Y_];
z = Groups_[idx][Z_];
A = Groups_[idx][AREA];
Az = Groups_[idx][Az_];
Ay = Groups_[idx][Ay_];
Iz = Groups_[idx][Iz_];
Iy = Groups_[idx][Iy_];
It = Groups_[idx][It_];
// FORCES in MN (Mega Newton = 1 kN / 1000)
// axial force
Nxx = E[aX_][ba][hi];
// shear force in y
Qxy = E[aY_][ba][hi];
// shear force in z
Qxz = E[aZ_][ba][hi];
// torsor moment about x
Mxx = E[gX_][ba][hi];
// bending moment about y
Mxy = E[gY_][ba][hi];
// bending moment about z
Mxz = E[gZ_][ba][hi];
// NORMAL STRESSES in MPa (1 MPa = 1,000,000 Pa = 1,000 kN/m2)
// normal stress in x from the axial force, amplified by the buckling
// coefficient omega
double omega = BucklingOmega(Nxx, Groups_[idx], Element_[ba]);
Sxx = omega * Nxx / A;
omegaMax_[idx][hi] = Math.max(omega, omegaMax_[idx][hi]);
if (Math.signum(Sxx) > 0) {
// NOTE(review): E[aX_][ba][hi] == Nxx here, so this max() compares the
// value with itself — probably meant to compare against the stored
// NxxMax_[idx][hi]; confirm (same remark for the min() branch below).
NxxMax_[idx][hi] = Math.max(E[aX_][ba][hi], Nxx);
StrainNxxMax_[idx][hi] = Math.max(Sxx, StrainNxxMax_[idx][hi]);
} else {
NxxMin_[idx][hi] = Math.min(E[aX_][ba][hi], Nxx);
StrainNxxMin_[idx][hi] = Math.min(Sxx, StrainNxxMin_[idx][hi]);
}
// Vij_ == 11 means hinged-hinged (truss member): no bending or shear.
if (Element_[ba][Vij_] != 11) {
// bending stresses about the z axis, top and bottom fibers
y1 = Groups_[idx][uY_];
Sxzu = Mxz * y1 / Iz;
Sxzd = -Sxzu;
// bending stresses about the y axis, left and right fibers
z1 = Groups_[idx][lZ_];
Sxyl = Mxy * z1 / Iy;
Sxyr = -Sxyl;
// with negative moments: keep the most compressive fiber
S3 = Sxzu;
StrainMxzMin_[idx][hi] = Math.min(S3, StrainMxzMin_[idx][hi]);
if (S3 > Sxzd) {
S3 = Sxzd;
StrainMxzMin_[idx][hi] = Math.min(S3, StrainMxzMin_[idx][hi]);
MxzMin_[idx][hi] = Math.min(-Mxz, MxzMin_[idx][hi]);
}
S4 = Sxyl;
StrainMxyMin_[idx][hi] = Math.min(S4, StrainMxyMin_[idx][hi]);
if (S4 > Sxyr) {
S4 = Sxyr;
StrainMxyMin_[idx][hi] = Math.min(S4, StrainMxyMin_[idx][hi]);
MxyMin_[idx][hi] = Math.min(-Mxy, MxyMin_[idx][hi]);
}
// total negative (compressive) normal stress
Strain[STRAIN_COMPRESS][ba][hi] = Sxx + S3 + S4;
// with positive moments: keep the most tensile fiber
S1 = Sxzu;
StrainMxzMax_[idx][hi] = Math.max(S1, StrainMxzMax_[idx][hi]);
if (S1 < Sxzd) {
S1 = Sxzd;
StrainMxzMax_[idx][hi] = Math.max(S1, StrainMxzMax_[idx][hi]);
MxzMax_[idx][hi] = Math.max(Mxz, MxzMax_[idx][hi]);
}
S2 = Sxyl;
StrainMxyMax_[idx][hi] = Math.max(S2, StrainMxyMax_[idx][hi]);
if (S2 < Sxyr) {
S2 = Sxyr;
StrainMxyMax_[idx][hi] = Math.max(S2, StrainMxyMax_[idx][hi]);
MxyMax_[idx][hi] = Math.max(Mxy, MxyMax_[idx][hi]);
}
// total positive (tensile) normal stress
Strain[STRAIN_TRACTION][ba][hi] = Sxx + S1 + S2;
// TANGENTIAL STRESSES
// shear stress in y (static-moment formula; assumes z is the
// effective width — TODO confirm, wall chords are neglected)
TQxy = Qxy * Az / (z * Iz);
// shear stress in z (same formula about the other axis)
TQxz = Qxz * Ay / (y * Iy);
// torsion stress: the larger stress appears on the shorter side
if (z / y >= 1.0) {
TTxz = Math.abs(Mxx) * y / It;
TTxy = 0.9 * TTxz;
} else {
TTxy = Math.abs(Mxx) * z / It;
TTxz = 0.9 * TTxy;
}
TTx = TTxz;
if (TTx < TTxy) {
TTx = TTxy;
}
// total tangential stress: shear magnitudes plus torsion
Strain[STRAIN_CUT][ba][hi] = Math.abs(TQxz) + Math.abs(TQxy) + TTx;
} else {
// hinged-hinged element: axial stress only
Strain[STRAIN_CUT][ba][hi] = 0.0;
if (E[aX_][ba][hi] < 0.0) {
Strain[STRAIN_COMPRESS][ba][hi] = Sxx;
} else if (E[aX_][ba][hi] > 0.0) {
Strain[STRAIN_TRACTION][ba][hi] = Sxx;
} else {
Strain[STRAIN_COMPRESS][ba][hi] = 0.0;
Strain[STRAIN_TRACTION][ba][hi] = 0.0;
}
// NOTE(review): the compressive tracker is named ...Min_ but is updated
// with Math.max here (unlike the rigid branch above, which uses
// Math.min) — verify which extremum is intended.
StrainNxxMin_[idx][hi] =
Math.max(Strain[STRAIN_COMPRESS][ba][hi], StrainNxxMin_[idx][hi]);
StrainNxxMax_[idx][hi] =
Math.max(Strain[STRAIN_TRACTION][ba][hi], StrainNxxMax_[idx][hi]);
}
} // next ba
} // next hi
return Strain;
}
/**
 * Buckling magnification factor omega for a compressed member.
 *
 * Returns 1.0 unless the axial force is compressive ({@code Nxx < 0}),
 * the group area is positive and the {@code lBuckling} flag is set.
 * Slenderness is evaluated for both principal axes and a polynomial
 * omega(lambda) fit is applied per material; only materials 2 (F-24
 * steel) and 20 (hard wood) are implemented — the other listed materials
 * either do nothing or print an error and fall through with w = 1.0.
 *
 * @param Nxx axial force (negative = compression)
 * @param G   group property row (Groups_[idx]); NOTE: mutated in place —
 *            non-positive buckling lengths BLijY_/BLijZ_ are reset to 1.0
 * @param B   element row (Element_[ba]); B[L_] is the element length
 * @return omega >= 1.0 (capped at 25.0 for lambda > 150)
 */
public double BucklingOmega(double Nxx, double[] G, double[] B) throws JMetalException {
double w = 1.0; // buckling coefficient (no amplification by default)
if (Nxx < 0.0 && G[AREA] > 0.0 && lBuckling) {
// defaulting: a non-positive buckling-length factor is replaced by 1.0
if (G[BLijY_] <= 0.0) G[BLijY_] = 1.0;
if (G[BLijZ_] <= 0.0) G[BLijZ_] = 1.0;
// NOTE(review): the radius of gyration is normally sqrt(I/A); here iy
// and iz are computed as I/A — confirm the definition expected by the
// lambda tables below.
// radius of gyration about the y axis
double iy = G[Iy_] / G[AREA];
// radius of gyration about the z axis
double iz = G[Iz_] / G[AREA];
// slenderness about the y axis
double lambdao = B[L_] * G[BLijY_] / iy;
// NOTE(review): the original comment says "maximum" slenderness between
// the y and z axes, but Math.min keeps the SMALLER (non-conservative)
// value — verify which is intended.
lambdao = Math.min(lambdao, B[L_] * G[BLijZ_] / iz);
if (G[TypeMaterial_] == 0) {
// steel cable, stress 125 MN/m2: intentionally no-op (error message
// left commented out)
} else if (G[TypeMaterial_] == 1) // F-22 steel 22 MN/m2
{
System.out.println(
"Error in "
+ G[INDEX_]
+ " group, the material number "
+ G[TypeMaterial_]
+ " is not implemented");
} else if (G[TypeMaterial_] == 2) // F-24 steel 24 MN/m2
{
// Buckling coefficient, table Lambda-Omega, fitted as a quartic
// polynomial (the Domke-table fit is kept commented out in history).
double lambda = lambdao;
if (lambda <= 150) {
w =
1.113
+ 0.0070516 * lambda
- 0.000132108 * Math.pow(lambda, 2.0)
+ 0.000002106132 * Math.pow(lambda, 3.0)
- 0.00000000397368332151 * Math.pow(lambda, 4.0);
} else w = 25.0; // very slender member: capped amplification
} else if (G[TypeMaterial_] == 3) { // F-32 steel 320 N/mm2
System.out.println(
"Error in "
+ G[INDEX_]
+ " group, the material number "
+ G[TypeMaterial_]
+ " is not implemented");
} else if (G[TypeMaterial_] == 10) { // reinforced concrete 7 MN/m2
System.out.println(
"Error in "
+ G[INDEX_]
+ " group, the material number "
+ G[TypeMaterial_]
+ " is not implemented");
} else if (G[TypeMaterial_] == 12) { // H-17 reinforced concrete 17 MN/m2
System.out.println(
"Error in "
+ G[INDEX_]
+ " group, the material number "
+ G[TypeMaterial_]
+ " is not implemented");
} else if (G[TypeMaterial_] == 14) { // H-21 reinforced concrete 21 MN/m2
// intentionally no-op (error message left commented out)
} else if (G[TypeMaterial_] == 20) // wood, hard and half-hard
{
// Buckling coefficient, table Lambda-Omega (quartic polynomial fit)
double lambda = lambdao;
if (lambda <= 150) {
w =
1.048
+ 0.005524 * lambda
- 0.000101666 * Math.pow(lambda, 2.0)
+ 0.00000301687 * Math.pow(lambda, 3.0)
- 0.000000004366246 * Math.pow(lambda, 4.0);
} else w = 25.0; // very slender member: capped amplification
}
}
return w;
}
/**
 * Fills {@code Groups_[gr]} with the geometric properties of a SOLID
 * circular cross-section of diameter {@code d}.
 *
 * @param gr group index
 * @param d  section diameter
 */
public void EBEsTransversalSectionCircular(int gr, double d) throws JMetalException {
  double radius = d / 2.0;
  // bounding dimensions along the principal local axes
  Groups_[gr][Y_] = d;
  Groups_[gr][Z_] = d;
  // a solid section has no wall thickness
  Groups_[gr][eY_] = 0.0;
  Groups_[gr][eZ_] = 0.0;
  // extreme-fiber distances (symmetric about both axes)
  Groups_[gr][uY_] = radius;
  Groups_[gr][dY_] = radius;
  Groups_[gr][lZ_] = radius;
  Groups_[gr][rZ_] = radius;
  // static moment of the half circle about the centroid
  double centroid = 4.0 * radius / (3.0 * Math.PI); // centroid of a half circle
  double halfArea = Math.PI * Math.pow(radius, 2.0) / 2.0;
  Groups_[gr][Az_] = halfArea * Math.pow(centroid, 2.0);
  Groups_[gr][Ay_] = Groups_[gr][Az_]; // circular symmetry
  // cross-section area
  Groups_[gr][AREA] = Math.PI * Math.pow(d, 2.0) / 4.0;
  // second moments of area (equal by symmetry)
  Groups_[gr][Iz_] = Math.PI * Math.pow(d, 4.0) / 64.0;
  Groups_[gr][Iy_] = Groups_[gr][Iz_];
  // polar (torsion) moment of inertia
  Groups_[gr][It_] = Math.PI * Math.pow(d, 4.0) / 32.0;
  Groups_[gr][Iw_] = Groups_[gr][It_];
}
/**
 * Fills {@code Groups_[gr]} with the geometric properties of a HOLLOW
 * circular cross-section (tube) of outer diameter {@code D} and wall
 * thickness {@code e}.
 *
 * @param gr group index
 * @param D  outer diameter
 * @param e  wall thickness
 */
public void EBEsTransversalSectionHoleCircular(int gr, double D, double e)
    throws JMetalException {
  double d = D - 2.0 * e; // inner diameter
  double R = D / 2.0;     // outer radius
  double r = d / 2.0;     // inner radius
  // bounding dimensions along the principal local axes
  Groups_[gr][Y_] = D;
  Groups_[gr][Z_] = D;
  // wall thickness in both directions
  Groups_[gr][eY_] = e;
  Groups_[gr][eZ_] = e;
  // extreme-fiber distances
  Groups_[gr][uY_] = D / 2.0;
  Groups_[gr][dY_] = D / 2.0;
  Groups_[gr][lZ_] = D / 2.0;
  Groups_[gr][rZ_] = D / 2.0;
  // static moment: half outer circle minus half inner circle
  double Yc = 4.0 * R / (3.0 * Math.PI); // centroid of the outer half circle
  double yc = 4.0 * r / (3.0 * Math.PI); // centroid of the inner half circle
  double Am = Math.PI * Math.pow(R, 2.0) / 2.0; // half outer area
  double am = Math.PI * Math.pow(r, 2.0) / 2.0; // half inner area
  Groups_[gr][Az_] = Am * Math.pow(Yc, 2.0) - am * Math.pow(yc, 2.0);
  Groups_[gr][Ay_] = Groups_[gr][Az_]; // circular symmetry
  // cross-section area (annulus)
  Groups_[gr][AREA] = Math.PI / 4.0 * (Math.pow(D, 2.0) - Math.pow(d, 2.0));
  // second moments of area (equal by symmetry)
  Groups_[gr][Iz_] = Math.PI / 64.0 * (Math.pow(D, 4.0) - Math.pow(d, 4.0));
  Groups_[gr][Iy_] = Groups_[gr][Iz_];
  // polar (torsion) moment of inertia
  Groups_[gr][It_] = Math.PI / 32.0 * (Math.pow(D, 4.0) - Math.pow(d, 4.0));
  Groups_[gr][Iw_] = Groups_[gr][It_];
}
public void EBEsTransversalSectionRectangle(int gr, double y, double z) throws JMetalException {
  // Fills Groups_[gr] with the geometric properties of a solid rectangular
  // cross section of height y and width z (local principal axes Y and Z).
  final double halfY = y / 2.0;
  final double halfZ = z / 2.0;
  // overall dimensions along the local principal axes
  Groups_[gr][Y_] = y;
  Groups_[gr][Z_] = z;
  // solid section: no wall thicknesses
  Groups_[gr][eY_] = 0.0;
  Groups_[gr][eZ_] = 0.0;
  // distances from the centroid to the extreme fibers
  Groups_[gr][uY_] = halfY;
  Groups_[gr][dY_] = halfY;
  Groups_[gr][lZ_] = halfZ;
  Groups_[gr][rZ_] = halfZ;
  // static moments about the Y and Z axes
  Groups_[gr][Ay_] = y * halfZ * halfZ / 2.0;
  Groups_[gr][Az_] = z * halfY * halfY / 2.0;
  // cross-section area
  Groups_[gr][AREA] = z * y;
  // bending moments of inertia about Z and Y
  Groups_[gr][Iz_] = z * Math.pow(y, 3.0) / 12.0;
  Groups_[gr][Iy_] = y * Math.pow(z, 3.0) / 12.0;
  // torsion constant, 0.22 * (long side) * (short side)^3 approximation
  Groups_[gr][It_] =
      (z / y >= 1.0) ? 0.22 * z * Math.pow(y, 3.0) : 0.22 * y * Math.pow(z, 3.0);
  Groups_[gr][Iw_] = Groups_[gr][It_];
}
public void EBEsTransversalSectionHoleRectangle(int gr, double y, double z, double ey, double ez)
    throws JMetalException {
  // Fills Groups_[gr] with the geometric properties of a hollow rectangular
  // (box) cross section.
  //   gr: bar-group index into Groups_
  //   y : outer dimension along local axis Y
  //   z : outer dimension along local axis Z
  //   ey: wall thickness of each wall normal to Y
  //   ez: wall thickness of each wall normal to Z
  double yi, zi;
  double as, ys, es, al, yl, el;
  double zl, ae, ze, ee;
  // overall dimensions along the local principal axes
  Groups_[gr][Y_] = y;
  Groups_[gr][Z_] = z;
  // wall thicknesses along the principal local axes
  Groups_[gr][eY_] = ey;
  Groups_[gr][eZ_] = ez;
  // distances from the centroid to the extreme fibers
  Groups_[gr][uY_] = y / 2.0;
  Groups_[gr][dY_] = y / 2.0;
  Groups_[gr][lZ_] = z / 2.0;
  Groups_[gr][rZ_] = z / 2.0;
  // inner (hollow) dimensions
  yi = y - 2 * ey;
  zi = z - 2 * ez;
  // ---- static moment about Y (side chord neglected) ----
  // area of the lateral chord
  al = y * ez;
  // distance from the chord centroid to the section center
  zl = zi / 2.0 + ez / 2.0;
  // contribution of the lateral chord (area * distance^2 convention used
  // throughout this file)
  el = al * Math.pow(zl, 2.0);
  // area of each upper/lower end chord
  ae = ey * zi / 2.0;
  // distance from its centroid to the center
  ze = zi / 4.0;
  // contribution of the two end chords
  ee = 2.0 * ae * Math.pow(ze, 2.0);
  // static moment about Y -> Ay
  Groups_[gr][Ay_] = el + ee;
  // ---- static moment about Z ----
  // area of the top chord
  as = z * ey;
  // distance from its centroid to the center
  ys = yi / 2.0 + ey / 2.0;
  // contribution of the top chord
  es = as * Math.pow(ys, 2.0);
  // area of each lateral half-chord (al is reused here on purpose)
  al = ez * (yi / 2.0);
  // distance from its centroid to the center
  yl = yi / 4.0;
  // contribution of the two lateral chords (el is reused here on purpose)
  el = 2.0 * al * Math.pow(yl, 2.0);
  // static moment about Z -> Az
  Groups_[gr][Az_] = es + el;
  // area of the inner void
  double Ai = zi * yi;
  // gross outer area
  double At = z * y;
  // net (solid) cross-section area
  Groups_[gr][AREA] = At - Ai;
  // bending moment of inertia about Z: outer rectangle minus inner void
  double Iez = z * Math.pow(y, 3.0) / 12.0;
  double Iiz = zi * Math.pow(yi, 3.0) / 12.0;
  Groups_[gr][Iz_] = Iez - Iiz;
  // bending moment of inertia about Y: outer rectangle minus inner void
  double Iey = y * Math.pow(z, 3) / 12;
  double Iiy = yi * Math.pow(zi, 3) / 12;
  Groups_[gr][Iy_] = Iey - Iiy;
  // torsion constant: average of a thin-walled open-section estimate (It1)
  // and the polar moment Iz + Iy (It2)
  // NOTE(review): 1.3 * 1 / 3 evaluates as (1.3 * 1) / 3 — apparently a
  // 1.3 correction factor on the 1/3 thin-wall formula; confirm.
  double It1 = 1.3 * 1 / 3 * (2 * z * Math.pow(ey, 3) + 2 * yi * Math.pow(ez, 3));
  double It2 = Groups_[gr][Iz_] + Groups_[gr][Iy_];
  Groups_[gr][It_] = (It1 + It2) / 2;
  Groups_[gr][Iw_] = Groups_[gr][It_];
}
public void EBEsTransversalSection_I_Single(int gr, double y, double z, double ey, double ez)
    throws JMetalException {
  // Fills Groups_[gr] with the geometric properties of a single I-shaped
  // cross section. The profile is oriented with the web along the local Y
  // axis and the flanges parallel to Z.
  //   gr: bar-group index into Groups_
  //   y : total height along local axis Y
  //   z : flange width along local axis Z
  //   ey: flange thickness (dimension along Y)
  //   ez: web thickness (dimension along Z)
  double yi, zi;
  double as, ys, es, al, yl, el;
  double zl, ae, ze, ee;
  // overall dimensions along the local principal axes
  Groups_[gr][Y_] = y;
  Groups_[gr][Z_] = z;
  // flange and web thicknesses
  Groups_[gr][eY_] = ey;
  Groups_[gr][eZ_] = ez;
  // distances from the centroid to the extreme fibers
  // NOTE(review): lZ_/rZ_ are set to ez/2 (the web), not z/2 (the flange
  // tips) — verify this is what the stress checks expect.
  Groups_[gr][uY_] = y / 2.0;
  Groups_[gr][dY_] = y / 2.0;
  Groups_[gr][lZ_] = ez / 2.0;
  Groups_[gr][rZ_] = ez / 2.0;
  // dimensions of the virtual hollow regions between the flanges
  yi = y - 2 * ey;
  zi = z - ez;
  // ---- static moment about Y (flange thickness neglected) ----
  // web area
  al = yi * ez;
  // distance from the web centroid to the full-section centroid
  zl = ez / 2;
  // web contribution (area * distance^2 convention used in this file)
  el = al * Math.pow(zl, 2.0);
  // combined area of the two half flanges
  ae = ey * z;
  // distance from their centroid to the center
  ze = z / 4.0;
  // flange contribution
  ee = ae * Math.pow(ze, 2.0);
  // total static moment about Y -> Ay
  Groups_[gr][Ay_] = el + ee;
  // ---- static moment about Z ----
  // top flange area
  as = z * ey;
  // distance from its centroid to the center
  ys = yi / 2.0 + ey / 2.0;
  // top flange contribution
  es = as * Math.pow(ys, 2.0);
  // half-web area (al reused on purpose)
  al = ez * (yi / 2.0);
  // distance from its centroid to the center
  yl = yi / 4.0;
  // web contribution (el reused on purpose)
  el = al * Math.pow(yl, 2.0);
  // static moment about Z -> Az
  Groups_[gr][Az_] = es + el;
  // area of the two void regions beside the web
  double Ai = zi * yi;
  // gross bounding-rectangle area
  double At = z * y;
  // net (solid) cross-section area
  Groups_[gr][AREA] = At - Ai;
  // bending moment of inertia about Z: bounding rectangle minus voids
  double Iez = z * Math.pow(y, 3.0) / 12.0;
  double Iiz = zi * Math.pow(yi, 3.0) / 12.0;
  Groups_[gr][Iz_] = Iez - Iiz;
  // bending moment of inertia about Y
  // NOTE(review): only the web (thickness ez over the full height y) is
  // counted; the flanges' z^3 contribution is ignored — confirm this
  // simplification is intended.
  Groups_[gr][Iy_] = y * Math.pow(ez, 3.0) / 12.0;
  // torsion constant, thin-walled open-section 1/3 * sum(b*t^3) formula
  Groups_[gr][It_] = (2 * z * Math.pow(ey, 3.0) + yi * Math.pow(ez, 3.0)) / 3.0;
  Groups_[gr][Iw_] = Groups_[gr][It_];
}
// Double-I cross section: unimplemented placeholder — leaves Groups_[gr] untouched.
public void EBEsTransversalSection_I_Double(int gr, double y, double z, double ey, double ez)
    throws JMetalException {}
// Single-H cross section: unimplemented placeholder — leaves Groups_[gr] untouched.
public void EBEsTransversalSection_H_Single(int gr, double y, double z, double ey, double ez)
    throws JMetalException {}
// Double-H cross section: unimplemented placeholder — leaves Groups_[gr] untouched.
public void EBEsTransversalSection_H_Double(int gr, double y, double z, double ey, double ez)
    throws JMetalException {}
// Single-L (angle) cross section: unimplemented placeholder — leaves Groups_[gr] untouched.
public void EBEsTransversalSection_L_Single(int gr, double y, double z, double ey, double ez)
    throws JMetalException {}
public void EBEsTransversalSection_L_Double(int gr, double y, double z, double ey, double ez)
    throws JMetalException {
  // Double-L (back-to-back angles) cross section: the legs lie along the
  // local Y axis, separated by a plate of thickness eZ, with the flanges
  // parallel to Z pointing downward.
  //   gr: bar-group index into Groups_
  //   y : length along local principal axis Y
  //   z : length along local principal axis Z
  //   ey: thickness along Y (flange thickness)
  //   ez: thickness along Z (leg thickness)
  // WARNING(review): this routine is only partially implemented — the
  // static moments and every moment of inertia are hard-coded to 0 below,
  // so any bending/torsion check on this section type will see zeros.
  double yi, zi;
  // overall dimensions along the local principal axes
  Groups_[gr][Y_] = y;
  Groups_[gr][Z_] = z;
  // thicknesses along the principal local axes
  Groups_[gr][eY_] = ey;
  Groups_[gr][eZ_] = ez;
  // inner (hollow) dimensions
  yi = y - ey;
  zi = z - 2 * ez;
  // area of the hollow region
  double Ai = zi * yi;
  // gross bounding-rectangle area
  double At = z * y;
  // net (solid) cross-section area
  Groups_[gr][AREA] = At - Ai;
  // distances from the center of gravity to the extreme fibers
  Groups_[gr][dY_] = 1 / 2.0 * (ez * Math.pow(y, 2) + zi * Math.pow(ey, 2)) / (ez * y + zi * ey);
  Groups_[gr][uY_] = y - Groups_[gr][dY_];
  Groups_[gr][rZ_] = z / 2.0;
  Groups_[gr][lZ_] = z / 2.0;
  // static moment about Y -> Ay (not yet computed)
  Groups_[gr][Ay_] = 0;
  // static moment about Z -> Az (not yet computed)
  Groups_[gr][Az_] = 0;
  Groups_[gr][Iz_] = 0;
  Groups_[gr][Iy_] = 0;
  Groups_[gr][It_] = 0;
  Groups_[gr][Iw_] = 0;
}
// Single-T cross section: unimplemented placeholder — leaves Groups_[ba] untouched.
public void EBEsTransversalSection_T_Single(int ba, double y, double z, double ey, double ez)
    throws JMetalException {}
// Double-T cross section: unimplemented placeholder — leaves Groups_[ba] untouched.
public void EBEsTransversalSection_T_Double(int ba, double y, double z, double ey, double ez)
    throws JMetalException {}
/*
public void EbesMutation(int groupId, int hi, Variable[] x) {
if (StrainNxxMin_ != null && StrainNxxMax_ != null)
{
// alturas necearias por tres esfuerzos distintos
double[] Y = {0.0, 0.0, 0.0};
// área de la sección por tensión de compresión
// participaciones de las tensiones normales referidas al esfuerzo axil
double ratioStrainMinNxx = StrainNxxMin_[groupId][hi] / Groups_[groupId][COMPRESSION];
// área de la sección por tensión de compresión
double Ac = omegaMax_[groupId][hi] * NxxMin_[groupId][hi] / Groups_[groupId][COMPRESSION] * ratioStrainMinNxx;
// participaciones de las tensiones normales referidas al esfuerzo axil
double ratioStrainMaxNxx = StrainNxxMax_[groupId][hi] / Groups_[groupId][STRESS];
// área de la sección por tensión de tracción
double At = omegaMax_[groupId][hi] * NxxMax_[groupId][hi] / Groups_[groupId][STRESS] * ratioStrainMaxNxx;
// área máxima necesaria
double A = Math.max(Ac, At);
A *= 10000;
// altura necesaria en función del area y del esfuerzo normal coincidente con el eje x
Y[0] = Interpolation_I_Single_Y_func_Area_(A);
// participaciones de las tensiones normales mÃnimas referidas al momento flector respecto al eje z
double ratioStrainMinMxz = StrainMxzMin_[groupId][hi] / Groups_[groupId][COMPRESSION];
// módulo resistente por tensión de compresión respecto al momento flector Mxz
double Wzc = MxzMin_[groupId][hi] / Groups_[groupId][COMPRESSION] * ratioStrainMinMxz;
// participaciones de las tensiones normales máximas referidas al momento flector respecto al eje z
double ratioStrainMaxMxz = StrainMxzMax_[groupId][hi] / Groups_[groupId][STRESS];
// módulo resistente por tensión de tracción respecto al momento flector Mxz
double Wzt = MxzMax_[groupId][hi] / Groups_[groupId][STRESS] * ratioStrainMaxMxz;
// módulo resistente máximo necesario
double Wxz = Math.max(Wzc, Wzt);
// conversión de unidades de medidas a cm3
Wxz *= 1000000;
// altura necesaria en función del módulo resistente y del momento flector respecto al eje z
Y[1] = Interpolation_I_Single_Y_func_Wxz_(Wxz);
// participaciones de las tensiones normales mÃnimas referidas al momento flector respecto al eje y
double ratioStrainMinMxy = StrainMxyMin_[groupId][hi] / Groups_[groupId][COMPRESSION];
// módulo resistente por tensión de compresión respecto al momento flector Mxz
double Wyc = MxyMin_[groupId][hi] / Groups_[groupId][COMPRESSION] * ratioStrainMinMxy;
// participaciones de las tensiones normales máximas referidas al momento flector respecto al eje y
double ratioStrainMaxMxy = StrainMxyMax_[groupId][hi] / Groups_[groupId][STRESS];
// módulo resistente por tensión de tracción respecto al momento flector Mxz
double Wyt = MxyMax_[groupId][hi] / Groups_[groupId][STRESS] * ratioStrainMaxMxy;
// módulo resistente máximo necesario
double Wxy = Math.max(Wyc, Wyt);
// conversión de unidades de medidas a cm3
Wxy *= 1000000;
// altura necesaria en función del módulo resistente y del momento flector respecto al eje z
Y[2] = Interpolation_I_Single_Y_func_Wxy_(Wxy);
// altura máxima necesaria
double y = 0.0;
for (int i = 1; i < Y.length; i++) {
y = Math.max(y, Y[i]);
}
double z = Interpolation_I_Single_Z_func_Y_(y);
double ey = Interpolation_I_Single_ey_func_Y_(y);
double ez = Interpolation_I_Single_ez_func_Y_(y);
int variableIndex = getVariablePosition(groupId);
// conversión de unidades de medidas al sistema de cálculo
y *= 0.001;
if (y<x[variableIndex].getLowerBound())
y=x[variableIndex].getLowerBound();
if (y>x[variableIndex].getUpperBound())
y=x[variableIndex].getUpperBound();
z *= 0.001;
ey *= 0.001;
ez *= 0.001;
x[variableIndex].setValue(y);
x[variableIndex+1].setValue(z);
x[variableIndex+2].setValue(ey);
x[variableIndex+3].setValue(ez);
}
}
*/
public double Interpolation_I_Single_Y_func_Area_(double A) {
  // Empirical fit: section height Y (mm) required to provide a given
  // cross-section area A (cm^2) for a single-I profile.
  // The cubic polynomial is only valid inside the fitted range
  // (5, 1000) cm^2; outside it the method returns 0.
  if (A <= 5.0 || A >= 1000.0) {
    return 0;
  }
  return 0.000003 * Math.pow(A, 3) - 0.0063 * Math.pow(A, 2) + 4.1118 * A + 75.414;
}
public double Interpolation_I_Single_Y_func_Wxy_(double Wxy) {
  // Empirical fit: section height Y (mm) required to provide a given
  // resistant modulus Wxy (cm^3) about the local y axis.
  // The cubic polynomial is only valid inside the fitted range
  // (1, 400) cm^3; outside it the method returns 0.
  if (Wxy <= 1.0 || Wxy >= 400.0) {
    return 0;
  }
  return 0.0003 * Math.pow(Wxy, 3) - 0.119 * Math.pow(Wxy, 2) + 16.539 * Wxy + 136.59;
}
public double Interpolation_I_Single_Y_func_Wxz_(double Wxz) {
  // Empirical fit: section height Y (mm) required to provide a given
  // resistant modulus Wxz (cm^3) about the local z axis.
  // The power-law fit is only valid inside the fitted range
  // (3, 3500) cm^3; outside it the method returns 0.
  if (Wxz <= 3.0 || Wxz >= 3500.0) {
    return 0;
  }
  return 61.9 * Math.pow(Wxz, 0.3849);
}
public double Interpolation_I_Single_Z_func_Y_(double Y) {
  // Empirical fit: flange width Z (mm) as a function of section height
  // Y (mm) for a single-I profile.
  // The quadratic fit is only valid for Y in (50, 2000) mm; outside that
  // range the method returns 0.
  if (Y <= 50.0 || Y >= 2000.0) {
    return 0;
  }
  return -0.0002 * Math.pow(Y, 2.0) + 0.4339 * Y + 29.849;
}
public double Interpolation_I_Single_ez_func_Y_(double Y) {
  // Empirical fit: web thickness ez (mm) as a function of section height
  // Y (mm) for a single-I profile.
  // The cubic fit is only valid for Y in (50, 2000) mm; outside that
  // range the method returns 0.
  if (Y <= 50.0 || Y >= 2000.0) {
    return 0;
  }
  return 7E-08 * Math.pow(Y, 3.0) - 0.0001 * Math.pow(Y, 2.0) + 0.0632 * Y - 3.3817;
}
public double Interpolation_I_Single_ey_func_Y_(double Y) {
  // Empirical fit: flange thickness ey (mm) as a function of section
  // height Y (mm) for a single-I profile.
  // The cubic fit is only valid for Y in (50, 2000) mm; outside that
  // range the method returns 0.
  if (Y <= 50.0 || Y >= 2000.0) {
    return 0;
  }
  return 1E-07 * Math.pow(Y, 3.0) - 0.0002 * Math.pow(Y, 2.0) + 0.1014 * Y - 4.5708;
}
public void EBEsStrainMaxWhitElement() throws JMetalException {
  // For every load hypothesis, records per ELEMENT the maximum normal
  // strain (StrainMax_) and maximum absolute shear strain (StrainCutMax_)
  // over the two element ends i and j and both normal components
  // (indices 0 and 1 of Straini_/Strainj_).
  // NOTE(review): only row 0 is explicitly seeded; for ba > 0 the
  // comparisons run against whatever StrainMax_/StrainCutMax_ already
  // contain (Java zero-initializes arrays, or values from a previous
  // call) — confirm this is the intended baseline.
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    StrainMax_[0][hi] = Straini_[0][0][hi]; // seed with normal strain at end i of element 0
    StrainCutMax_[0][hi] = Math.abs(Straini_[2][0][hi]); // seed with shear strain at end i
    for (int ba = 0; ba < numberOfElements_; ba++) {
      // NORMAL STRAINS
      // component 0 at end i
      if (StrainMax_[ba][hi] < Straini_[0][ba][hi]) {
        StrainMax_[ba][hi] = Straini_[0][ba][hi];
      }
      // component 0 at end j
      if (StrainMax_[ba][hi] < Strainj_[0][ba][hi]) {
        StrainMax_[ba][hi] = Strainj_[0][ba][hi];
      }
      // component 1 at end i
      if (StrainMax_[ba][hi] < Straini_[1][ba][hi]) {
        StrainMax_[ba][hi] = Straini_[1][ba][hi];
      }
      // component 1 at end j
      if (StrainMax_[ba][hi] < Strainj_[1][ba][hi]) {
        StrainMax_[ba][hi] = Strainj_[1][ba][hi];
      }
      // TANGENTIAL (SHEAR) STRAINS — compared in absolute value
      // end i
      if (StrainCutMax_[ba][hi] < Math.abs(Straini_[2][ba][hi])) {
        StrainCutMax_[ba][hi] = Math.abs(Straini_[2][ba][hi]);
      }
      // end j
      if (StrainCutMax_[ba][hi] < Math.abs(Strainj_[2][ba][hi])) {
        StrainCutMax_[ba][hi] = Math.abs(Strainj_[2][ba][hi]);
      }
    }
  }
}
public void EBEsStrainMinWhitElement() throws JMetalException {
  // For every load hypothesis, records per ELEMENT the minimum normal
  // strain (StrainMin_) over the two element ends i and j and both normal
  // components (indices 0 and 1 of Straini_/Strainj_).
  // NOTE(review): only row 0 is explicitly seeded; for ba > 0 the
  // comparisons run against the existing contents of StrainMin_ — with a
  // zero-initialized array only negative strains can be captured. Confirm
  // this matches the intended semantics (see EBEsStrainMaxWhitElement).
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    // seed with the normal strain at end i of element 0
    StrainMin_[0][hi] = Straini_[0][0][hi];
    for (int ba = 0; ba < numberOfElements_; ba++) {
      // NORMAL STRAINS
      // component 0 at end i
      if (StrainMin_[ba][hi] > Straini_[0][ba][hi]) {
        StrainMin_[ba][hi] = Straini_[0][ba][hi];
      }
      // component 0 at end j
      if (StrainMin_[ba][hi] > Strainj_[0][ba][hi]) {
        StrainMin_[ba][hi] = Strainj_[0][ba][hi];
      }
      // component 1 at end i
      if (StrainMin_[ba][hi] > Straini_[1][ba][hi]) {
        StrainMin_[ba][hi] = Straini_[1][ba][hi];
      }
      // component 1 at end j
      if (StrainMin_[ba][hi] > Strainj_[1][ba][hi]) {
        StrainMin_[ba][hi] = Strainj_[1][ba][hi];
      }
    }
  }
}
public void EBEsStrainMaxWhitGroup() throws JMetalException {
  // For every load hypothesis, records per bar GROUP the maximum normal
  // strain (StrainMax_) and maximum absolute shear strain (StrainCutMax_)
  // over all elements of the group and both element ends.
  // The group index comes from Element_[ba][INDEX_].
  // NOTE(review): the seed for StrainCutMax_[0] is NOT taken in absolute
  // value here, unlike EBEsStrainMaxWhitElement — confirm whether that
  // asymmetry is intentional.
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    StrainMax_[0][hi] = Straini_[0][0][hi]; // seed: normal strain at end i of element 0
    StrainCutMax_[0][hi] = Straini_[2][0][hi]; // seed: shear strain at end i of element 0
    for (int ba = 0; ba < numberOfElements_; ba++) {
      // group index of element ba
      int idx = (int) Element_[ba][INDEX_];
      // NORMAL STRAINS
      // component 0 at end i
      if (StrainMax_[idx][hi] < Straini_[0][ba][hi]) {
        StrainMax_[idx][hi] = Straini_[0][ba][hi];
      }
      // component 0 at end j
      if (StrainMax_[idx][hi] < Strainj_[0][ba][hi]) {
        StrainMax_[idx][hi] = Strainj_[0][ba][hi];
      }
      // component 1 at end i
      if (StrainMax_[idx][hi] < Straini_[1][ba][hi]) {
        StrainMax_[idx][hi] = Straini_[1][ba][hi];
      }
      // component 1 at end j
      if (StrainMax_[idx][hi] < Strainj_[1][ba][hi]) {
        StrainMax_[idx][hi] = Strainj_[1][ba][hi];
      }
      // TANGENTIAL (SHEAR) STRAINS — compared in absolute value
      // end i
      if (StrainCutMax_[idx][hi] < Math.abs(Straini_[2][ba][hi])) {
        StrainCutMax_[idx][hi] = Math.abs(Straini_[2][ba][hi]);
      }
      // end j
      if (StrainCutMax_[idx][hi] < Math.abs(Strainj_[2][ba][hi])) {
        StrainCutMax_[idx][hi] = Math.abs(Strainj_[2][ba][hi]);
      }
    }
  }
}
public void EBEsStrainMinWhitGroup() throws JMetalException {
  // For every load hypothesis, records per bar GROUP the minimum normal
  // strain (StrainMin_) over all elements of the group and both element
  // ends. The group index comes from Element_[ba][INDEX_].
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    // seed with the normal strain at end i of element 0
    StrainMin_[0][hi] = Straini_[0][0][hi];
    for (int ba = 0; ba < numberOfElements_; ba++) {
      // group index of element ba
      int idx = (int) Element_[ba][INDEX_];
      // NORMAL STRAINS
      // component 0 at end i
      if (StrainMin_[idx][hi] > Straini_[0][ba][hi]) {
        StrainMin_[idx][hi] = Straini_[0][ba][hi];
      }
      // component 0 at end j
      if (StrainMin_[idx][hi] > Strainj_[0][ba][hi]) {
        StrainMin_[idx][hi] = Strainj_[0][ba][hi];
      }
      // component 1 at end i
      if (StrainMin_[idx][hi] > Straini_[1][ba][hi]) {
        StrainMin_[idx][hi] = Straini_[1][ba][hi];
      }
      // component 1 at end j
      if (StrainMin_[idx][hi] > Strainj_[1][ba][hi]) {
        StrainMin_[idx][hi] = Strainj_[1][ba][hi];
      }
    }
  }
}
public void EBEsStrainResidualVerication() throws JMetalException {
  // Accumulates, per load hypothesis, the absolute residuals between the
  // extreme strains reached by each group and the admissible limits
  // stored in Groups_:
  //   StrainResidualMax_[hi] — max normal strain vs. traction limit (STRESS)
  //   StrainResidualMin_[hi] — min normal strain vs. compression limit
  //   StrainResidualCut_[hi] — max shear strain vs. shear limit (STRESS_CUT)
  // Entries whose extreme strain is exactly 0.0 are skipped.
  // The original Math.sqrt(Math.pow(x, 2.0)) is replaced by the equivalent
  // (and exact) Math.abs(x).
  for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
    for (int gr = 0; gr < numberOfGroupElements_; gr++) {
      // NOTE(review): gr is a GROUP index, yet it is used to index
      // Element_ (whose rows are elements) to look up the group limits —
      // verify this mapping; it only coincides when element gr belongs to
      // group gr.
      int idx = (int) Element_[gr][INDEX_];
      // residual of normal stresses against the traction limit
      if (StrainMax_[gr][hi] != 0.0) {
        StrainResidualMax_[hi] += Math.abs(StrainMax_[gr][hi] - Groups_[idx][STRESS]);
      }
      // residual of normal stresses against the compression limit
      if (StrainMin_[gr][hi] != 0.0) {
        StrainResidualMin_[hi] += Math.abs(Groups_[idx][COMPRESSION] - StrainMin_[gr][hi]);
      }
      // residual of tangential (shear) stresses
      if (StrainCutMax_[gr][hi] != 0.0) {
        StrainResidualCut_[hi] += Math.abs(StrainCutMax_[gr][hi] - Groups_[idx][STRESS_CUT]);
      }
    }
  }
}
public void EBEsPrintArchTxtElements() throws JMetalException {
  // Writes one row per bar group to "EBEs - Groups Elements.txt" with the
  // group's cross-section geometry: dimensions, wall thicknesses,
  // extreme-fiber distances, area, static moments and moments of inertia.
  // BUG FIX: the header lists 15 columns (including the area "A"), but the
  // original printf emitted only 14 values and never printed
  // Groups_[gr][AREA], shifting every column from "A" onward. The missing
  // %9.6f specifier and AREA argument are added here.
  // try-with-resources guarantees the stream is closed even on failure.
  try (PrintStream ps = new PrintStream("EBEs - Groups Elements.txt")) {
    ps.printf(
        "Groups  Y    Z   eY_  eZ_  uY   dY  lZ   rZ    A    Az     Ay    Iz    Iy    Ip");
    ps.println();
    ps.printf("-----------------------------------------------------------------------------");
    ps.println();
    for (int gr = 0; gr < Groups_.length; gr++) {
      ps.printf(
          "%4d %6.3f %6.3f %7.4f %7.4f %6.3f %6.3f %6.3f %6.3f %9.6f %9.6f %9.6f %9.6f %9.6f %9.6f",
          gr,
          Groups_[gr][Y_],
          Groups_[gr][Z_],
          Groups_[gr][eY_],
          Groups_[gr][eZ_],
          Groups_[gr][uY_],
          Groups_[gr][dY_],
          Groups_[gr][lZ_],
          Groups_[gr][rZ_],
          Groups_[gr][AREA],
          Groups_[gr][Az_],
          Groups_[gr][Ay_],
          Groups_[gr][Iz_],
          Groups_[gr][Iy_],
          Groups_[gr][It_]);
      ps.println();
    }
  } catch (Exception ex) {
    System.out.println("Grupos de barras: El archivo no pudo grabarse!");
  }
}
public void EBEsPrintArchTxtMKLB(int e) throws JMetalException {
  // Writes the four 6x6 local stiffness sub-matrices of element e
  // (Kii, Kij, Kji, Kjj) to "EBEs-MKLB(<e>).txt" in MATLAB-style
  // "name=[v,v,...;v,...]" notation, one matrix per line.
  // Refactor: the original repeated the same 15-line printing loop four
  // times; it is extracted into printStiffnessSubMatrix below. The output
  // bytes are unchanged. try-with-resources closes the stream on failure.
  try (PrintStream ps = new PrintStream("EBEs-MKLB(" + e + ").txt")) {
    printStiffnessSubMatrix(ps, "kii" + e, Kii);
    printStiffnessSubMatrix(ps, "kij" + e, Kij);
    printStiffnessSubMatrix(ps, "kji" + e, Kji);
    printStiffnessSubMatrix(ps, "kjj" + e, Kjj);
  } catch (Exception ex) {
    System.out.println("Mat Rig Local: El archivo no pudo grabarse!");
  }
}

// Prints one 6x6 stiffness sub-matrix as "name=[...]" followed by a newline:
// entries separated by ',', rows separated by ';', closed with ']'.
private static void printStiffnessSubMatrix(PrintStream ps, String name, double[][] k) {
  ps.print(name + "=[");
  for (int o = 0; o < 6; o++) {
    for (int p = 0; p < 6; p++) {
      ps.printf("%12.3f", k[o][p]);
      if (o != 5 && p == 5) {
        ps.print(";");
      } else if (o == 5 && p == 5) {
        ps.print("]");
      } else {
        ps.print(",");
      }
    }
  }
  ps.println();
}
public void EBEsPrintArchTxtMKG(String s, int hi) throws JMetalException {
  // Dumps the global stiffness matrix (stored as a flat vector in
  // MatrixStiffness_) to "EBEs-M<s>-H(<hi>).txt", one "(index) - value"
  // row per entry.
  try {
    PrintStream ps = new PrintStream("EBEs-M" + s + "-H(" + hi + ").txt");
    int row = 0;
    for (double value : MatrixStiffness_) {
      ps.printf("(%5d) - %15.4f", row, value);
      ps.println();
      row++;
    }
    ps.close();
  } catch (Exception ex) {
    System.out.println("Mat Rig Global: El archivo no pudo grabarse!");
  }
}
public void EBEsPrintArchTxtDesp(int hi) throws JMetalException {
  // Dumps the nodal displacement vector for load hypothesis hi to
  // "EBEs-Desp-H(<hi>).txt", one "(dof, hi) = value" row per degree of
  // freedom.
  try {
    PrintStream ps = new PrintStream("EBEs-Desp-H(" + hi + ").txt");
    for (int dof = 0; dof < DisplacementNodes_.length; dof++) {
      ps.printf("(%5d, %2d) = %20.16f", dof, hi, DisplacementNodes_[dof][hi]);
      ps.println();
    }
    ps.close();
  } catch (Exception ex) {
    System.out.println("Desplazamientos: El archivo no pudo grabarse!");
  }
}
public void EBEsPrintArchTxtEfforts(int hi) throws JMetalException {
  // Writes the member-end efforts for load hypothesis hi to
  // "EBEs-Efforts-H(<hi>).txt": for each element, one "Ei" line with the
  // six effort components at end i, then one "Ej" line for end j, then a
  // blank separator line.
  try {
    PrintStream ps = new PrintStream("EBEs-Efforts-H(" + hi + ").txt");
    for (int ba = 0; ba < Element_.length; ba++) {
      // node numbers at ends i and j of element ba
      int ni = (int) Element_[ba][i_];
      int nj = (int) Element_[ba][j_];
      // six effort components at end i
      // (component semantics not visible here — defined by Efforti_'s producer)
      ps.printf(
          "Ei(%3d,%3d)=%10.3f %10.3f %10.3f %10.3f %10.3f %10.3f",
          ba,
          ni,
          Efforti_[0][ba][hi],
          Efforti_[1][ba][hi],
          Efforti_[2][ba][hi],
          Efforti_[3][ba][hi],
          Efforti_[4][ba][hi],
          Efforti_[5][ba][hi]);
      ps.println();
      // six effort components at end j
      ps.printf(
          "Ej(%3d,%3d)=%10.3f %10.3f %10.3f %10.3f %10.3f %10.3f",
          ba,
          nj,
          Effortj_[0][ba][hi],
          Effortj_[1][ba][hi],
          Effortj_[2][ba][hi],
          Effortj_[3][ba][hi],
          Effortj_[4][ba][hi],
          Effortj_[5][ba][hi]);
      ps.println();
      ps.println();
    } // Next ba
    ps.close();
  } catch (Exception ex) {
    System.out.println("Esfuerzos: El archivo no pudo grabarse!");
  }
}
public void EBEsPrintArchTxtStrain() throws JMetalException {
  // Writes one file per load hypothesis, "EBEs-Strain-H(<hi>).txt", with
  // the traction, compression and shear strains at both ends (i and j) of
  // every element, one line per end plus a blank separator line.
  try {
    for (int hi = 0; hi < numberOfWeigthHypothesis_; hi++) {
      PrintStream ps = new PrintStream("EBEs-Strain-H(" + hi + ").txt");
      ps.printf("Elements  Nodo   Stracc  Scomp  Scut");
      ps.println();
      ps.printf("--------------------------------------------");
      ps.println();
      for (int ba = 0; ba < Element_.length; ba++) {
        // node numbers at ends i and j of element ba
        int ni = (int) Element_[ba][i_];
        int nj = (int) Element_[ba][j_];
        // strains at end i
        ps.printf(
            "%4d %4d %10.3f %10.3f %10.3f",
            ba,
            ni,
            Straini_[STRAIN_TRACTION][ba][hi],
            Straini_[STRAIN_COMPRESS][ba][hi],
            Straini_[STRAIN_CUT][ba][hi]);
        ps.println();
        // strains at end j
        ps.printf(
            "%4d %4d %10.3f %10.3f %10.3f",
            ba,
            nj,
            Strainj_[STRAIN_TRACTION][ba][hi],
            Strainj_[STRAIN_COMPRESS][ba][hi],
            Strainj_[STRAIN_CUT][ba][hi]);
        ps.println();
        ps.println();
      } // Next ba
      ps.close();
    } // hi
  } catch (Exception ex) {
    System.out.println("Tensiones: El archivo no pudo grabarse!");
  }
}
public void EBEsPrintArchTxtReaction(int hi) throws JMetalException {
  // Writes the support reactions for load hypothesis hi to
  // "EBEs-Reaction-H(<hi>).txt": one line per restricted node with its
  // restriction code and the six reaction components (forces X,Y,Z and
  // moments MX,MY,MZ) taken from the flat Reaction_ vector, which stores
  // six degrees of freedom per node.
  try {
    PrintStream ps = new PrintStream("EBEs-Reaction-H(" + hi + ").txt");
    ps.printf("Nodo  Restriction   X     Y     Z     MX     MY     MZ");
    ps.println();
    ps.printf("--------------------------------------------");
    ps.println();
    for (int o = 0; o < NodeRestrict_.length; o++) {
      // restricted node number and its restriction code
      int no = (int) NodeRestrict_[o][0];
      int ap = (int) NodeRestrict_[o][1];
      // reaction components at that node (aX_..gZ_ are the per-node DOF offsets)
      double x = Reaction_[6 * no + aX_][hi];
      double y = Reaction_[6 * no + aY_][hi];
      double z = Reaction_[6 * no + aZ_][hi];
      double mx = Reaction_[6 * no + gX_][hi];
      double my = Reaction_[6 * no + gY_][hi];
      double mz = Reaction_[6 * no + gZ_][hi];
      ps.printf("%5d %6d %8.3f %8.3f %8.3f %8.3f %8.3f %8.3f", no, ap, x, y, z, mx, my, mz);
      ps.println();
    } // Next o
    ps.close();
  } catch (Exception ex) {
    System.out.println("Reacciones: El archivo no pudo grabarse!");
  }
}
public String EBEsReadProblems() throws FileNotFoundException {
  // Reads the first line of "Ebes.txt" (from the classpath, falling back
  // to the working directory) and splits it, scanning backwards from the
  // end, into: up to four trailing space-separated tokens stored into OF_
  // (presumably the objective-function names), one further token (var1),
  // and the remaining prefix, which is returned.
  // Note for callers: when algorithms are driven by iterations rather than
  // evaluations, use iterations = total evaluations / population size.
  // NOTE(review): OF_ is sized by the TOTAL number of spaces in the line
  // but at most four trailing tokens are ever filled — with more than five
  // tokens the leading OF_ slots stay null; confirm the expected format.
  char ch;
  String line = "";
  String var1 = "";
  String txt = "";
  int i = 0, j = 0;
  // open the resource from the classpath, else from the file system
  InputStream inputStream = getClass().getResourceAsStream("/" + "Ebes.txt");
  if (inputStream == null) {
    inputStream = new FileInputStream("Ebes.txt");
  }
  InputStreamReader isr = new InputStreamReader(inputStream);
  BufferedReader br = new BufferedReader(isr);
  Scanner input = new Scanner(br);
  // read the problem-description line
  line = input.nextLine();
  // count the spaces to size OF_
  j = 0;
  for (i = line.length() - 1; i >= 0; i--) {
    ch = line.charAt(i);
    if (ch == ' ') {
      j++;
    }
  }
  OF_ = new String[j];
  int indOF = j - 1;
  // last token: scan back to the last space
  j = 0;
  for (i = line.length() - 1; i >= 0; i--) {
    ch = line.charAt(i);
    if (ch == ' ') {
      j = i + 1;
      break;
    }
  }
  OF_[indOF] = line.substring(j);
  indOF--;
  // second-to-last token, if any
  int m = 0;
  if (indOF >= 0) {
    for (i = j - 2; i >= 0; i--) {
      ch = line.charAt(i);
      if (ch == ' ') {
        m = i + 1;
        break;
      }
    }
    OF_[indOF] = line.substring(m, j - 1);
    indOF--;
  }
  // third-to-last token, if any
  int n = 0;
  if (indOF >= 0) {
    for (i = m - 2; i >= 0; i--) {
      ch = line.charAt(i);
      if (ch == ' ') {
        n = i + 1;
        break;
      }
    }
    OF_[indOF] = line.substring(n, m - 1);
    indOF--;
  }
  // fourth-to-last token, if any
  int o = 0;
  if (indOF >= 0) {
    for (i = n - 2; i >= 0; i--) {
      ch = line.charAt(i);
      if (ch == ' ') {
        o = i + 1;
        break;
      }
    }
    OF_[indOF] = line.substring(o, n - 1);
    indOF--;
  }
  // fifth-to-last token, if any
  int p = 0;
  if (indOF >= 0) {
    for (i = o - 2; i >= 0; i--) {
      ch = line.charAt(i);
      if (ch == ' ') {
        p = i + 1;
        break;
      }
    }
    OF_[indOF] = line.substring(p, o - 1);
    indOF--;
  }
  // drop the consumed tokens and extract one more (var1)
  line = line.substring(0, i);
  j = 0;
  for (i = line.length() - 1; i >= 0; i--) {
    ch = line.charAt(i);
    if (ch == ' ') {
      j = i + 1;
      break;
    }
  }
  var1 = line.substring(j);
  // if no space was left, the whole remainder is the result
  if (i == -1) {
    txt = var1;
  } else {
    txt = line.substring(0, i);
  }
  // close the file
  input.close();
  return txt;
}
public final void EBEsReadDataFile(String fileName) throws JMetalException {
int i, j = 0;
char ch;
String txt = "";
try {
// create a File instance
InputStream inputStream = getClass().getResourceAsStream("/" + fileName);
if (inputStream == null) {
inputStream = new FileInputStream(fileName);
}
// create a Scanner for the file
Scanner input = new Scanner(inputStream);
// Read data from file
while (input.hasNext()) {
for (i = 0; i < 5; i++) {
txt = input.nextLine();
}
// number of nodes
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfNodes = Integer.valueOf(txt);
// number of restriction
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfNodesRestricts_ = Integer.valueOf(txt);
// number of bar groups
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfGroupElements_ = Integer.valueOf(txt);
// number of elements
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfElements_ = Integer.valueOf(txt);
// number of hipotesis
for (i = 0; i < 5; i++) {
txt = input.nextLine();
}
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfWeigthHypothesis_ = Integer.valueOf(txt);
numberOfWeigthHypothesis_ = 1;
// load as own weight for elements
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
lLoadsOwnWeight = Boolean.valueOf(txt);
// Weight elements
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfWeigthsElements_ = Integer.valueOf(txt);
// Weight nodes
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfWeigthsNodes_ = Integer.valueOf(txt);
// txt = input.nextLine();
// txt = input.next();
// read lines
for (i = 0; i < 4; i++) {
txt = input.nextLine();
}
// check node constraint
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfConstraintsNodes_ = Integer.valueOf(txt);
// number Of Groups To Check Geometry
// read lines
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
numberOfGroupsToCheckGeometry_ = Integer.valueOf(txt);
// Cutting efect (not not included, read lines)
txt = input.nextLine();
// considered second-order effect
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
lSecondOrderGeometric = Boolean.valueOf(txt);
// considered buckling effect
txt = input.nextLine();
for (i = txt.length() - 1; i >= 0; i--) {
ch = txt.charAt(i);
if (ch == ' ') {
j = i + 1;
break;
}
}
txt = txt.substring(j);
lBuckling = Boolean.valueOf(txt);
// read lines
for (i = 0; i < 3; i++) {
txt = input.nextLine();
}
Node_ = new double[numberOfNodes][4];
for (i = 0; i < numberOfNodes; i++) {
txt = input.next();
for (j = 0; j < 4; j++) {
Node_[i][j] = Double.valueOf(input.next());
}
for (j = 0; j < 6; j++) {
txt = input.next();
}
}
NodeRestrict_ = new double[numberOfNodesRestricts_][2];
j = 0;
for (i = 0; i < numberOfNodes; i++) {
if (Node_[i][3] != 0) {
// Restriction of the movement
NodeRestrict_[j][0] = i;
NodeRestrict_[j][1] = Node_[i][3];
j++;
}
}
// ELEMENTS GROUPS
txt = input.nextLine();
txt = input.nextLine();
Groups_ = new double[numberOfGroupElements_][MAX_COLUMN];
for (i = 0; i < numberOfGroupElements_; i++) {
for (j = 0; j < MAX_COLUMN - 1; j++) {
Groups_[i][j] = Double.valueOf(input.next());
}
input.next(); // description
}
// ELEMENTS
txt = input.nextLine();
txt = input.nextLine();
Element_ = new double[numberOfElements_][8];
for (i = 0; i < numberOfElements_; i++) {
txt = input.next(); // BARRAS
Element_[i][INDEX_] = Double.valueOf(input.next());
Element_[i][i_] = Double.valueOf(input.next());
Element_[i][j_] = Double.valueOf(input.next());
Element_[i][L_] = Double.valueOf(input.next());
Element_[i][Vij_] = Double.valueOf(input.next());
Element_[i][Ei_] = Double.valueOf(input.next());
Element_[i][Ej_] = Double.valueOf(input.next());
// correction
int ni = (int) Element_[i][i_];
int nj = (int) Element_[i][j_];
double xi, yi, zi;
double xj, yj, zj;
// coordinates of the end nodes of the bar
/*
if(Math.abs(Node_[ni][aX_])<= 0.000001)
xi = 0.0;
else xi=Node_[ni][aX_];
if(Math.abs(Node_[ni][aY_])<= 0.000001)
yi= 0.0;
else yi = Node_[ni][aY_];
if(Math.abs(Node_[ni][aZ_])<= 0.000001)
zi = 0.0;
else zi = Node_[ni][aZ_];
if(Math.abs(Node_[nj][aX_])<= 0.000001)
xj = 0.0;
else xj=Node_[nj][aX_];
if(Math.abs(Node_[nj][aY_])<= 0.000001)
yj=0.0;
else yj = Node_[nj][aY_];
if(Math.abs(Node_[nj][aZ_])<= 0.000001)
zj = 0.0;
else
*/
xi = Node_[ni][aX_];
yi = Node_[ni][aY_];
zi = Node_[ni][aZ_];
xj = Node_[nj][aX_];
yj = Node_[nj][aY_];
zj = Node_[nj][aZ_];
Element_[i][L_] =
Math.sqrt(
Math.pow((xj - xi), 2.0) + Math.pow((yj - yi), 2.0) + Math.pow((zj - zi), 2.0));
if (Element_[i][L_] < 0.001) Element_[i][L_] = 0.0;
}
txt = input.nextLine();
txt = input.nextLine();
// OVERLOAD
OverloadInElement_ = new double[numberOfWeigthsElements_][8];
for (i = 0; i < numberOfWeigthsElements_; i++) {
txt = input.next(); // load number
for (j = 0; j < 8; j++) {
OverloadInElement_[i][j] = Double.valueOf(input.next());
}
}
// LOAD NODES
txt = input.nextLine();
if (numberOfWeigthsElements_ != 0) {
txt = input.nextLine();
}
WeightNode_ = new double[numberOfWeigthsNodes_][8];
for (i = 0; i < numberOfWeigthsNodes_; i++) {
txt = input.next();
for (j = 0; j < 8; j++) {
WeightNode_[i][j] = Double.valueOf(input.next());
}
}
// CHECK NODE FOR DISPLACEMENT (CONSTRAINT)
txt = input.nextLine();
txt = input.nextLine();
txt = input.nextLine();
txt = input.nextLine();
if (numberOfWeigthsNodes_ != 0) {
txt = input.nextLine();
}
nodeCheck_ = new double[numberOfConstraintsNodes_][2];
for (i = 0; i < numberOfConstraintsNodes_; i++) {
nodeCheck_[i][0] = Double.valueOf(input.next());
nodeCheck_[i][1] = Double.valueOf(input.next());
}
// number of groups to check geometry
txt = input.nextLine();
txt = input.nextLine();
if (numberOfGroupsToCheckGeometry_ != 0) {
geometryCheck_ = new int[numberOfGroupsToCheckGeometry_][];
// txt = input.nextLine();
for (i = 0; i < numberOfGroupsToCheckGeometry_; i++) {
txt = input.nextLine();
geometryCheck_[i] = new int[(txt.length() + 1) / 2];
String aTxt[] = txt.split(" ");
int k = 0;
for (j = 0; j < aTxt.length; j++) {
if (aTxt[j] != " ") {
geometryCheck_[i][k] = Integer.parseInt(aTxt[j]);
k++;
}
}
}
}
while (input.hasNext()) {
txt = input.nextLine();
}
}
// close the file
input.close();
} catch (Exception ex) {
System.out.println("Error: data file EBEs not readed");
System.out.println(ex.getMessage());
System.exit(1);
}
}
public int Variable_Position() {
  // Assigns, for every element group, the number of design variables and
  // geometric constraints implied by its cross-section shape, and records the
  // starting position of each group's variables in the global variable vector.
  // Side effects: writes Groups_[gr][VARIABLES], Groups_[gr][VAR_POSITION],
  // Groups_[gr][CONSTRAINT] and resets/accumulates numberOfConstraintsGeometric_.
  // Returns the total number of design variables over all groups.
  // On an unknown shape the program reports the group and terminates, exactly
  // as the original duplicated if/else chain did.
  int numberOfVariables_ = 0;
  try {
    numberOfConstraintsGeometric_ = 0;
    for (int gr = 0; gr < numberOfGroupElements_; gr++) {
      double shape = Groups_[gr][SHAPE];
      int variables;
      int constraints;
      if (shape == CIRCLE) {
        // solid circle: a single radius/diameter variable, no geometric constraint
        variables = 1;
        constraints = 0;
      } else if (shape == HOLE_CIRCLE || shape == RECTANGLE) {
        // two dimensions, two geometric constraints
        variables = 2;
        constraints = 2;
      } else if (shape == HOLE_RECTANGLE
          || shape == I_SINGLE
          || shape == I_DOUBLE
          || shape == H_SINGLE
          || shape == H_DOUBLE
          || shape == L_SINGLE
          || shape == L_DOUBLE
          || shape == T_SINGLE
          || shape == T_DOUBLE) {
        // four dimensions, four geometric constraints
        variables = 4;
        constraints = 4;
      } else {
        System.out.println("Error: transversal section not considerated in " + gr + " group");
        System.exit(1);
        continue; // unreachable (exit above); satisfies definite assignment
      }
      numberOfVariables_ += variables;
      Groups_[gr][VARIABLES] = variables;
      // this group's variables start right after those of the previous groups
      Groups_[gr][VAR_POSITION] = numberOfVariables_ - variables;
      numberOfConstraintsGeometric_ += constraints;
      Groups_[gr][CONSTRAINT] = constraints;
    } // gr
  } catch (Exception ex) {
    System.out.println(ex.getCause());
    System.out.println(ex.getMessage());
    System.exit(1);
  }
  return numberOfVariables_;
}
public double FunctionENS(int hi) {
  // Nash-Sutcliffe-style efficiency of the estimated strains against the
  // material stress limits, accumulated over every geometry-check group.
  // hi: index of the weight hypothesis whose strain results are evaluated.
  // function Efficiency Nash-Sutcliffe
  // O[k] : k-th data value of material stress (observed)
  // E[k] : k-th estimated value of the stress
  double SSRes = 0.0;
  double SSTot = 0.0;
  double ENS = 0.0;
  double mOmax = 0.0;
  double mOmin = 0.0;
  // [0][hi] residual strain axial
  // [1][hi] residual strain transversal
  for (int i = 0; i < geometryCheck_.length; i++) {
    double[] Omax = new double[geometryCheck_[i].length];
    double[] Omin = new double[geometryCheck_[i].length];
    double[] Emax = new double[geometryCheck_[i].length];
    double[] Emin = new double[geometryCheck_[i].length];
    for (int j = 0; j < geometryCheck_[i].length; j++) {
      int gr = geometryCheck_[i][j];
      // compression side: minimum strain vs. the group's compression limit
      Emin[j] = StrainMin_[gr][hi];
      Omin[j] = Groups_[(int) Element_[gr][INDEX_]][COMPRESSION];
      // tension side: maximum strain vs. the group's stress limit
      Emax[j] = StrainMax_[gr][hi];
      Omax[j] = Groups_[(int) Element_[gr][INDEX_]][STRESS];
      mOmax += (Omax[j]);
      mOmin += (Omin[j]);
    }
    // mean of the observed data
    // NOTE(review): mOmax/mOmin are not reset to 0 between groups, so from the
    // second group onward the "mean" also folds in the previous group's value;
    // the factor 2 is likewise unexplained here — confirm both are intended.
    mOmax = 2 * mOmax / Omax.length;
    mOmin = 2 * mOmin / Omin.length;
    for (int k = 0; k < Omax.length; k++) {
      // Sum of Squares of Residuals, also called the residual sum of squares
      SSRes += Math.pow((Omin[k] - Emin[k]), 2.0) + Math.pow((Omax[k] - Emax[k]), 2.0);
      // Total Sum of Squares (proportional to the sample variance)
      SSTot += Math.pow((Omin[k] - mOmin), 2.0) + Math.pow((Omax[k] - mOmax), 2.0);
    }
    // classic ENS would be 1 - SSRes/SSTot; here the ratio itself is summed
    ENS += SSRes / SSTot;
  }
  return ENS;
}
public double FunctionsMahalanobis_Distance_With_Variance(int hi) {
  // Sum over all geometry-check groups of the Mahalanobis distance of the
  // origin from each group's (Y_, Z_) dimension samples, using per-group
  // variances and the Pearson correlation between the two dimensions.
  // NOTE(review): the parameter 'hi' is never referenced in this body —
  // presumably kept for signature symmetry with the other objective
  // functions; confirm.
  double MD = 0.0; // mahalanobis distance (total over groups)
  double[] MDi = new double[geometryCheck_.length]; // mahalanobis distance per group
  for (int i = 0; i < geometryCheck_.length; i++) {
    int N = geometryCheck_[i].length;
    double[] distY = new double[N];
    double[] distZ = new double[N];
    double sumY = 0.0; // sum of Y samples
    double sumZ = 0.0; // sum of Z samples
    double sumYxY = 0.0; // sum of Y*Y
    double sumZxZ = 0.0; // sum of Z*Z
    double sumYxZ = 0.0; // sum of Y*Z
    double meanY = 0.0; // mean of Y distance
    double meanZ = 0.0; // mean of Z distance
    double S2Y = 0.0; // variance Y distance
    double S2Z = 0.0; // variance Z distance
    double SY = 0.0; // standard deviation of Y distance
    double SZ = 0.0; // standard deviation of Z distance
    // double CS2 = 0.0; // covariance
    double r = 0.0; // Pearson correlation
    for (int j = 0; j < geometryCheck_[i].length; j++) {
      distY[j] = Groups_[geometryCheck_[i][j]][Y_];
      distZ[j] = Groups_[geometryCheck_[i][j]][Z_];
      sumY += distY[j];
      sumZ += distZ[j];
      sumYxY += distY[j] * distY[j];
      sumZxZ += distZ[j] * distZ[j];
      sumYxZ += distY[j] * distZ[j];
      meanY += distY[j];
      meanZ += distZ[j];
    }
    // mean of the observed data and values estimated
    meanY /= N;
    meanZ /= N;
    // Pearson's correlation coefficient
    // NOTE(review): the denominator is 0 when either dimension is constant
    // across the group — confirm that case cannot occur in practice.
    r =
        (N * sumYxZ - sumY * sumZ)
            / (Math.sqrt(
                (N * sumYxY - Math.pow(sumY, 2.0)) * (N * sumZxZ - Math.pow(sumZ, 2.0))));
    // sample variance (divides by N-1; fails for a single-member group — verify)
    for (int k = 0; k < N; k++) {
      S2Y += Math.pow((distY[k] - meanY), 2.0);
      S2Z += Math.pow((distZ[k] - meanZ), 2.0);
    }
    S2Y /= (N - 1);
    S2Z /= (N - 1);
    SY = Math.sqrt(S2Y);
    SZ = Math.sqrt(S2Z);
    // CS2 /= (N-1);
    // Mahalanobis distance of the origin (0, 0) from each sample
    // NOTE(review): loop starts at k = 1, skipping the first group member —
    // confirm this is intentional and not an off-by-one.
    for (int k = 1; k < N; k++) {
      MDi[i] +=
          Math.pow((0 - distY[k]), 2.0) / S2Y
              + Math.pow((0 - distZ[k]), 2.0) / S2Z
              - 2.0 * r * (0 - distY[k]) * (0 - distZ[k]) / (SY * SZ);
    }
    MDi[i] = Math.sqrt(1 / (1 - Math.pow(r, 2.0)) * MDi[i]);
  }
  for (int i = 0; i < geometryCheck_.length; i++) {
    MD += MDi[i];
  }
  return MD;
}
/*
public double FunctionENS(int indx, int hi)
{
// function Efficiency Nash-Sutcliffe
// O[k] : k-th data value of matirial stress (observed)
// E[k] : k-th estimated value of the stress
double[] O = new double [numberOfGroupElements_];
double[] E = new double [numberOfGroupElements_];
int k = O.length;
double Om = 0.0;
// [0][hi] residual strain axial
// [1][hi] residual strain transversal
if( indx == COMPRESSION)
{
// compress (-)
for(int gr=0; gr<numberOfGroupElements_; gr++)
{
E[gr]=StrainMin_[gr][hi];
if(StrainMin_[gr][hi] !=0.0)
O[gr]=Groups_[(int)Element_[gr][INDEX_]][COMPRESSION];
}
}
else if ( indx == STRESS)
{
// stress (+)
for(int gr=0; gr<numberOfGroupElements_; gr++)
{
E[gr]=StrainMax_[gr][hi];
if(StrainMax_[gr][hi] !=0.0)
O[gr]=Groups_[(int)Element_[gr][INDEX_]][STRESS];
}
}
else {
for(int gr=0; gr<numberOfGroupElements_; gr++) {
E[gr] = StrainMin_[gr][hi];
if (StrainMin_[gr][hi] != 0.0)
O[gr]+= Math.abs(Groups_[(int) Element_[gr][INDEX_]][COMPRESSION]);
E[gr]=StrainMax_[gr][hi];
if(StrainMax_[gr][hi] !=0.0)
O[gr]+=Math.abs(Groups_[(int)Element_[gr][INDEX_]][STRESS]);
}
}
//mean of the observed data
for(int i=0; i<k;i++)
{
Om += O[i];
}
Om = Om/k;
if ( indx == 2)
Om *=2.0;
double SSRes = 0.0;
double SSTot = 0.0;
double ENS = 0.0;
for (int i = 0; i < k; i++)
{
//Sum of Squares of Residuals, also called the residual sum of squares
SSRes += Math.pow((O[i] - E[i]), 2.0);
//Total Sum of Squares (proportional to the sample variance)
SSTot += Math.pow((O[i] - Om), 2.0);
}
//ENS = Math.abs(1.0 - Math.abs(SSRes / SSTot));
ENS = Math.abs(SSRes / SSTot);
return ENS;
}
*/
}
|
package com.github.romankh3.tacocloud.config;
import com.github.romankh3.tacocloud.service.UserDetailsService;
import com.github.romankh3.tacocloud.service.UserRepositoryUserDetailsService;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.crypto.password.StandardPasswordEncoder;
@Configuration
@EnableWebSecurity
public class SecurityConfig extends WebSecurityConfigurerAdapter {
    // Application user store; the concrete implementation is resolved by
    // Spring at runtime (see the service package imports above — presumably
    // UserRepositoryUserDetailsService; confirm against the bean definitions).
    @Autowired
    private UserDetailsService userDetailsService;
    /**
     * Registers the custom {@code UserDetailsService}-backed user store with
     * the shared password encoder. The commented-out sections below preserve
     * the JDBC, LDAP and in-memory alternatives from the corresponding book
     * chapters for reference.
     */
    @Override
    protected void configure(AuthenticationManagerBuilder auth)
            throws Exception {
        auth
                .userDetailsService(userDetailsService)
                .passwordEncoder(encoder());
        //4.2.2 JDBC-based user store
//        auth
//                .jdbcAuthentication()
//                .dataSource(dataSource)
//                .usersByUsernameQuery(
//                        "select username, password, enabled from Users " +
//                                "where username=?")
//                .authoritiesByUsernameQuery(
//                        "select username, authority from UserAuthorities " +
//                                "where username=?")
//                .passwordEncoder(new BCryptPasswordEncoder());
        //4.2.3 LDAP-based user store
//        auth
//                .ldapAuthentication()
//                .userSearchBase("ou=people")
//                .userSearchFilter("(uid={0})")
//                .groupSearchBase("ou=groups")
//                .groupSearchFilter("member={0}");
        //4.2.1 in-memory authentication
//        auth
//                .inMemoryAuthentication()
//                .withUser("buzz")
//                .password("infinity")
//                .authorities("ROLE_USER")
//                .and()
//                .withUser("woody")
//                .password("bullseye")
//                .authorities("ROLE_USER");
    }
    /**
     * Password encoder used to hash and verify stored credentials.
     * NOTE(review): {@code StandardPasswordEncoder} (salted SHA-256) is
     * deprecated in Spring Security 5 and not considered secure; migrating to
     * {@code BCryptPasswordEncoder} would invalidate already-stored hashes, so
     * it is kept here — confirm whether a migration is planned.
     */
    @Bean
    public PasswordEncoder encoder() {
        return new StandardPasswordEncoder("53cr3t");
    }
}
|
package cmd;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
import org.kohsuke.args4j.CmdLineParser;
import command.extension.TestCommand;
/**
 * Unit tests for the command-line helper classes {@code CommandLineParser}
 * and {@code CommandLineSplitter}, exercised through the args4j-annotated
 * {@link TestCommand} option bean.
 */
public class TestCommandLineTools {
    /** Parsing known options populates the bean; options never given stay null. */
    @Test
    public void testCommandLineParser() {
        TestCommand command = new TestCommand();
        // idiomatic Java array declaration (String[] args) instead of C-style (String args[])
        String[] args = { "-model=Test", "-interpreter=Hello" };
        CommandLineParser.parse(args, command);
        assertAll("Should return all options which set by TestCommand",
                () -> assertEquals("Test", command.getModel()),
                () -> assertEquals("Hello", command.getInterpreter()),
                () -> assertNull(command.getLoader()));
    }
    /** definedArgs keeps only the arguments the parser declares options for. */
    @Test
    public void testCommandLineSplitterDefinedArgs() {
        TestCommand command = new TestCommand();
        String[] args = { "-model=Test", "-interpreter=Hello", "-hello=World" };
        CmdLineParser parser = new CmdLineParser(command);
        String[] result = CommandLineSplitter.definedArgs(args, parser);
        assertAll("Should return all possible options which can be set by TestCommand",
                () -> assertEquals(2, result.length),
                () -> assertEquals("-model=Test", result[0]),
                () -> assertEquals("-interpreter=Hello", result[1]));
    }
    /** undefinedArgs keeps only the arguments the parser has no option for. */
    @Test
    public void testCommandLineSplitterUndefinedArgs() {
        TestCommand command = new TestCommand();
        String[] args = { "-model=Test", "-interpreter=Hello", "-hello=World" };
        CmdLineParser parser = new CmdLineParser(command);
        String[] result = CommandLineSplitter.undefinedArgs(args, parser);
        assertAll("Should return all possible options which can not be set by TestCommand",
                () -> assertEquals(1, result.length),
                () -> assertEquals("-hello=World", result[0]));
    }
}
|
/*
*
*/
package com.github.icelyframework.activitystorming.diagram.providers.assistants;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import com.github.icelyframework.activitystorming.diagram.edit.parts.Aggregate2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.AggregateEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.CommandAction2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.CommandActionEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ConstraintPin2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ConstraintPin3EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ConstraintPinEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ControlNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ControlNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DecisionNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DecisionNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DomainEvent2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DomainEventEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DomainObject2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DomainObject3EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.DomainObjectEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.Entity2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.Entity3EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.EntityEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ExternalSystem2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ExternalSystemEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.FinalNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.FinalNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ForkNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ForkNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.InitialNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.InitialNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.JoinNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.JoinNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.MergeNode2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.MergeNodeEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.QueryAction2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.QueryActionEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ReadModel2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ReadModelEditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ValueObject2EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ValueObject3EditPart;
import com.github.icelyframework.activitystorming.diagram.edit.parts.ValueObjectEditPart;
import com.github.icelyframework.activitystorming.diagram.providers.ActivitystormingElementTypes;
import com.github.icelyframework.activitystorming.diagram.providers.ActivitystormingModelingAssistantProvider;
/**
* @generated
*/
public class ActivitystormingModelingAssistantProviderOfEntity2EditPart
extends ActivitystormingModelingAssistantProvider {
	/**
	 * Returns the relationship types that may originate from the given source
	 * edit part, which is adapted to an {@code Entity2EditPart} before
	 * delegation.
	 * @generated
	 */
	@Override
	public List<IElementType> getRelTypesOnSource(IAdaptable source) {
		IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
		return doGetRelTypesOnSource((Entity2EditPart) sourceEditPart);
	}
/**
* @generated
*/
public List<IElementType> doGetRelTypesOnSource(Entity2EditPart source) {
List<IElementType> types = new ArrayList<IElementType>(6);
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
types.add(ActivitystormingElementTypes.ControlFlow_4003);
types.add(ActivitystormingElementTypes.ControlFlow_4004);
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
return types;
}
	/**
	 * Returns the relationship types allowed between the given source and
	 * target edit parts; both are adapted to graphical edit parts and the
	 * source is cast to {@code Entity2EditPart} before delegation.
	 * @generated
	 */
	@Override
	public List<IElementType> getRelTypesOnSourceAndTarget(IAdaptable source, IAdaptable target) {
		IGraphicalEditPart sourceEditPart = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
		IGraphicalEditPart targetEditPart = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
		return doGetRelTypesOnSourceAndTarget((Entity2EditPart) sourceEditPart, targetEditPart);
	}
/**
* @generated
*/
public List<IElementType> doGetRelTypesOnSourceAndTarget(Entity2EditPart source,
IGraphicalEditPart targetEditPart) {
List<IElementType> types = new LinkedList<IElementType>();
if (targetEditPart instanceof CommandActionEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof AggregateEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof QueryActionEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ValueObjectEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof EntityEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ExternalSystemEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ConstraintPinEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof InitialNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ForkNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof JoinNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DecisionNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof MergeNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof FinalNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DomainObjectEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ReadModelEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DomainEventEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ControlNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ConstraintPin2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ValueObject2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof Entity2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DomainObject2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof CommandAction2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof Aggregate2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof QueryAction2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ValueObject3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof Entity3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ExternalSystem2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ConstraintPin3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof InitialNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ForkNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof JoinNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DecisionNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof MergeNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof FinalNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DomainObject3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ReadModel2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof DomainEvent2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof ControlNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4001);
}
if (targetEditPart instanceof CommandActionEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof AggregateEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof QueryActionEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ValueObjectEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof EntityEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ExternalSystemEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ConstraintPinEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof InitialNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ForkNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof JoinNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DecisionNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof MergeNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof FinalNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DomainObjectEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ReadModelEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DomainEventEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ControlNodeEditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ConstraintPin2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ValueObject2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof Entity2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DomainObject2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof CommandAction2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof Aggregate2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof QueryAction2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ValueObject3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof Entity3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ExternalSystem2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ConstraintPin3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof InitialNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ForkNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof JoinNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DecisionNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof MergeNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof FinalNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DomainObject3EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ReadModel2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof DomainEvent2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof ControlNode2EditPart) {
types.add(ActivitystormingElementTypes.ObjectFlow_4002);
}
if (targetEditPart instanceof CommandActionEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof AggregateEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof QueryActionEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ValueObjectEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof EntityEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ExternalSystemEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ConstraintPinEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof InitialNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ForkNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof JoinNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DecisionNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof MergeNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof FinalNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DomainObjectEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ReadModelEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DomainEventEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ControlNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ConstraintPin2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ValueObject2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof Entity2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DomainObject2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof CommandAction2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof Aggregate2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof QueryAction2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ValueObject3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof Entity3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ExternalSystem2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ConstraintPin3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof InitialNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ForkNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof JoinNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DecisionNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof MergeNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof FinalNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DomainObject3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ReadModel2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof DomainEvent2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof ControlNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4003);
}
if (targetEditPart instanceof CommandActionEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof AggregateEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof QueryActionEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ValueObjectEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof EntityEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ExternalSystemEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ConstraintPinEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof InitialNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ForkNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof JoinNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DecisionNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof MergeNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof FinalNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DomainObjectEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ReadModelEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DomainEventEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ControlNodeEditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ConstraintPin2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ValueObject2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof Entity2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DomainObject2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof CommandAction2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof Aggregate2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof QueryAction2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ValueObject3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof Entity3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ExternalSystem2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ConstraintPin3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof InitialNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ForkNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof JoinNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DecisionNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof MergeNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof FinalNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DomainObject3EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ReadModel2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof DomainEvent2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof ControlNode2EditPart) {
types.add(ActivitystormingElementTypes.ControlFlow_4004);
}
if (targetEditPart instanceof CommandActionEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof AggregateEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof QueryActionEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ValueObjectEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof EntityEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ExternalSystemEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ConstraintPinEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof InitialNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ForkNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof JoinNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DecisionNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof MergeNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof FinalNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DomainObjectEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ReadModelEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DomainEventEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ControlNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ConstraintPin2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ValueObject2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof Entity2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DomainObject2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof CommandAction2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof Aggregate2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof QueryAction2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ValueObject3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof Entity3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ExternalSystem2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ConstraintPin3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof InitialNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ForkNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof JoinNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DecisionNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof MergeNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof FinalNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DomainObject3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ReadModel2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof DomainEvent2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof ControlNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4005);
}
if (targetEditPart instanceof CommandActionEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof AggregateEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof QueryActionEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ValueObjectEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof EntityEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ExternalSystemEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ConstraintPinEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof InitialNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ForkNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof JoinNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DecisionNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof MergeNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof FinalNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DomainObjectEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ReadModelEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DomainEventEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ControlNodeEditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ConstraintPin2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ValueObject2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof Entity2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DomainObject2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof CommandAction2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof Aggregate2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof QueryAction2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ValueObject3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof Entity3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ExternalSystem2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ConstraintPin3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof InitialNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ForkNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof JoinNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DecisionNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof MergeNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof FinalNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DomainObject3EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ReadModel2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof DomainEvent2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
if (targetEditPart instanceof ControlNode2EditPart) {
types.add(ActivitystormingElementTypes.ActivityEdge_4006);
}
return types;
}
/**
 * Adapts the supplied source element to its edit part and delegates to
 * {@link #doGetTypesForTarget(Entity2EditPart, IElementType)}.
 * <p>
 * The adapter lookup is expected to yield an {@link Entity2EditPart}; any
 * other edit part produces a {@link ClassCastException}, exactly as in the
 * generated original.
 *
 * @generated NOT
 */
@Override
public List<IElementType> getTypesForTarget(IAdaptable source, IElementType relationshipType) {
    IGraphicalEditPart adaptedSource = (IGraphicalEditPart) source.getAdapter(IGraphicalEditPart.class);
    return doGetTypesForTarget((Entity2EditPart) adaptedSource, relationshipType);
}
/**
 * Returns the node element types that may serve as the target end of a new
 * relationship of {@code relationshipType} whose source end is the given
 * {@link Entity2EditPart}.
 * <p>
 * In the generated original, all six supported edge types
 * (ObjectFlow_4001/4002, ControlFlow_4003/4004, ActivityEdge_4005/4006)
 * carried six verbatim copies of the identical 38-element node-type list.
 * The duplication is collapsed into a single membership test plus one shared
 * list-filling helper; element order and the returned mutable
 * {@link ArrayList} are unchanged. Unknown relationship types yield an empty
 * mutable list, as before.
 *
 * @param source           the fixed source edit part (unused by the decision,
 *                         kept for the generated call contract)
 * @param relationshipType the edge type being created
 * @return a mutable list of permissible target node types
 * @generated NOT
 */
public List<IElementType> doGetTypesForTarget(Entity2EditPart source, IElementType relationshipType) {
    List<IElementType> types = new ArrayList<IElementType>();
    if (isKnownEntity2RelationshipType(relationshipType)) {
        addAllEntity2TargetNodeTypes(types);
    }
    return types;
}

/**
 * Tells whether the given type is one of the six edge types this policy
 * supports. Identity comparison ({@code ==}) matches the generated code:
 * element types are registry singletons.
 *
 * @generated NOT
 */
private static boolean isKnownEntity2RelationshipType(IElementType relationshipType) {
    return relationshipType == ActivitystormingElementTypes.ObjectFlow_4001
            || relationshipType == ActivitystormingElementTypes.ObjectFlow_4002
            || relationshipType == ActivitystormingElementTypes.ControlFlow_4003
            || relationshipType == ActivitystormingElementTypes.ControlFlow_4004
            || relationshipType == ActivitystormingElementTypes.ActivityEdge_4005
            || relationshipType == ActivitystormingElementTypes.ActivityEdge_4006;
}

/**
 * Adds every node type accepted as a relationship end, in the exact order the
 * generated branches used (top-level nodes 2001–2017, then child nodes
 * 3001–3021).
 *
 * @generated NOT
 */
private static void addAllEntity2TargetNodeTypes(List<IElementType> types) {
    types.add(ActivitystormingElementTypes.CommandAction_2001);
    types.add(ActivitystormingElementTypes.Aggregate_2002);
    types.add(ActivitystormingElementTypes.QueryAction_2003);
    types.add(ActivitystormingElementTypes.ValueObject_2004);
    types.add(ActivitystormingElementTypes.Entity_2005);
    types.add(ActivitystormingElementTypes.ExternalSystem_2006);
    types.add(ActivitystormingElementTypes.ConstraintPin_2007);
    types.add(ActivitystormingElementTypes.InitialNode_2008);
    types.add(ActivitystormingElementTypes.ForkNode_2009);
    types.add(ActivitystormingElementTypes.JoinNode_2010);
    types.add(ActivitystormingElementTypes.DecisionNode_2011);
    types.add(ActivitystormingElementTypes.MergeNode_2012);
    types.add(ActivitystormingElementTypes.FinalNode_2013);
    types.add(ActivitystormingElementTypes.DomainObject_2014);
    types.add(ActivitystormingElementTypes.ReadModel_2015);
    types.add(ActivitystormingElementTypes.DomainEvent_2016);
    types.add(ActivitystormingElementTypes.ControlNode_2017);
    types.add(ActivitystormingElementTypes.ConstraintPin_3001);
    types.add(ActivitystormingElementTypes.ValueObject_3002);
    types.add(ActivitystormingElementTypes.Entity_3003);
    types.add(ActivitystormingElementTypes.DomainObject_3004);
    types.add(ActivitystormingElementTypes.CommandAction_3005);
    types.add(ActivitystormingElementTypes.Aggregate_3006);
    types.add(ActivitystormingElementTypes.QueryAction_3007);
    types.add(ActivitystormingElementTypes.ValueObject_3008);
    types.add(ActivitystormingElementTypes.Entity_3009);
    types.add(ActivitystormingElementTypes.ExternalSystem_3010);
    types.add(ActivitystormingElementTypes.ConstraintPin_3011);
    types.add(ActivitystormingElementTypes.InitialNode_3012);
    types.add(ActivitystormingElementTypes.ForkNode_3013);
    types.add(ActivitystormingElementTypes.JoinNode_3014);
    types.add(ActivitystormingElementTypes.DecisionNode_3015);
    types.add(ActivitystormingElementTypes.MergeNode_3016);
    types.add(ActivitystormingElementTypes.FinalNode_3017);
    types.add(ActivitystormingElementTypes.DomainObject_3018);
    types.add(ActivitystormingElementTypes.ReadModel_3019);
    types.add(ActivitystormingElementTypes.DomainEvent_3020);
    types.add(ActivitystormingElementTypes.ControlNode_3021);
}
/**
 * Adapts the supplied target element to its edit part and delegates to
 * {@link #doGetRelTypesOnTarget(Entity2EditPart)}.
 * <p>
 * The adapter lookup is expected to yield an {@link Entity2EditPart}; any
 * other edit part produces a {@link ClassCastException}, exactly as in the
 * generated original.
 *
 * @generated NOT
 */
@Override
public List<IElementType> getRelTypesOnTarget(IAdaptable target) {
    IGraphicalEditPart adaptedTarget = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
    return doGetRelTypesOnTarget((Entity2EditPart) adaptedTarget);
}
/**
 * Returns the relationship types that may terminate on an
 * {@link Entity2EditPart}: both object flows, both control flows, and both
 * activity edges, in their generated declaration order.
 *
 * @param target the prospective target edit part (not consulted; all six edge
 *               types are always offered)
 * @return a mutable list of the six incoming relationship types
 * @generated NOT
 */
public List<IElementType> doGetRelTypesOnTarget(Entity2EditPart target) {
    IElementType[] incomingEdgeTypes = {
            ActivitystormingElementTypes.ObjectFlow_4001,
            ActivitystormingElementTypes.ObjectFlow_4002,
            ActivitystormingElementTypes.ControlFlow_4003,
            ActivitystormingElementTypes.ControlFlow_4004,
            ActivitystormingElementTypes.ActivityEdge_4005,
            ActivitystormingElementTypes.ActivityEdge_4006 };
    List<IElementType> types = new ArrayList<IElementType>(incomingEdgeTypes.length);
    for (IElementType edgeType : incomingEdgeTypes) {
        types.add(edgeType);
    }
    return types;
}
/**
 * Adapts the supplied target element to its edit part and delegates to
 * {@link #doGetTypesForSource(Entity2EditPart, IElementType)}.
 * <p>
 * The adapter lookup is expected to yield an {@link Entity2EditPart}; any
 * other edit part produces a {@link ClassCastException}, exactly as in the
 * generated original.
 *
 * @generated NOT
 */
@Override
public List<IElementType> getTypesForSource(IAdaptable target, IElementType relationshipType) {
    IGraphicalEditPart adaptedTarget = (IGraphicalEditPart) target.getAdapter(IGraphicalEditPart.class);
    return doGetTypesForSource((Entity2EditPart) adaptedTarget, relationshipType);
}
/**
 * Returns the element types that may act as the source of the given
 * relationship type when this Entity is the target.
 *
 * <p>In the generated original, all six supported relationship kinds
 * (ObjectFlow_4001/4002, ControlFlow_4003/4004, ActivityEdge_4005/4006)
 * carried six byte-identical branches, each adding the same 38 element
 * types. The duplication is collapsed into one membership check plus a
 * single shared helper; the returned list content is unchanged for every
 * input, and an unrecognized relationship type still yields an empty list.
 *
 * @param target the prospective target edit part (unused, kept for the
 *        generated call contract)
 * @param relationshipType the relationship being created
 * @return the element types valid as a source, or an empty list
 * @generated NOT
 */
public List<IElementType> doGetTypesForSource(Entity2EditPart target, IElementType relationshipType) {
    List<IElementType> types = new ArrayList<IElementType>();
    if (relationshipType == ActivitystormingElementTypes.ObjectFlow_4001
            || relationshipType == ActivitystormingElementTypes.ObjectFlow_4002
            || relationshipType == ActivitystormingElementTypes.ControlFlow_4003
            || relationshipType == ActivitystormingElementTypes.ControlFlow_4004
            || relationshipType == ActivitystormingElementTypes.ActivityEdge_4005
            || relationshipType == ActivitystormingElementTypes.ActivityEdge_4006) {
        addCommonSourceTypes(types);
    }
    return types;
}

/**
 * Appends the full set of node element types that may act as the source of
 * any edge targeting an Entity. The set is identical for every relationship
 * type handled by {@code doGetTypesForSource}, which is why it lives in one
 * place.
 *
 * @param types the list to append to
 * @generated NOT
 */
private static void addCommonSourceTypes(List<IElementType> types) {
    types.add(ActivitystormingElementTypes.CommandAction_2001);
    types.add(ActivitystormingElementTypes.Aggregate_2002);
    types.add(ActivitystormingElementTypes.QueryAction_2003);
    types.add(ActivitystormingElementTypes.ValueObject_2004);
    types.add(ActivitystormingElementTypes.Entity_2005);
    types.add(ActivitystormingElementTypes.ExternalSystem_2006);
    types.add(ActivitystormingElementTypes.ConstraintPin_2007);
    types.add(ActivitystormingElementTypes.InitialNode_2008);
    types.add(ActivitystormingElementTypes.ForkNode_2009);
    types.add(ActivitystormingElementTypes.JoinNode_2010);
    types.add(ActivitystormingElementTypes.DecisionNode_2011);
    types.add(ActivitystormingElementTypes.MergeNode_2012);
    types.add(ActivitystormingElementTypes.FinalNode_2013);
    types.add(ActivitystormingElementTypes.DomainObject_2014);
    types.add(ActivitystormingElementTypes.ReadModel_2015);
    types.add(ActivitystormingElementTypes.DomainEvent_2016);
    types.add(ActivitystormingElementTypes.ControlNode_2017);
    types.add(ActivitystormingElementTypes.ConstraintPin_3001);
    types.add(ActivitystormingElementTypes.ValueObject_3002);
    types.add(ActivitystormingElementTypes.Entity_3003);
    types.add(ActivitystormingElementTypes.DomainObject_3004);
    types.add(ActivitystormingElementTypes.CommandAction_3005);
    types.add(ActivitystormingElementTypes.Aggregate_3006);
    types.add(ActivitystormingElementTypes.QueryAction_3007);
    types.add(ActivitystormingElementTypes.ValueObject_3008);
    types.add(ActivitystormingElementTypes.Entity_3009);
    types.add(ActivitystormingElementTypes.ExternalSystem_3010);
    types.add(ActivitystormingElementTypes.ConstraintPin_3011);
    types.add(ActivitystormingElementTypes.InitialNode_3012);
    types.add(ActivitystormingElementTypes.ForkNode_3013);
    types.add(ActivitystormingElementTypes.JoinNode_3014);
    types.add(ActivitystormingElementTypes.DecisionNode_3015);
    types.add(ActivitystormingElementTypes.MergeNode_3016);
    types.add(ActivitystormingElementTypes.FinalNode_3017);
    types.add(ActivitystormingElementTypes.DomainObject_3018);
    types.add(ActivitystormingElementTypes.ReadModel_3019);
    types.add(ActivitystormingElementTypes.DomainEvent_3020);
    types.add(ActivitystormingElementTypes.ControlNode_3021);
}
}
|
package com.github.jiangxch.courselearningmanagement.providerapi.result;
import lombok.Data;
import java.io.Serializable;
/**
 * Serializable DTO carrying a user's profile information across the
 * provider API. Getters, setters, {@code equals}/{@code hashCode} and
 * {@code toString} are generated by Lombok's {@code @Data} annotation.
 *
 * @author: sanjin
 * @date: 2020/2/27 下午12:45
 */
@Data
public class UserInfoResult implements Serializable {
    // Unique identifier of the user.
    private String id;
    // Display name shown to other users.
    private String nickname;
    // Profile text or avatar reference — NOTE(review): exact meaning not
    // visible here; confirm against the producing service.
    private String profile;
    // Login name of the account.
    private String username;
    // Role discriminator; the valid values are defined elsewhere — confirm
    // against the role enum/constants in the project.
    private Integer roleType;
    // Creation timestamp as an epoch value (presumably milliseconds — TODO
    // confirm with the writer of this field).
    private Long createTime;
    // Last-update timestamp, same convention as createTime.
    private Long updateTime;
}
|
/*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Portions Copyright (c) 1995 Colin Plumb. All rights reserved.
*/
package java.math;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import sun.misc.DoubleConsts;
import sun.misc.FloatConsts;
/**
* Immutable arbitrary-precision integers. All operations behave as if
* BigIntegers were represented in two's-complement notation (like Java's
* primitive integer types). BigInteger provides analogues to all of Java's
* primitive integer operators, and all relevant methods from java.lang.Math.
* Additionally, BigInteger provides operations for modular arithmetic, GCD
* calculation, primality testing, prime generation, bit manipulation,
* and a few other miscellaneous operations.
*
* <p>Semantics of arithmetic operations exactly mimic those of Java's integer
* arithmetic operators, as defined in <i>The Java Language Specification</i>.
* For example, division by zero throws an {@code ArithmeticException}, and
* division of a negative by a positive yields a negative (or zero) remainder.
* All of the details in the Spec concerning overflow are ignored, as
* BigIntegers are made as large as necessary to accommodate the results of an
* operation.
*
* <p>Semantics of shift operations extend those of Java's shift operators
* to allow for negative shift distances. A right-shift with a negative
* shift distance results in a left shift, and vice-versa. The unsigned
* right shift operator ({@code >>>}) is omitted, as this operation makes
* little sense in combination with the "infinite word size" abstraction
* provided by this class.
*
* <p>Semantics of bitwise logical operations exactly mimic those of Java's
* bitwise integer operators. The binary operators ({@code and},
* {@code or}, {@code xor}) implicitly perform sign extension on the shorter
* of the two operands prior to performing the operation.
*
* <p>Comparison operations perform signed integer comparisons, analogous to
* those performed by Java's relational and equality operators.
*
* <p>Modular arithmetic operations are provided to compute residues, perform
* exponentiation, and compute multiplicative inverses. These methods always
* return a non-negative result, between {@code 0} and {@code (modulus - 1)},
* inclusive.
*
* <p>Bit operations operate on a single bit of the two's-complement
* representation of their operand. If necessary, the operand is sign-
* extended so that it contains the designated bit. None of the single-bit
* operations can produce a BigInteger with a different sign from the
* BigInteger being operated on, as they affect only a single bit, and the
* "infinite word size" abstraction provided by this class ensures that there
* are infinitely many "virtual sign bits" preceding each BigInteger.
*
* <p>For the sake of brevity and clarity, pseudo-code is used throughout the
* descriptions of BigInteger methods. The pseudo-code expression
* {@code (i + j)} is shorthand for "a BigInteger whose value is
* that of the BigInteger {@code i} plus that of the BigInteger {@code j}."
* The pseudo-code expression {@code (i == j)} is shorthand for
* "{@code true} if and only if the BigInteger {@code i} represents the same
* value as the BigInteger {@code j}." Other pseudo-code expressions are
* interpreted similarly.
*
* <p>All methods and constructors in this class throw
* {@code NullPointerException} when passed
* a null object reference for any input parameter.
*
* BigInteger must support values in the range
* -2<sup>{@code Integer.MAX_VALUE}</sup> (exclusive) to
* +2<sup>{@code Integer.MAX_VALUE}</sup> (exclusive)
* and may support values outside of that range.
*
* The range of probable prime values is limited and may be less than
* the full supported positive range of {@code BigInteger}.
* The range must be at least 1 to 2<sup>500000000</sup>.
*
* @implNote
* BigInteger constructors and operations throw {@code ArithmeticException} when
* the result is out of the supported range of
* -2<sup>{@code Integer.MAX_VALUE}</sup> (exclusive) to
* +2<sup>{@code Integer.MAX_VALUE}</sup> (exclusive).
*
* @see BigDecimal
* @author Josh Bloch
* @author Michael McCloskey
* @author Alan Eliasen
* @author Timothy Buktu
* @since JDK1.1
*/
public class BigInteger extends Number implements Comparable<BigInteger> {
/**
 * The signum of this BigInteger: -1 for negative, 0 for zero, or
 * 1 for positive. Note that the BigInteger zero <i>must</i> have
 * a signum of 0. This is necessary to ensure that there is exactly one
 * representation for each BigInteger value.
 *
 * @serial
 */
final int signum;
/**
 * The magnitude of this BigInteger, in <i>big-endian</i> order: the
 * zeroth element of this array is the most-significant int of the
 * magnitude. The magnitude must be "minimal" in that the most-significant
 * int ({@code mag[0]}) must be non-zero. This is necessary to
 * ensure that there is exactly one representation for each BigInteger
 * value. Note that this implies that the BigInteger zero has a
 * zero-length mag array.
 */
final int[] mag;
// These "redundant fields" are initialized with recognizable nonsense
// values, and cached the first time they are needed (or never, if they
// aren't needed).
/**
 * One plus the bitCount of this BigInteger. Zero means uninitialized.
 *
 * @serial
 * @see #bitCount
 * @deprecated Deprecated since logical value is offset from stored
 * value and correction factor is applied in accessor method.
 */
@Deprecated
private int bitCount;
/**
 * One plus the bitLength of this BigInteger. Zero means uninitialized.
 * (either value is acceptable).
 *
 * @serial
 * @see #bitLength()
 * @deprecated Deprecated since logical value is offset from stored
 * value and correction factor is applied in accessor method.
 */
@Deprecated
private int bitLength;
/**
 * Two plus the lowest set bit of this BigInteger, as returned by
 * getLowestSetBit().
 *
 * @serial
 * @see #getLowestSetBit
 * @deprecated Deprecated since logical value is offset from stored
 * value and correction factor is applied in accessor method.
 */
@Deprecated
private int lowestSetBit;
/**
 * Two plus the index of the lowest-order int in the magnitude of this
 * BigInteger that contains a nonzero int, or -2 (either value is acceptable).
 * The least significant int has int-number 0, the next int in order of
 * increasing significance has int-number 1, and so forth.
 * @deprecated Deprecated since logical value is offset from stored
 * value and correction factor is applied in accessor method.
 */
@Deprecated
private int firstNonzeroIntNum;
/**
 * This mask is used to obtain the value of an int as if it were unsigned.
 */
final static long LONG_MASK = 0xffffffffL;
/**
 * This constant limits {@code mag.length} of BigIntegers to the supported
 * range.
 */
private static final int MAX_MAG_LENGTH = Integer.MAX_VALUE / Integer.SIZE + 1; // (1 << 26)
/**
 * Bit lengths larger than this constant can cause overflow in searchLen
 * calculation and in BitSieve.singleSearch method.
 */
private static final int PRIME_SEARCH_BIT_LENGTH_LIMIT = 500000000;
/**
 * The threshold value for using Karatsuba multiplication. If the number
 * of ints in both mag arrays is greater than this number, then
 * Karatsuba multiplication will be used. This value is found
 * experimentally to work well.
 */
private static final int KARATSUBA_THRESHOLD = 80;
/**
 * The threshold value for using 3-way Toom-Cook multiplication.
 * If the number of ints in each mag array is greater than the
 * Karatsuba threshold, and the number of ints in at least one of
 * the mag arrays is greater than this threshold, then Toom-Cook
 * multiplication will be used.
 */
private static final int TOOM_COOK_THRESHOLD = 240;
/**
 * The threshold value for using Karatsuba squaring. If the number
 * of ints in the number is larger than this value,
 * Karatsuba squaring will be used. This value is found
 * experimentally to work well.
 */
private static final int KARATSUBA_SQUARE_THRESHOLD = 128;
/**
 * The threshold value for using Toom-Cook squaring. If the number
 * of ints in the number is larger than this value,
 * Toom-Cook squaring will be used. This value is found
 * experimentally to work well.
 */
private static final int TOOM_COOK_SQUARE_THRESHOLD = 216;
/**
 * The threshold value for using Burnikel-Ziegler division. If the number
 * of ints in the divisor is larger than this value, Burnikel-Ziegler
 * division may be used. This value is found experimentally to work well.
 */
static final int BURNIKEL_ZIEGLER_THRESHOLD = 80;
/**
 * The offset value for using Burnikel-Ziegler division. If the number
 * of ints in the divisor exceeds the Burnikel-Ziegler threshold, and the
 * number of ints in the dividend is greater than the number of ints in the
 * divisor plus this value, Burnikel-Ziegler division will be used. This
 * value is found experimentally to work well.
 */
static final int BURNIKEL_ZIEGLER_OFFSET = 40;
/**
 * The threshold value for using Schoenhage recursive base conversion. If
 * the number of ints in the number is larger than this value,
 * the Schoenhage algorithm will be used. In practice, it appears that the
 * Schoenhage routine is faster for any threshold down to 2, and is
 * relatively flat for thresholds between 2-25, so this choice may be
 * varied within this range for very small effect.
 */
private static final int SCHOENHAGE_BASE_CONVERSION_THRESHOLD = 20;
//Constructors
/**
* Translates a byte array containing the two's-complement binary
* representation of a BigInteger into a BigInteger. The input array is
* assumed to be in <i>big-endian</i> byte-order: the most significant
* byte is in the zeroth element.
*
* @param val big-endian two's-complement binary representation of
* BigInteger.
* @throws NumberFormatException {@code val} is zero bytes long.
*/
public BigInteger(byte[] val) {
if (val.length == 0)
throw new NumberFormatException("Zero length BigInteger");
if (val[0] < 0) {
mag = makePositive(val);
signum = -1;
} else {
mag = stripLeadingZeroBytes(val);
signum = (mag.length == 0 ? 0 : 1);
}
if (mag.length >= MAX_MAG_LENGTH) {
checkRange();
}
}
/**
* This private constructor translates an int array containing the
* two's-complement binary representation of a BigInteger into a
* BigInteger. The input array is assumed to be in <i>big-endian</i>
* int-order: the most significant int is in the zeroth element.
*/
private BigInteger(int[] val) {
if (val.length == 0)
throw new NumberFormatException("Zero length BigInteger");
if (val[0] < 0) {
mag = makePositive(val);
signum = -1;
} else {
mag = trustedStripLeadingZeroInts(val);
signum = (mag.length == 0 ? 0 : 1);
}
if (mag.length >= MAX_MAG_LENGTH) {
checkRange();
}
}
/**
 * Translates the sign-magnitude representation of a BigInteger into a
 * BigInteger. The sign is represented as an integer signum value: -1 for
 * negative, 0 for zero, or 1 for positive. The magnitude is a byte array
 * in <i>big-endian</i> byte-order: the most significant byte is in the
 * zeroth element. A zero-length magnitude array is permissible, and will
 * result in a BigInteger value of 0, whether signum is -1, 0 or 1.
 *
 * @param signum signum of the number (-1 for negative, 0 for zero, 1
 *        for positive).
 * @param magnitude big-endian binary representation of the magnitude of
 *        the number.
 * @throws NumberFormatException {@code signum} is not one of the three
 *         legal values (-1, 0, and 1), or {@code signum} is 0 and
 *         {@code magnitude} contains one or more non-zero bytes.
 */
public BigInteger(int signum, byte[] magnitude) {
    this.mag = stripLeadingZeroBytes(magnitude);
    if (signum < -1 || signum > 1) {
        throw new NumberFormatException("Invalid signum value");
    }
    if (this.mag.length == 0) {
        // An empty magnitude always denotes zero, regardless of signum.
        this.signum = 0;
    } else if (signum == 0) {
        // Non-zero magnitude paired with signum 0 is contradictory.
        throw new NumberFormatException("signum-magnitude mismatch");
    } else {
        this.signum = signum;
    }
    if (mag.length >= MAX_MAG_LENGTH) {
        checkRange();
    }
}
/**
 * A constructor for internal use that translates the sign-magnitude
 * representation of a BigInteger into a BigInteger. It checks the
 * arguments and copies the magnitude so this constructor would be
 * safe for external use.
 */
private BigInteger(int signum, int[] magnitude) {
    this.mag = stripLeadingZeroInts(magnitude);
    if (signum < -1 || signum > 1) {
        throw new NumberFormatException("Invalid signum value");
    }
    if (this.mag.length == 0) {
        // An empty magnitude always denotes zero, regardless of signum.
        this.signum = 0;
    } else if (signum == 0) {
        // Non-zero magnitude paired with signum 0 is contradictory.
        throw new NumberFormatException("signum-magnitude mismatch");
    } else {
        this.signum = signum;
    }
    if (mag.length >= MAX_MAG_LENGTH) {
        checkRange();
    }
}
/**
 * Translates the String representation of a BigInteger in the
 * specified radix into a BigInteger. The String representation
 * consists of an optional minus or plus sign followed by a
 * sequence of one or more digits in the specified radix. The
 * character-to-digit mapping is provided by {@code
 * Character.digit}. The String may not contain any extraneous
 * characters (whitespace, for example).
 *
 * @param val String representation of BigInteger.
 * @param radix radix to be used in interpreting {@code val}.
 * @throws NumberFormatException {@code val} is not a valid representation
 *         of a BigInteger in the specified radix, or {@code radix} is
 *         outside the range from {@link Character#MIN_RADIX} to
 *         {@link Character#MAX_RADIX}, inclusive.
 * @see Character#digit
 */
public BigInteger(String val, int radix) {
    int cursor = 0, numDigits;
    final int len = val.length();
    if (radix < Character.MIN_RADIX || radix > Character.MAX_RADIX)
        throw new NumberFormatException("Radix out of range");
    if (len == 0)
        throw new NumberFormatException("Zero length BigInteger");
    // Check for at most one leading sign; lastIndexOf finds any sign
    // character anywhere, so a position other than 0 means it is embedded.
    int sign = 1;
    int index1 = val.lastIndexOf('-');
    int index2 = val.lastIndexOf('+');
    if (index1 >= 0) {
        if (index1 != 0 || index2 >= 0) {
            throw new NumberFormatException("Illegal embedded sign character");
        }
        sign = -1;
        cursor = 1;
    } else if (index2 >= 0) {
        if (index2 != 0) {
            throw new NumberFormatException("Illegal embedded sign character");
        }
        cursor = 1;
    }
    if (cursor == len)
        throw new NumberFormatException("Zero length BigInteger");
    // Skip leading zeros and compute number of digits in magnitude
    while (cursor < len &&
            Character.digit(val.charAt(cursor), radix) == 0) {
        cursor++;
    }
    if (cursor == len) {
        // All digits were zero: share the canonical zero magnitude.
        signum = 0;
        mag = ZERO.mag;
        return;
    }
    numDigits = len - cursor;
    signum = sign;
    // Pre-allocate array of expected size. May be too large but can
    // never be too small. Typically exact. bitsPerDigit is scaled by
    // 1024, so the >>> 10 undoes the scaling; +1 rounds up.
    long numBits = ((numDigits * bitsPerDigit[radix]) >>> 10) + 1;
    if (numBits + 31 >= (1L << 32)) {
        reportOverflow();
    }
    int numWords = (int) (numBits + 31) >>> 5;
    int[] magnitude = new int[numWords];
    // Process first (potentially short) digit group. Note the embedded
    // side effect: the substring call also advances cursor.
    int firstGroupLen = numDigits % digitsPerInt[radix];
    if (firstGroupLen == 0)
        firstGroupLen = digitsPerInt[radix];
    String group = val.substring(cursor, cursor += firstGroupLen);
    magnitude[numWords - 1] = Integer.parseInt(group, radix);
    if (magnitude[numWords - 1] < 0)
        throw new NumberFormatException("Illegal digit");
    // Process remaining digit groups: multiply the accumulated magnitude
    // by radix^digitsPerInt[radix] and add each group's value in place.
    int superRadix = intRadix[radix];
    int groupVal = 0;
    while (cursor < len) {
        group = val.substring(cursor, cursor += digitsPerInt[radix]);
        groupVal = Integer.parseInt(group, radix);
        if (groupVal < 0)
            throw new NumberFormatException("Illegal digit");
        destructiveMulAdd(magnitude, superRadix, groupVal);
    }
    // Required for cases where the array was overallocated.
    mag = trustedStripLeadingZeroInts(magnitude);
    if (mag.length >= MAX_MAG_LENGTH) {
        checkRange();
    }
}
/*
 * Constructs a new BigInteger using a char array with radix=10.
 * Sign is precalculated outside and not allowed in the val.
 *
 * val  - decimal digit characters; no sign character permitted
 * sign - precomputed signum (assumed -1 or 1 — confirm with callers)
 * len  - number of characters of val to consume, starting at index 0
 */
BigInteger(char[] val, int sign, int len) {
    int cursor = 0, numDigits;
    // Skip leading zeros and compute number of digits in magnitude
    while (cursor < len && Character.digit(val[cursor], 10) == 0) {
        cursor++;
    }
    if (cursor == len) {
        // All digits were zero: share the canonical zero magnitude.
        signum = 0;
        mag = ZERO.mag;
        return;
    }
    numDigits = len - cursor;
    signum = sign;
    // Pre-allocate array of expected size
    int numWords;
    if (len < 10) {
        // Fewer than 10 decimal digits always fits in a single int word.
        numWords = 1;
    } else {
        // bitsPerDigit is scaled by 1024; >>> 10 undoes it, +1 rounds up.
        long numBits = ((numDigits * bitsPerDigit[10]) >>> 10) + 1;
        if (numBits + 31 >= (1L << 32)) {
            reportOverflow();
        }
        numWords = (int) (numBits + 31) >>> 5;
    }
    int[] magnitude = new int[numWords];
    // Process first (potentially short) digit group. Note the embedded
    // side effect: the parseInt argument also advances cursor.
    int firstGroupLen = numDigits % digitsPerInt[10];
    if (firstGroupLen == 0)
        firstGroupLen = digitsPerInt[10];
    magnitude[numWords - 1] = parseInt(val, cursor, cursor += firstGroupLen);
    // Process remaining digit groups: multiply by 10^digitsPerInt[10]
    // and add each group's value in place.
    while (cursor < len) {
        int groupVal = parseInt(val, cursor, cursor += digitsPerInt[10]);
        destructiveMulAdd(magnitude, intRadix[10], groupVal);
    }
    // Strip any leading zero words from an overallocated array.
    mag = trustedStripLeadingZeroInts(magnitude);
    if (mag.length >= MAX_MAG_LENGTH) {
        checkRange();
    }
}
// Create an integer with the digits between the two indexes
// Assumes start < end. The result may be negative, but it
// is to be treated as an unsigned value.
private int parseInt(char[] source, int start, int end) {
int result = Character.digit(source[start++], 10);
if (result == -1)
throw new NumberFormatException(new String(source));
for (int index = start; index < end; index++) {
int nextVal = Character.digit(source[index], 10);
if (nextVal == -1)
throw new NumberFormatException(new String(source));
// Horner accumulation; may wrap past Integer.MAX_VALUE, which is fine
// because callers treat the group value as an unsigned 32-bit quantity.
result = 10*result + nextVal;
}
return result;
}
// bitsPerDigit in the given radix times 1024
// Rounded up to avoid underallocation.
private static long bitsPerDigit[] = { 0, 0,
1024, 1624, 2048, 2378, 2648, 2875, 3072, 3247, 3402, 3543, 3672,
3790, 3899, 4001, 4096, 4186, 4271, 4350, 4426, 4498, 4567, 4633,
4696, 4756, 4814, 4870, 4923, 4975, 5025, 5074, 5120, 5166, 5210,
5253, 5295};
// Multiply x array times word y in place, and add word z.
// x holds the magnitude most-significant-word first; y and z are treated
// as unsigned 32-bit values.
private static void destructiveMulAdd(int[] x, int y, int z) {
    long yLong = y & LONG_MASK;
    long mulCarry = 0;
    // In-place multiply, walking from the least significant word (last
    // array element) toward the most significant.
    for (int i = x.length - 1; i >= 0; i--) {
        long product = yLong * (x[i] & LONG_MASK) + mulCarry;
        x[i] = (int) product;
        mulCarry = product >>> 32;
    }
    // Add z into the least significant word and propagate the carry all
    // the way up.
    long addCarry = z & LONG_MASK;
    for (int i = x.length - 1; i >= 0; i--) {
        long sum = (x[i] & LONG_MASK) + addCarry;
        x[i] = (int) sum;
        addCarry = sum >>> 32;
    }
}
/**
 * Translates the decimal String representation of a BigInteger into a
 * BigInteger. The String representation consists of an optional minus
 * sign followed by a sequence of one or more decimal digits. The
 * character-to-digit mapping is provided by {@code Character.digit}.
 * The String may not contain any extraneous characters (whitespace, for
 * example).
 *
 * @param val decimal String representation of BigInteger.
 * @throws NumberFormatException {@code val} is not a valid representation
 * of a BigInteger.
 * @see Character#digit
 */
public BigInteger(String val) {
// Delegates to the radix-aware constructor with radix 10.
this(val, 10);
}
/**
 * Constructs a randomly generated BigInteger, uniformly distributed over
 * the range 0 to (2<sup>{@code numBits}</sup> - 1), inclusive.
 * The uniformity of the distribution assumes that a fair source of random
 * bits is provided in {@code rnd}. Note that this constructor always
 * constructs a non-negative BigInteger.
 *
 * @param numBits maximum bitLength of the new BigInteger.
 * @param rnd source of randomness to be used in computing the new
 * BigInteger.
 * @throws IllegalArgumentException {@code numBits} is negative.
 * @see #bitLength()
 */
public BigInteger(int numBits, Random rnd) {
// randomBits() yields ceil(numBits/8) bytes with the excess high-order
// bits cleared; sign is forced positive. Delegates to the
// (int signum, byte[] magnitude) constructor (defined outside this chunk).
this(1, randomBits(numBits, rnd));
}
/**
 * Returns ceil(numBits/8) random bytes whose high-order excess bits are
 * masked to zero, so that the value they represent fits in numBits bits.
 *
 * @throws IllegalArgumentException if numBits is negative
 */
private static byte[] randomBits(int numBits, Random rnd) {
    if (numBits < 0)
        throw new IllegalArgumentException("numBits must be non-negative");
    // long arithmetic guards against overflow of numBits + 7.
    int numBytes = (int) (((long) numBits + 7) / 8);
    byte[] bits = new byte[numBytes];
    if (numBytes > 0) {
        rnd.nextBytes(bits);
        // Clear the bits of the first (most significant) byte that lie
        // beyond the requested bit count.
        int excessBits = 8 * numBytes - numBits;
        bits[0] &= (byte) ((1 << (8 - excessBits)) - 1);
    }
    return bits;
}
/**
 * Constructs a randomly generated positive BigInteger that is probably
 * prime, with the specified bitLength.
 *
 * <p>It is recommended that the {@link #probablePrime probablePrime}
 * method be used in preference to this constructor unless there
 * is a compelling need to specify a certainty.
 *
 * @param bitLength bitLength of the returned BigInteger.
 * @param certainty a measure of the uncertainty that the caller is
 * willing to tolerate. The probability that the new BigInteger
 * represents a prime number will exceed
 * (1 - 1/2<sup>{@code certainty}</sup>). The execution time of
 * this constructor is proportional to the value of this parameter.
 * @param rnd source of random bits used to select candidates to be
 * tested for primality.
 * @throws ArithmeticException {@code bitLength < 2} or {@code bitLength} is too large.
 * @see #bitLength()
 */
public BigInteger(int bitLength, int certainty, Random rnd) {
BigInteger prime;
if (bitLength < 2)
throw new ArithmeticException("bitLength < 2");
// Dispatch on size: the sieve-based search in largePrime only pays off
// above SMALL_PRIME_THRESHOLD bits.
prime = (bitLength < SMALL_PRIME_THRESHOLD
? smallPrime(bitLength, certainty, rnd)
: largePrime(bitLength, certainty, rnd));
signum = 1;
mag = prime.mag;
}
// Minimum size in bits that the requested prime number has
// before we use the large prime number generating algorithms.
// The cutoff of 95 was chosen empirically for best performance.
private static final int SMALL_PRIME_THRESHOLD = 95;
// Certainty required to meet the spec of probablePrime:
// the composite-return probability must not exceed 2^-100.
private static final int DEFAULT_PRIME_CERTAINTY = 100;
/**
 * Returns a positive BigInteger that is probably prime, with the
 * specified bitLength. The probability that a BigInteger returned
 * by this method is composite does not exceed 2<sup>-100</sup>.
 *
 * @param bitLength bitLength of the returned BigInteger.
 * @param rnd source of random bits used to select candidates to be
 * tested for primality.
 * @return a BigInteger of {@code bitLength} bits that is probably prime
 * @throws ArithmeticException {@code bitLength < 2} or {@code bitLength} is too large.
 * @see #bitLength()
 * @since 1.4
 */
public static BigInteger probablePrime(int bitLength, Random rnd) {
    if (bitLength < 2)
        throw new ArithmeticException("bitLength < 2");
    // Small candidates are found by rejection sampling; larger ones use
    // a sieve-based search.
    if (bitLength < SMALL_PRIME_THRESHOLD) {
        return smallPrime(bitLength, DEFAULT_PRIME_CERTAINTY, rnd);
    }
    return largePrime(bitLength, DEFAULT_PRIME_CERTAINTY, rnd);
}
/**
 * Find a random number of the specified bitLength that is probably prime.
 * This method is used for smaller primes, its performance degrades on
 * larger bitlengths.
 *
 * This method assumes bitLength > 1.
 */
private static BigInteger smallPrime(int bitLength, int certainty, Random rnd) {
int magLen = (bitLength + 31) >>> 5;
int temp[] = new int[magLen];
// (bitLength+31) & 0x1f == (bitLength-1) mod 32: the position of the
// top bit within the most significant word.
int highBit = 1 << ((bitLength+31) & 0x1f); // High bit of high int
int highMask = (highBit << 1) - 1; // Bits to keep in high int
while (true) {
// Construct a candidate. temp is reused across rejected candidates;
// the accepted candidate exits the loop, so sharing the array with
// the returned BigInteger is safe.
for (int i=0; i < magLen; i++)
temp[i] = rnd.nextInt();
temp[0] = (temp[0] & highMask) | highBit; // Ensure exact length
if (bitLength > 2)
temp[magLen-1] |= 1; // Make odd if bitlen > 2
BigInteger p = new BigInteger(temp, 1);
// Do cheap "pre-test" if applicable: one remainder against the product
// of the small odd primes trial-divides by all of them at once.
if (bitLength > 6) {
long r = p.remainder(SMALL_PRIME_PRODUCT).longValue();
if ((r%3==0) || (r%5==0) || (r%7==0) || (r%11==0) ||
(r%13==0) || (r%17==0) || (r%19==0) || (r%23==0) ||
(r%29==0) || (r%31==0) || (r%37==0) || (r%41==0))
continue; // Candidate is composite; try another
}
// All candidates of bitLength 2 and 3 are prime by this point
if (bitLength < 4)
return p;
// Do expensive test if we survive pre-test (or it's inapplicable)
if (p.primeToCertainty(certainty, rnd))
return p;
}
}
// Product of the odd primes 3..41; taking one remainder against this lets
// the prime search trial-divide a candidate by all of them cheaply.
private static final BigInteger SMALL_PRIME_PRODUCT
= valueOf(3L*5*7*11*13*17*19*23*29*31*37*41);
/**
 * Find a random number of the specified bitLength that is probably prime.
 * This method is more appropriate for larger bitlengths since it uses
 * a sieve to eliminate most composites before using a more expensive
 * test.
 */
private static BigInteger largePrime(int bitLength, int certainty, Random rnd) {
BigInteger p;
// Random base with the top bit forced so the candidate has exactly
// bitLength bits.
p = new BigInteger(bitLength, rnd).setBit(bitLength-1);
// Force p even by clearing its low bit before sieving.
// NOTE(review): presumably BitSieve expects an even base — confirm
// against the BitSieve implementation (outside this chunk).
p.mag[p.mag.length-1] &= 0xfffffffe;
// Use a sieve length likely to contain the next prime number
int searchLen = getPrimeSearchLen(bitLength);
BitSieve searchSieve = new BitSieve(p, searchLen);
BigInteger candidate = searchSieve.retrieve(p, certainty, rnd);
while ((candidate == null) || (candidate.bitLength() != bitLength)) {
// Advance the window; if that drifts past the requested bit length,
// restart from a fresh random base instead.
p = p.add(BigInteger.valueOf(2*searchLen));
if (p.bitLength() != bitLength)
p = new BigInteger(bitLength, rnd).setBit(bitLength-1);
p.mag[p.mag.length-1] &= 0xfffffffe;
searchSieve = new BitSieve(p, searchLen);
candidate = searchSieve.retrieve(p, certainty, rnd);
}
return candidate;
}
/**
 * Returns the first integer greater than this {@code BigInteger} that
 * is probably prime. The probability that the number returned by this
 * method is composite does not exceed 2<sup>-100</sup>. This method will
 * never skip over a prime when searching: if it returns {@code p}, there
 * is no prime {@code q} such that {@code this < q < p}.
 *
 * @return the first integer greater than this {@code BigInteger} that
 * is probably prime.
 * @throws ArithmeticException {@code this < 0} or {@code this} is too large.
 * @since 1.5
 */
public BigInteger nextProbablePrime() {
if (this.signum < 0)
throw new ArithmeticException("start < 0: " + this);
// Handle trivial cases
if ((this.signum == 0) || this.equals(ONE))
return TWO;
BigInteger result = this.add(ONE);
// Fastpath for small numbers: step upward by 2, trial-dividing by the
// small primes before running the expensive probabilistic test.
if (result.bitLength() < SMALL_PRIME_THRESHOLD) {
// Ensure an odd number
if (!result.testBit(0))
result = result.add(ONE);
while (true) {
// Do cheap "pre-test" if applicable
if (result.bitLength() > 6) {
long r = result.remainder(SMALL_PRIME_PRODUCT).longValue();
if ((r%3==0) || (r%5==0) || (r%7==0) || (r%11==0) ||
(r%13==0) || (r%17==0) || (r%19==0) || (r%23==0) ||
(r%29==0) || (r%31==0) || (r%37==0) || (r%41==0)) {
result = result.add(TWO);
continue; // Candidate is composite; try another
}
}
// All candidates of bitLength 2 and 3 are prime by this point
if (result.bitLength() < 4)
return result;
// The expensive test
if (result.primeToCertainty(DEFAULT_PRIME_CERTAINTY, null))
return result;
result = result.add(TWO);
}
}
// Start at previous even number
if (result.testBit(0))
result = result.subtract(ONE);
// Looking for the next large prime: sieve a window, and advance the
// window start by 2*searchLen until a candidate survives.
int searchLen = getPrimeSearchLen(result.bitLength());
while (true) {
BitSieve searchSieve = new BitSieve(result, searchLen);
BigInteger candidate = searchSieve.retrieve(result,
DEFAULT_PRIME_CERTAINTY, null);
if (candidate != null)
return candidate;
result = result.add(BigInteger.valueOf(2 * searchLen));
}
}
/**
 * Returns the sieve-window length used when searching for primes near a
 * candidate of the given bit length; the window grows linearly with the
 * candidate size (64 units per 20 bits).
 *
 * @throws ArithmeticException if bitLength exceeds the implementation limit
 */
private static int getPrimeSearchLen(int bitLength) {
    if (bitLength > PRIME_SEARCH_BIT_LENGTH_LIMIT + 1)
        throw new ArithmeticException("Prime search implementation restriction on bitLength");
    return 64 * (bitLength / 20);
}
/**
 * Returns {@code true} if this BigInteger is probably prime,
 * {@code false} if it's definitely composite.
 *
 * This method assumes bitLength > 2.
 *
 * @param certainty a measure of the uncertainty that the caller is
 * willing to tolerate: if the call returns {@code true}
 * the probability that this BigInteger is prime exceeds
 * {@code (1 - 1/2<sup>certainty</sup>)}. The execution time of
 * this method is proportional to the value of this parameter.
 * @return {@code true} if this BigInteger is probably prime,
 * {@code false} if it's definitely composite.
 */
boolean primeToCertainty(int certainty, Random random) {
    // Number of rounds needed for the requested certainty; each round
    // contributes roughly two bits of confidence. The relationship
    // between certainty and rounds is given in the draft standard
    // ANSI X9.80, "PRIME NUMBER GENERATION, PRIMALITY TESTING, AND
    // PRIMALITY CERTIFICATES".
    int n = (Math.min(certainty, Integer.MAX_VALUE - 1) + 1) / 2;
    int sizeInBits = this.bitLength();
    // Tiny numbers: Miller-Rabin alone, capped at 50 rounds.
    if (sizeInBits < 100) {
        return passesMillerRabin(Math.min(n, 50), random);
    }
    // Larger numbers need fewer rounds per the standard's table, but are
    // additionally screened with a Lucas-Lehmer test.
    int rounds;
    if (sizeInBits < 256) {
        rounds = 27;
    } else if (sizeInBits < 512) {
        rounds = 15;
    } else if (sizeInBits < 768) {
        rounds = 8;
    } else if (sizeInBits < 1024) {
        rounds = 4;
    } else {
        rounds = 2;
    }
    rounds = Math.min(n, rounds);
    return passesMillerRabin(rounds, random) && passesLucasLehmer();
}
/**
 * Returns true iff this BigInteger is a Lucas-Lehmer probable prime.
 *
 * The following assumptions are made:
 * This BigInteger is a positive, odd number.
 */
private boolean passesLucasLehmer() {
    BigInteger nPlusOne = this.add(ONE);
    // Step 1: pick d from the sequence 5, -7, 9, -11, ... until the
    // Jacobi symbol (d/this) is -1.
    int d = 5;
    while (jacobiSymbol(d, this) != -1)
        d = (d < 0) ? Math.abs(d) + 2 : -(d + 2);
    // Steps 2-3: the Lucas sequence term U_{this+1} must vanish mod this.
    BigInteger u = lucasLehmerSequence(d, nPlusOne, this);
    return u.mod(this).equals(ZERO);
}
/**
 * Computes Jacobi(p,n).
 * Assumes n positive, odd, n>=3.
 *
 * Binary Jacobi algorithm: strip factors of 2 from p (each pair of
 * factors leaves the symbol unchanged; a single factor flips the sign
 * when n is 3 or 5 mod 8), then swap arguments using quadratic
 * reciprocity (sign flips when both are 3 mod 4) and reduce.
 */
private static int jacobiSymbol(int p, BigInteger n) {
if (p == 0)
return 0;
// Algorithm and comments adapted from Colin Plumb's C library.
int j = 1;
// Only n's low word matters for the mod-8 / mod-4 sign rules below.
int u = n.mag[n.mag.length-1];
// Make p positive
if (p < 0) {
p = -p;
int n8 = u & 7;
if ((n8 == 3) || (n8 == 7))
j = -j; // 3 (011) or 7 (111) mod 8
}
// Get rid of factors of 2 in p
while ((p & 3) == 0)
p >>= 2;
if ((p & 1) == 0) {
p >>= 1;
if (((u ^ (u>>1)) & 2) != 0)
j = -j; // 3 (011) or 5 (101) mod 8
}
if (p == 1)
return j;
// Then, apply quadratic reciprocity
if ((p & u & 2) != 0) // p = u = 3 (mod 4)?
j = -j;
// And reduce u mod p
u = n.mod(BigInteger.valueOf(p)).intValue();
// Now compute Jacobi(u,p), u < p
while (u != 0) {
while ((u & 3) == 0)
u >>= 2;
if ((u & 1) == 0) {
u >>= 1;
if (((p ^ (p>>1)) & 2) != 0)
j = -j; // 3 (011) or 5 (101) mod 8
}
if (u == 1)
return j;
// Now both u and p are odd, so use quadratic reciprocity
assert (u < p);
int t = u; u = p; p = t;
if ((u & p & 2) != 0) // u = p = 3 (mod 4)?
j = -j;
// Now u >= p, so it can be reduced
u %= p;
}
// u reached 0 with p != 1: gcd(p, n) > 1, so the symbol is 0.
return 0;
}
// Computes the Lucas sequence term U_k (mod n) for discriminant z via a
// binary double-and-add recurrence over the bits of k.
// NOTE(review): the exact sequence parameters (P, Q) are inferred from
// standard Lucas-test usage — confirm against passesLucasLehmer.
// Assumes n is odd (the halving trick below requires it).
private static BigInteger lucasLehmerSequence(int z, BigInteger k, BigInteger n) {
BigInteger d = BigInteger.valueOf(z);
BigInteger u = ONE; BigInteger u2;
BigInteger v = ONE; BigInteger v2;
for (int i=k.bitLength()-2; i >= 0; i--) {
// Doubling step: (u, v) -> (u*v, (v^2 + d*u^2)/2), all mod n.
u2 = u.multiply(v).mod(n);
v2 = v.square().add(d.multiply(u.square())).mod(n);
// Halve mod n: if the value is odd, subtract odd n to make it even;
// the (possibly negative) result stays congruent mod n after >> 1.
if (v2.testBit(0))
v2 = v2.subtract(n);
v2 = v2.shiftRight(1);
u = u2; v = v2;
if (k.testBit(i)) {
// Add-one step: (u, v) -> ((u+v)/2, (v + d*u)/2), all mod n.
u2 = u.add(v).mod(n);
if (u2.testBit(0))
u2 = u2.subtract(n);
u2 = u2.shiftRight(1);
v2 = v.add(d.multiply(u)).mod(n);
if (v2.testBit(0))
v2 = v2.subtract(n);
v2 = v2.shiftRight(1);
u = u2; v = v2;
}
}
return u;
}
/**
 * Returns true iff this BigInteger passes the specified number of
 * Miller-Rabin tests. This test is taken from the DSA spec (NIST FIPS
 * 186-2).
 *
 * The following assumptions are made:
 * This BigInteger is a positive, odd number greater than 2.
 * iterations<=50.
 */
private boolean passesMillerRabin(int iterations, Random rnd) {
// Find a and m such that m is odd and this == 1 + 2**a * m
BigInteger thisMinusOne = this.subtract(ONE);
BigInteger m = thisMinusOne;
int a = m.getLowestSetBit();
m = m.shiftRight(a);
// Do the tests
if (rnd == null) {
rnd = ThreadLocalRandom.current();
}
for (int i=0; i < iterations; i++) {
// Generate a uniform random on (1, this) by rejection sampling.
BigInteger b;
do {
b = new BigInteger(this.bitLength(), rnd);
} while (b.compareTo(ONE) <= 0 || b.compareTo(this) >= 0);
// Repeatedly square b^m mod this; the candidate survives this witness
// only if b^m == 1 immediately or some square reaches this-1 before
// a squarings have been performed.
int j = 0;
BigInteger z = b.modPow(m, this);
while (!((j == 0 && z.equals(ONE)) || z.equals(thisMinusOne))) {
if (j > 0 && z.equals(ONE) || ++j == a)
return false;
z = z.modPow(TWO, this);
}
}
return true;
}
/**
 * This internal constructor differs from its public cousin
 * with the arguments reversed in two ways: it assumes that its
 * arguments are correct, and it doesn't copy the magnitude array.
 */
BigInteger(int[] magnitude, int signum) {
    // A zero-length magnitude always denotes zero, whatever sign was passed.
    if (magnitude.length == 0) {
        this.signum = 0;
    } else {
        this.signum = signum;
    }
    this.mag = magnitude;
    if (mag.length >= MAX_MAG_LENGTH) {
        checkRange();
    }
}
/**
 * This private constructor is for internal use and assumes that its
 * arguments are correct. The byte-array magnitude is converted to the
 * internal int[] form with leading zero bytes removed.
 */
private BigInteger(byte[] magnitude, int signum) {
    // A zero-length magnitude always denotes zero, whatever sign was passed.
    if (magnitude.length == 0) {
        this.signum = 0;
    } else {
        this.signum = signum;
    }
    this.mag = stripLeadingZeroBytes(magnitude);
    if (mag.length >= MAX_MAG_LENGTH) {
        checkRange();
    }
}
/**
 * Throws an {@code ArithmeticException} if the {@code BigInteger} would be
 * out of the supported range.
 *
 * @throws ArithmeticException if {@code this} exceeds the supported range.
 */
private void checkRange() {
    boolean tooManyWords = mag.length > MAX_MAG_LENGTH;
    // At exactly MAX_MAG_LENGTH words the value still overflows when the
    // top word's sign bit is set.
    boolean topBitSet = mag.length == MAX_MAG_LENGTH && mag[0] < 0;
    if (tooManyWords || topBitSet) {
        reportOverflow();
    }
}
/**
 * Unconditionally throws; shared failure path for the magnitude-overflow
 * checks in the constructors and {@code checkRange}.
 */
private static void reportOverflow() {
throw new ArithmeticException("BigInteger would overflow supported range");
}
//Static Factory Methods
/**
 * Returns a BigInteger whose value is equal to that of the
 * specified {@code long}. This "static factory method" is
 * provided in preference to a ({@code long}) constructor
 * because it allows for reuse of frequently used BigIntegers.
 *
 * @param val value of the BigInteger to return.
 * @return a BigInteger with the specified value.
 */
public static BigInteger valueOf(long val) {
    if (val == 0)
        return ZERO;
    // Serve small magnitudes (|val| <= MAX_CONSTANT) from the
    // preconstructed constant tables.
    if (val > 0) {
        if (val <= MAX_CONSTANT)
            return posConst[(int) val];
    } else if (val >= -MAX_CONSTANT) {
        return negConst[(int) -val];
    }
    return new BigInteger(val);
}
/**
* Constructs a BigInteger with the specified value, which may not be zero.
*/
private BigInteger(long val) {
if (val < 0) {
val = -val;
signum = -1;
} else {
signum = 1;
}
int highWord = (int)(val >>> 32);
if (highWord == 0) {
mag = new int[1];
mag[0] = (int)val;
} else {
mag = new int[2];
mag[0] = highWord;
mag[1] = (int)val;
}
}
/**
 * Returns a BigInteger with the given two's complement representation.
 * Assumes that the input array will not be modified (the returned
 * BigInteger will reference the input array if feasible).
 */
// val[0] > 0 means the two's complement value is positive with no
// sign-extension concerns, so the array can be used directly as a
// magnitude. Other cases go through the BigInteger(int[]) constructor
// (defined outside this chunk), which interprets the two's complement form.
private static BigInteger valueOf(int val[]) {
return (val[0] > 0 ? new BigInteger(val, 1) : new BigInteger(val));
}
// Constants
/**
 * Initialize static constant array when class is loaded.
 */
private final static int MAX_CONSTANT = 16;
// Cached BigIntegers for the values 1..MAX_CONSTANT (index 0 unused);
// populated once in the static initializer below and not reassigned
// anywhere in this chunk.
private static BigInteger posConst[] = new BigInteger[MAX_CONSTANT+1];
private static BigInteger negConst[] = new BigInteger[MAX_CONSTANT+1];
/**
 * The cache of powers of each radix. This allows us to not have to
 * recalculate powers of radix^(2^n) more than once. This speeds
 * Schoenhage recursive base conversion significantly.
 */
// volatile suggests the whole array is replaced when the cache grows —
// NOTE(review): confirm against the cache-growing code (outside this chunk).
private static volatile BigInteger[][] powerCache;
/** The cache of logarithms of radices for base conversion. */
private static final double[] logCache;
/** The natural log of 2. This is used in computing cache indices. */
private static final double LOG_TWO = Math.log(2.0);
static {
// Note: posConst[i] and negConst[i] deliberately share the same
// magnitude array; BigInteger(int[], int) does not copy its argument.
for (int i = 1; i <= MAX_CONSTANT; i++) {
int[] magnitude = new int[1];
magnitude[0] = i;
posConst[i] = new BigInteger(magnitude, 1);
negConst[i] = new BigInteger(magnitude, -1);
}
/*
 * Initialize the cache of radix^(2^x) values used for base conversion
 * with just the very first value. Additional values will be created
 * on demand.
 */
powerCache = new BigInteger[Character.MAX_RADIX+1][];
logCache = new double[Character.MAX_RADIX+1];
for (int i=Character.MIN_RADIX; i <= Character.MAX_RADIX; i++) {
powerCache[i] = new BigInteger[] { BigInteger.valueOf(i) };
logCache[i] = Math.log(i);
}
}
/**
 * The BigInteger constant zero.
 *
 * @since 1.2
 */
// Zero is canonically represented by signum 0 and a zero-length magnitude.
public static final BigInteger ZERO = new BigInteger(new int[0], 0);
/**
 * The BigInteger constant one.
 *
 * @since 1.2
 */
public static final BigInteger ONE = valueOf(1);
/**
 * The BigInteger constant two. (Not exported.)
 */
private static final BigInteger TWO = valueOf(2);
/**
 * The BigInteger constant -1. (Not exported.)
 */
private static final BigInteger NEGATIVE_ONE = valueOf(-1);
/**
 * The BigInteger constant ten.
 *
 * @since 1.5
 */
public static final BigInteger TEN = valueOf(10);
// Arithmetic Operations
/**
 * Returns a BigInteger whose value is {@code (this + val)}.
 *
 * @param val value to be added to this BigInteger.
 * @return {@code this + val}
 */
public BigInteger add(BigInteger val) {
    // Adding zero on either side returns the other operand unchanged.
    if (val.signum == 0)
        return this;
    if (signum == 0)
        return val;
    // Same sign: magnitudes simply add.
    if (val.signum == signum)
        return new BigInteger(add(mag, val.mag), signum);
    int cmp = compareMagnitude(val);
    if (cmp == 0)
        return ZERO;
    // Opposite signs: subtract the smaller magnitude from the larger; the
    // result takes the sign of the operand with the larger magnitude.
    int[] resultMag = (cmp > 0) ? subtract(mag, val.mag) : subtract(val.mag, mag);
    resultMag = trustedStripLeadingZeroInts(resultMag);
    return new BigInteger(resultMag, cmp == signum ? 1 : -1);
}
/**
 * Package private methods used by BigDecimal code to add a BigInteger
 * with a long. Assumes val is not equal to INFLATED.
 */
// The "val != INFLATED" (Long.MIN_VALUE) precondition is what makes the
// Math.abs(val) calls below safe: abs(Long.MIN_VALUE) would remain negative.
BigInteger add(long val) {
if (val == 0)
return this;
if (signum == 0)
return valueOf(val);
// Same sign: magnitudes simply add.
if (Long.signum(val) == signum)
return new BigInteger(add(mag, Math.abs(val)), signum);
int cmp = compareMagnitude(val);
if (cmp == 0)
return ZERO;
// Opposite signs: subtract the smaller magnitude from the larger; the
// result takes the sign of the operand with the larger magnitude.
int[] resultMag = (cmp > 0 ? subtract(mag, Math.abs(val)) : subtract(Math.abs(val), mag));
resultMag = trustedStripLeadingZeroInts(resultMag);
return new BigInteger(resultMag, cmp == signum ? 1 : -1);
}
/**
 * Adds the contents of the int array x and long value val. This
 * method allocates a new int array to hold the answer and returns
 * a reference to that array. Assumes x.length > 0 and val is
 * non-negative.
 */
private static int[] add(int[] x, long val) {
    long sum = 0;
    int xIndex = x.length;
    int[] result;
    int highWord = (int)(val >>> 32);
    if (highWord == 0) {
        // val fits in one word: add it into the least significant word.
        result = new int[xIndex];
        sum = (x[--xIndex] & LONG_MASK) + val;
        result[xIndex] = (int)sum;
    } else {
        if (xIndex == 1) {
            // One-word x plus a two-word val: result needs exactly two words.
            result = new int[2];
            sum = val + (x[0] & LONG_MASK);
            result[1] = (int)sum;
            result[0] = (int)(sum >>> 32);
            return result;
        } else {
            // Add val's low and high words into the two low words of x.
            result = new int[xIndex];
            sum = (x[--xIndex] & LONG_MASK) + (val & LONG_MASK);
            result[xIndex] = (int)sum;
            sum = (x[--xIndex] & LONG_MASK) + (highWord & LONG_MASK) + (sum >>> 32);
            result[xIndex] = (int)sum;
        }
    }
    // Copy remainder of longer number while carry propagation is required.
    // Note: --xIndex is applied before x[xIndex] is read, so both sides of
    // the assignment use the same (decremented) index.
    boolean carry = (sum >>> 32 != 0);
    while (xIndex > 0 && carry)
        carry = ((result[--xIndex] = x[xIndex] + 1) == 0);
    // Copy remainder of longer number
    while (xIndex > 0)
        result[--xIndex] = x[xIndex];
    // Grow result if necessary: a carry out of the top word needs one more.
    if (carry) {
        int bigger[] = new int[result.length + 1];
        System.arraycopy(result, 0, bigger, 1, result.length);
        bigger[0] = 0x01;
        return bigger;
    }
    return result;
}
/**
 * Adds the contents of the int arrays x and y. This method allocates
 * a new int array to hold the answer and returns a reference to that
 * array.
 */
private static int[] add(int[] x, int[] y) {
// If x is shorter, swap the two arrays
if (x.length < y.length) {
int[] tmp = x;
x = y;
y = tmp;
}
int xIndex = x.length;
int yIndex = y.length;
int result[] = new int[xIndex];
long sum = 0;
if (yIndex == 1) {
// Single-word y: one add, then carry propagation below.
sum = (x[--xIndex] & LONG_MASK) + (y[0] & LONG_MASK) ;
result[xIndex] = (int)sum;
} else {
// Add common parts of both numbers
while (yIndex > 0) {
sum = (x[--xIndex] & LONG_MASK) +
(y[--yIndex] & LONG_MASK) + (sum >>> 32);
result[xIndex] = (int)sum;
}
}
// Copy remainder of longer number while carry propagation is required.
// Note: --xIndex is applied before x[xIndex] is read, so both sides of
// the assignment use the same (decremented) index.
boolean carry = (sum >>> 32 != 0);
while (xIndex > 0 && carry)
carry = ((result[--xIndex] = x[xIndex] + 1) == 0);
// Copy remainder of longer number
while (xIndex > 0)
result[--xIndex] = x[xIndex];
// Grow result if necessary: a carry out of the top word needs one more.
if (carry) {
int bigger[] = new int[result.length + 1];
System.arraycopy(result, 0, bigger, 1, result.length);
bigger[0] = 0x01;
return bigger;
}
return result;
}
// Subtracts the magnitude array little (length 1 or 2) from the
// non-negative long val. Callers guarantee val represents the larger
// magnitude, so no final borrow can remain.
private static int[] subtract(long val, int[] little) {
int highWord = (int)(val >>> 32);
if (highWord == 0) {
// Both operands fit in one word.
int result[] = new int[1];
result[0] = (int)(val - (little[0] & LONG_MASK));
return result;
} else {
int result[] = new int[2];
if (little.length == 1) {
// ((int)val & LONG_MASK) is the unsigned low 32 bits of val.
long difference = ((int)val & LONG_MASK) - (little[0] & LONG_MASK);
result[1] = (int)difference;
// Subtract remainder of longer number while borrow propagates
boolean borrow = (difference >> 32 != 0);
if (borrow) {
result[0] = highWord - 1;
} else { // Copy remainder of longer number
result[0] = highWord;
}
return result;
} else { // little.length == 2
// Word-by-word subtract; (difference >> 32) carries the borrow
// (0 or -1) into the high word.
long difference = ((int)val & LONG_MASK) - (little[1] & LONG_MASK);
result[1] = (int)difference;
difference = (highWord & LONG_MASK) - (little[0] & LONG_MASK) + (difference >> 32);
result[0] = (int)difference;
return result;
}
}
}
/**
 * Subtracts the contents of the second argument (val) from the
 * first (big). The first int array (big) must represent a larger number
 * than the second. This method allocates the space necessary to hold the
 * answer.
 * assumes val >= 0
 */
private static int[] subtract(int[] big, long val) {
int highWord = (int)(val >>> 32);
int bigIndex = big.length;
int result[] = new int[bigIndex];
long difference = 0;
if (highWord == 0) {
// val fits in one word: single subtract, borrow handled below.
difference = (big[--bigIndex] & LONG_MASK) - val;
result[bigIndex] = (int)difference;
} else {
// Subtract val's low and high words; (difference >> 32) carries the
// borrow (0 or -1) into the next word.
difference = (big[--bigIndex] & LONG_MASK) - (val & LONG_MASK);
result[bigIndex] = (int)difference;
difference = (big[--bigIndex] & LONG_MASK) - (highWord & LONG_MASK) + (difference >> 32);
result[bigIndex] = (int)difference;
}
// Subtract remainder of longer number while borrow propagates.
// Note: --bigIndex is applied before big[bigIndex] is read, so both
// sides of the assignment use the same (decremented) index.
boolean borrow = (difference >> 32 != 0);
while (bigIndex > 0 && borrow)
borrow = ((result[--bigIndex] = big[bigIndex] - 1) == -1);
// Copy remainder of longer number
while (bigIndex > 0)
result[--bigIndex] = big[bigIndex];
return result;
}
/**
 * Returns a BigInteger whose value is {@code (this - val)}.
 *
 * @param val value to be subtracted from this BigInteger.
 * @return {@code this - val}
 */
public BigInteger subtract(BigInteger val) {
    // Subtracting zero returns this; subtracting from zero negates val.
    if (val.signum == 0)
        return this;
    if (signum == 0)
        return val.negate();
    // Differing signs: the magnitudes add, keeping this number's sign.
    if (val.signum != signum)
        return new BigInteger(add(mag, val.mag), signum);
    int cmp = compareMagnitude(val);
    if (cmp == 0)
        return ZERO;
    // Same sign: subtract the smaller magnitude from the larger; the sign
    // flips when |this| < |val|.
    int[] resultMag = (cmp > 0) ? subtract(mag, val.mag) : subtract(val.mag, mag);
    resultMag = trustedStripLeadingZeroInts(resultMag);
    return new BigInteger(resultMag, cmp == signum ? 1 : -1);
}
/**
 * Subtracts the contents of the second int arrays (little) from the
 * first (big). The first int array (big) must represent a larger number
 * than the second. This method allocates the space necessary to hold the
 * answer.
 */
private static int[] subtract(int[] big, int[] little) {
int bigIndex = big.length;
int result[] = new int[bigIndex];
int littleIndex = little.length;
long difference = 0;
// Subtract common parts of both numbers; (difference >> 32) carries the
// borrow (0 or -1) into the next, more significant word.
while (littleIndex > 0) {
difference = (big[--bigIndex] & LONG_MASK) -
(little[--littleIndex] & LONG_MASK) +
(difference >> 32);
result[bigIndex] = (int)difference;
}
// Subtract remainder of longer number while borrow propagates.
// Note: --bigIndex is applied before big[bigIndex] is read, so both
// sides of the assignment use the same (decremented) index.
boolean borrow = (difference >> 32 != 0);
while (bigIndex > 0 && borrow)
borrow = ((result[--bigIndex] = big[bigIndex] - 1) == -1);
// Copy remainder of longer number
while (bigIndex > 0)
result[--bigIndex] = big[bigIndex];
return result;
}
/**
 * Returns a BigInteger whose value is {@code (this * val)}.
 *
 * @param val value to be multiplied by this BigInteger.
 * @return {@code this * val}
 */
public BigInteger multiply(BigInteger val) {
    if (signum == 0 || val.signum == 0)
        return ZERO;
    int xlen = mag.length;
    int ylen = val.mag.length;
    // Both operands large: divide-and-conquer algorithms pay off.
    if (xlen >= KARATSUBA_THRESHOLD && ylen >= KARATSUBA_THRESHOLD) {
        if (xlen >= TOOM_COOK_THRESHOLD || ylen >= TOOM_COOK_THRESHOLD) {
            return multiplyToomCook3(this, val);
        }
        return multiplyKaratsuba(this, val);
    }
    // Otherwise use the "grade-school" algorithm, with a dedicated fast
    // path when either operand is a single word.
    int resultSign = (signum == val.signum) ? 1 : -1;
    if (ylen == 1)
        return multiplyByInt(mag, val.mag[0], resultSign);
    if (xlen == 1)
        return multiplyByInt(val.mag, mag[0], resultSign);
    int[] result = multiplyToLen(mag, xlen, val.mag, ylen, null);
    result = trustedStripLeadingZeroInts(result);
    return new BigInteger(result, resultSign);
}
// Multiplies the magnitude array x by y (treated as an unsigned 32-bit
// value), returning a BigInteger with the given sign.
private static BigInteger multiplyByInt(int[] x, int y, int sign) {
// Power-of-two multiplier: a left shift is cheaper than a multiply.
if (Integer.bitCount(y) == 1) {
return new BigInteger(shiftLeft(x,Integer.numberOfTrailingZeros(y)), sign);
}
int xlen = x.length;
// One extra word for a possible carry out of the top.
int[] rmag = new int[xlen + 1];
long carry = 0;
long yl = y & LONG_MASK;
int rstart = rmag.length - 1;
for (int i = xlen - 1; i >= 0; i--) {
long product = (x[i] & LONG_MASK) * yl + carry;
rmag[rstart--] = (int)product;
carry = product >>> 32;
}
// Drop the unused leading word when no carry spilled into it.
if (carry == 0L) {
rmag = java.util.Arrays.copyOfRange(rmag, 1, rmag.length);
} else {
rmag[rstart] = (int)carry;
}
return new BigInteger(rmag, sign);
}
/**
 * Package private methods used by BigDecimal code to multiply a BigInteger
 * with a long. Assumes v is not equal to INFLATED.
 */
BigInteger multiply(long v) {
if (v == 0 || signum == 0)
return ZERO;
// INFLATED (Long.MIN_VALUE) cannot be negated below; defensively take
// the general BigInteger path.
if (v == BigDecimal.INFLATED)
return multiply(BigInteger.valueOf(v));
int rsign = (v > 0 ? signum : -signum);
if (v < 0)
v = -v;
long dh = v >>> 32; // higher order bits
long dl = v & LONG_MASK; // lower order bits
int xlen = mag.length;
int[] value = mag;
int[] rmag = (dh == 0L) ? (new int[xlen + 1]) : (new int[xlen + 2]);
long carry = 0;
int rstart = rmag.length - 1;
// Multiply by the low word of v.
for (int i = xlen - 1; i >= 0; i--) {
long product = (value[i] & LONG_MASK) * dl + carry;
rmag[rstart--] = (int)product;
carry = product >>> 32;
}
rmag[rstart] = (int)carry;
// Multiply by the high word (if any), accumulating one word higher.
if (dh != 0L) {
carry = 0;
rstart = rmag.length - 2;
for (int i = xlen - 1; i >= 0; i--) {
long product = (value[i] & LONG_MASK) * dh +
(rmag[rstart] & LONG_MASK) + carry;
rmag[rstart--] = (int)product;
carry = product >>> 32;
}
rmag[0] = (int)carry;
}
// Drop the unused leading word when no carry reached it.
if (carry == 0L)
rmag = java.util.Arrays.copyOfRange(rmag, 1, rmag.length);
return new BigInteger(rmag, rsign);
}
/**
 * Multiplies int arrays x and y to the specified lengths and places
 * the result into z. There will be no leading zeros in the resultant array.
 */
// NOTE(review): uses no instance state and could be declared static; left
// as-is to preserve the existing interface.
private int[] multiplyToLen(int[] x, int xlen, int[] y, int ylen, int[] z) {
int xstart = xlen - 1;
int ystart = ylen - 1;
// Reuse the caller's buffer when it is large enough.
if (z == null || z.length < (xlen+ ylen))
z = new int[xlen+ylen];
long carry = 0;
// First pass: multiply y by the least significant word of x.
for (int j=ystart, k=ystart+1+xstart; j >= 0; j--, k--) {
long product = (y[j] & LONG_MASK) *
(x[xstart] & LONG_MASK) + carry;
z[k] = (int)product;
carry = product >>> 32;
}
z[xstart] = (int)carry;
// Remaining passes: multiply-accumulate one word of x at a time into z.
for (int i = xstart-1; i >= 0; i--) {
carry = 0;
for (int j=ystart, k=ystart+1+i; j >= 0; j--, k--) {
long product = (y[j] & LONG_MASK) *
(x[i] & LONG_MASK) +
(z[k] & LONG_MASK) + carry;
z[k] = (int)product;
carry = product >>> 32;
}
z[i] = (int)carry;
}
return z;
}
/**
 * Multiplies two BigIntegers using the Karatsuba multiplication
 * algorithm. This is a recursive divide-and-conquer algorithm which is
 * more efficient for large numbers than what is commonly called the
 * "grade-school" algorithm used in multiplyToLen. If the numbers to be
 * multiplied have length n, the "grade-school" algorithm has an
 * asymptotic complexity of O(n^2). In contrast, the Karatsuba algorithm
 * has complexity of O(n^(log2(3))), or O(n^1.585). It achieves this
 * increased performance by doing 3 multiplies instead of 4 when
 * evaluating the product. As it has some overhead, should be used when
 * both numbers are larger than a certain threshold (found
 * experimentally).
 *
 * See: http://en.wikipedia.org/wiki/Karatsuba_algorithm
 */
private static BigInteger multiplyKaratsuba(BigInteger x, BigInteger y) {
int xlen = x.mag.length;
int ylen = y.mag.length;
// The number of ints in each half of the number.
int half = (Math.max(xlen, ylen)+1) / 2;
// xl and yl are the lower halves of x and y respectively,
// xh and yh are the upper halves.
BigInteger xl = x.getLower(half);
BigInteger xh = x.getUpper(half);
BigInteger yl = y.getLower(half);
BigInteger yh = y.getUpper(half);
BigInteger p1 = xh.multiply(yh); // p1 = xh*yh
BigInteger p2 = xl.multiply(yl); // p2 = xl*yl
// p3=(xh+xl)*(yh+yl)
BigInteger p3 = xh.add(xl).multiply(yh.add(yl));
// result = p1 * 2^(32*2*half) + (p3 - p1 - p2) * 2^(32*half) + p2
// (the middle term p3 - p1 - p2 equals xh*yl + xl*yh)
BigInteger result = p1.shiftLeft(32*half).add(p3.subtract(p1).subtract(p2)).shiftLeft(32*half).add(p2);
// Magnitudes were multiplied; fix the sign for mixed-sign inputs.
if (x.signum != y.signum) {
return result.negate();
} else {
return result;
}
}
/**
 * Multiplies two BigIntegers using a 3-way Toom-Cook multiplication
 * algorithm. This is a recursive divide-and-conquer algorithm which is
 * more efficient for large numbers than what is commonly called the
 * "grade-school" algorithm used in multiplyToLen. If the numbers to be
 * multiplied have length n, the "grade-school" algorithm has an
 * asymptotic complexity of O(n^2). In contrast, 3-way Toom-Cook has a
 * complexity of about O(n^1.465). It achieves this increased asymptotic
 * performance by breaking each number into three parts and by doing 5
 * multiplies instead of 9 when evaluating the product. Due to overhead
 * (additions, shifts, and one division) in the Toom-Cook algorithm, it
 * should only be used when both numbers are larger than a certain
 * threshold (found experimentally). This threshold is generally larger
 * than that for Karatsuba multiplication, so this algorithm is generally
 * only used when numbers become significantly larger.
 *
 * The algorithm used is the "optimal" 3-way Toom-Cook algorithm outlined
 * by Marco Bodrato.
 *
 * See: http://bodrato.it/toom-cook/
 * http://bodrato.it/papers/#WAIFI2007
 *
 * "Towards Optimal Toom-Cook Multiplication for Univariate and
 * Multivariate Polynomials in Characteristic 2 and 0." by Marco BODRATO;
 * In C.Carlet and B.Sunar, Eds., "WAIFI'07 proceedings", p. 116-133,
 * LNCS #4547. Springer, Madrid, Spain, June 21-22, 2007.
 *
 */
private static BigInteger multiplyToomCook3(BigInteger a, BigInteger b) {
int alen = a.mag.length;
int blen = b.mag.length;
int largest = Math.max(alen, blen);
// k is the size (in ints) of the lower-order slices.
int k = (largest+2)/3; // Equal to ceil(largest/3)
// r is the size (in ints) of the highest-order slice.
int r = largest - 2*k;
// Obtain slices of the numbers. a2 and b2 are the most significant
// bits of the numbers a and b, and a0 and b0 the least significant.
BigInteger a0, a1, a2, b0, b1, b2;
a2 = a.getToomSlice(k, r, 0, largest);
a1 = a.getToomSlice(k, r, 1, largest);
a0 = a.getToomSlice(k, r, 2, largest);
b2 = b.getToomSlice(k, r, 0, largest);
b1 = b.getToomSlice(k, r, 1, largest);
b0 = b.getToomSlice(k, r, 2, largest);
BigInteger v0, v1, v2, vm1, vinf, t1, t2, tm1, da1, db1;
// Evaluation phase: with a(t) = a2*t^2 + a1*t + a0 (and likewise b),
// compute v0 = a(0)b(0), vm1 = a(-1)b(-1), v1 = a(1)b(1),
// v2 = a(2)b(2), and vinf = a2*b2 (the leading coefficients).
v0 = a0.multiply(b0);
da1 = a2.add(a0);
db1 = b2.add(b0);
vm1 = da1.subtract(a1).multiply(db1.subtract(b1));
da1 = da1.add(a1);
db1 = db1.add(b1);
v1 = da1.multiply(db1);
v2 = da1.add(a2).shiftLeft(1).subtract(a0).multiply(
db1.add(b2).shiftLeft(1).subtract(b0));
vinf = a2.multiply(b2);
// Interpolation phase.
// The algorithm requires two divisions by 2 and one by 3.
// All divisions are known to be exact, that is, they do not produce
// remainders, and all results are positive. The divisions by 2 are
// implemented as right shifts which are relatively efficient, leaving
// only an exact division by 3, which is done by a specialized
// linear-time algorithm.
t2 = v2.subtract(vm1).exactDivideBy3();
tm1 = v1.subtract(vm1).shiftRight(1);
t1 = v1.subtract(v0);
t2 = t2.subtract(t1).shiftRight(1);
t1 = t1.subtract(tm1).subtract(vinf);
t2 = t2.subtract(vinf.shiftLeft(1));
tm1 = tm1.subtract(t2);
// Recomposition: accumulate the coefficients, shifting by one slice
// (k ints = k*32 bits) between terms.
// Number of bits to shift left.
int ss = k*32;
BigInteger result = vinf.shiftLeft(ss).add(t2).shiftLeft(ss).add(t1).shiftLeft(ss).add(tm1).shiftLeft(ss).add(v0);
// Magnitudes were multiplied; fix the sign for mixed-sign inputs.
if (a.signum != b.signum) {
return result.negate();
} else {
return result;
}
}
    /**
     * Returns a slice of a BigInteger for use in Toom-Cook multiplication.
     *
     * @param lowerSize The size of the lower-order bit slices.
     * @param upperSize The size of the higher-order bit slices.
     * @param slice The index of which slice is requested, which must be a
     * number from 0 to size-1. Slice 0 is the highest-order bits, and slice
     * size-1 are the lowest-order bits. Slice 0 may be of different size than
     * the other slices.
     * @param fullsize The size of the larger integer array, used to align
     * slices to the appropriate position when multiplying different-sized
     * numbers.
     */
    private BigInteger getToomSlice(int lowerSize, int upperSize, int slice,
                                    int fullsize) {
        int start, end, sliceSize, len, offset;
        len = mag.length;
        // offset pads a shorter operand with implicit leading zero ints so
        // that both operands are sliced on the same word boundaries.
        offset = fullsize - len;
        if (slice == 0) {
            start = 0 - offset;
            end = upperSize - 1 - offset;
        } else {
            start = upperSize + (slice-1)*lowerSize - offset;
            end = start + lowerSize - 1;
        }
        // A negative start/end means the slice lies (partly or fully) in the
        // implicit leading zeros; clamp or return ZERO accordingly.
        if (start < 0) {
            start = 0;
        }
        if (end < 0) {
            return ZERO;
        }
        sliceSize = (end-start) + 1;
        if (sliceSize <= 0) {
            return ZERO;
        }
        // While performing Toom-Cook, all slices are positive and
        // the sign is adjusted when the final number is composed.
        if (start == 0 && sliceSize >= len) {
            return this.abs();
        }
        int intSlice[] = new int[sliceSize];
        System.arraycopy(mag, start, intSlice, 0, sliceSize);
        return new BigInteger(trustedStripLeadingZeroInts(intSlice), 1);
    }
    /**
     * Does an exact division (that is, the remainder is known to be zero)
     * of the specified number by 3.  This is used in Toom-Cook
     * multiplication.  This is an efficient algorithm that runs in linear
     * time.  If the argument is not exactly divisible by 3, results are
     * undefined.  Note that this is expected to be called with positive
     * arguments only.
     */
    private BigInteger exactDivideBy3() {
        int len = mag.length;
        int[] result = new int[len];
        long x, w, q, borrow;
        borrow = 0L;
        // Process words from least to most significant, carrying a borrow
        // between adjacent words.
        for (int i=len-1; i >= 0; i--) {
            x = (mag[i] & LONG_MASK);
            w = x - borrow;
            if (borrow > x) {      // Did we make the number go negative?
                borrow = 1L;
            } else {
                borrow = 0L;
            }
            // 0xAAAAAAAB is the modular inverse of 3 (mod 2^32).  Thus,
            // the effect of this is to divide by 3 (mod 2^32).
            // This is much faster than division on most architectures.
            q = (w * 0xAAAAAAABL) & LONG_MASK;
            result[i] = (int) q;
            // Now check the borrow. The second check can of course be
            // eliminated if the first fails.
            // A quotient word at or above ceil(2^32/3) indicates that this
            // word needed to borrow from the word above it (twice if it is
            // at or above 2*ceil(2^32/3) - 1).
            if (q >= 0x55555556L) {
                borrow++;
                if (q >= 0xAAAAAAABL)
                    borrow++;
            }
        }
        result = trustedStripLeadingZeroInts(result);
        return new BigInteger(result, signum);
    }
/**
* Returns a new BigInteger representing n lower ints of the number.
* This is used by Karatsuba multiplication and Karatsuba squaring.
*/
private BigInteger getLower(int n) {
int len = mag.length;
if (len <= n) {
return abs();
}
int lowerInts[] = new int[n];
System.arraycopy(mag, len-n, lowerInts, 0, n);
return new BigInteger(trustedStripLeadingZeroInts(lowerInts), 1);
}
/**
* Returns a new BigInteger representing mag.length-n upper
* ints of the number. This is used by Karatsuba multiplication and
* Karatsuba squaring.
*/
private BigInteger getUpper(int n) {
int len = mag.length;
if (len <= n) {
return ZERO;
}
int upperLen = len - n;
int upperInts[] = new int[upperLen];
System.arraycopy(mag, 0, upperInts, 0, upperLen);
return new BigInteger(trustedStripLeadingZeroInts(upperInts), 1);
}
// Squaring
/**
* Returns a BigInteger whose value is {@code (this<sup>2</sup>)}.
*
* @return {@code this<sup>2</sup>}
*/
private BigInteger square() {
if (signum == 0) {
return ZERO;
}
int len = mag.length;
if (len < KARATSUBA_SQUARE_THRESHOLD) {
int[] z = squareToLen(mag, len, null);
return new BigInteger(trustedStripLeadingZeroInts(z), 1);
} else {
if (len < TOOM_COOK_SQUARE_THRESHOLD) {
return squareKaratsuba();
} else {
return squareToomCook3();
}
}
}
    /**
     * Squares the contents of the int array x. The result is placed into the
     * int array z.  The contents of x are not changed.
     */
    private static final int[] squareToLen(int[] x, int len, int[] z) {
        /*
         * The algorithm used here is adapted from Colin Plumb's C library.
         * Technique: Consider the partial products in the multiplication
         * of "abcde" by itself:
         *
         *               a  b  c  d  e
         *            *  a  b  c  d  e
         *          ==================
         *              ae be ce de ee
         *           ad bd cd dd de
         *        ac bc cc cd ce
         *     ab bb bc bd be
         *  aa ab ac ad ae
         *
         * Note that everything above the main diagonal:
         *              ae be ce de = (abcd) * e
         *           ad bd cd       = (abc) * d
         *        ac bc             = (ab) * c
         *     ab                   = (a) * b
         *
         * is a copy of everything below the main diagonal:
         *                       de
         *                 cd ce
         *           bc bd be
         *     ab ac ad ae
         *
         * Thus, the sum is 2 * (off the diagonal) + diagonal.
         *
         * This is accumulated beginning with the diagonal (which
         * consist of the squares of the digits of the input), which is then
         * divided by two, the off-diagonal added, and multiplied by two
         * again.  The low bit is simply a copy of the low bit of the
         * input, so it doesn't need special care.
         */
        int zlen = len << 1;
        if (z == null || z.length < zlen)
            z = new int[zlen];
        // Store the squares, right shifted one bit (i.e., divided by 2)
        // lastProductLowWord carries the dropped low bit of the previous
        // square into the top of the next word.
        int lastProductLowWord = 0;
        for (int j=0, i=0; j < len; j++) {
            long piece = (x[j] & LONG_MASK);
            long product = piece * piece;
            z[i++] = (lastProductLowWord << 31) | (int)(product >>> 33);
            z[i++] = (int)(product >>> 1);
            lastProductLowWord = (int)product;
        }
        // Add in off-diagonal sums: row i-1 is x[0..i-2] scaled by x[i-1],
        // accumulated into z at the position given by offset.
        for (int i=len, offset=1; i > 0; i--, offset+=2) {
            int t = x[i-1];
            t = mulAdd(z, x, offset, i-1, t);
            addOne(z, offset-1, i, t);
        }
        // Shift back up and set low bit
        primitiveLeftShift(z, zlen, 1);
        z[zlen-1] |= x[len-1] & 1;
        return z;
    }
/**
* Squares a BigInteger using the Karatsuba squaring algorithm. It should
* be used when both numbers are larger than a certain threshold (found
* experimentally). It is a recursive divide-and-conquer algorithm that
* has better asymptotic performance than the algorithm used in
* squareToLen.
*/
private BigInteger squareKaratsuba() {
int half = (mag.length+1) / 2;
BigInteger xl = getLower(half);
BigInteger xh = getUpper(half);
BigInteger xhs = xh.square(); // xhs = xh^2
BigInteger xls = xl.square(); // xls = xl^2
// xh^2 << 64 + (((xl+xh)^2 - (xh^2 + xl^2)) << 32) + xl^2
return xhs.shiftLeft(half*32).add(xl.add(xh).square().subtract(xhs.add(xls))).shiftLeft(half*32).add(xls);
}
    /**
     * Squares a BigInteger using the 3-way Toom-Cook squaring algorithm.  It
     * should be used when both numbers are larger than a certain threshold
     * (found experimentally).  It is a recursive divide-and-conquer algorithm
     * that has better asymptotic performance than the algorithm used in
     * squareToLen or squareKaratsuba.
     */
    private BigInteger squareToomCook3() {
        int len = mag.length;
        // k is the size (in ints) of the lower-order slices.
        int k = (len+2)/3;   // Equal to ceil(largest/3)
        // r is the size (in ints) of the highest-order slice.
        int r = len - 2*k;
        // Obtain slices of the numbers. a2 is the most significant
        // bits of the number, and a0 the least significant.
        BigInteger a0, a1, a2;
        a2 = getToomSlice(k, r, 0, len);
        a1 = getToomSlice(k, r, 1, len);
        a0 = getToomSlice(k, r, 2, len);
        // Evaluate the squared polynomial at the points 0, -1, 1, 2 and
        // infinity (same evaluation sequence as multiplyToomCook3, with
        // square() replacing multiply()).
        BigInteger v0, v1, v2, vm1, vinf, t1, t2, tm1, da1;
        v0 = a0.square();                                    // value at 0
        da1 = a2.add(a0);
        vm1 = da1.subtract(a1).square();                     // value at -1
        da1 = da1.add(a1);                                   // da1 = a2+a1+a0
        v1 = da1.square();                                   // value at 1
        vinf = a2.square();                                  // value at infinity
        v2 = da1.add(a2).shiftLeft(1).subtract(a0).square(); // value at 2
        // The algorithm requires two divisions by 2 and one by 3.
        // All divisions are known to be exact, that is, they do not produce
        // remainders, and all results are positive.  The divisions by 2 are
        // implemented as right shifts which are relatively efficient, leaving
        // only a division by 3.
        // The division by 3 is done by an optimized algorithm for this case.
        t2 = v2.subtract(vm1).exactDivideBy3();
        tm1 = v1.subtract(vm1).shiftRight(1);
        t1 = v1.subtract(v0);
        t2 = t2.subtract(t1).shiftRight(1);
        t1 = t1.subtract(tm1).subtract(vinf);
        t2 = t2.subtract(vinf.shiftLeft(1));
        tm1 = tm1.subtract(t2);
        // Number of bits to shift left.
        int ss = k*32;
        // Recompose the interpolated coefficients into the final square; no
        // sign fixup is needed since a square is never negative.
        return vinf.shiftLeft(ss).add(t2).shiftLeft(ss).add(t1).shiftLeft(ss).add(tm1).shiftLeft(ss).add(v0);
    }
// Division
/**
* Returns a BigInteger whose value is {@code (this / val)}.
*
* @param val value by which this BigInteger is to be divided.
* @return {@code this / val}
* @throws ArithmeticException if {@code val} is zero.
*/
public BigInteger divide(BigInteger val) {
if (val.mag.length < BURNIKEL_ZIEGLER_THRESHOLD ||
mag.length - val.mag.length < BURNIKEL_ZIEGLER_OFFSET) {
return divideKnuth(val);
} else {
return divideBurnikelZiegler(val);
}
}
/**
* Returns a BigInteger whose value is {@code (this / val)} using an O(n^2) algorithm from Knuth.
*
* @param val value by which this BigInteger is to be divided.
* @return {@code this / val}
* @throws ArithmeticException if {@code val} is zero.
* @see MutableBigInteger#divideKnuth(MutableBigInteger, MutableBigInteger, boolean)
*/
private BigInteger divideKnuth(BigInteger val) {
MutableBigInteger q = new MutableBigInteger(),
a = new MutableBigInteger(this.mag),
b = new MutableBigInteger(val.mag);
a.divideKnuth(b, q, false);
return q.toBigInteger(this.signum * val.signum);
}
/**
* Returns an array of two BigIntegers containing {@code (this / val)}
* followed by {@code (this % val)}.
*
* @param val value by which this BigInteger is to be divided, and the
* remainder computed.
* @return an array of two BigIntegers: the quotient {@code (this / val)}
* is the initial element, and the remainder {@code (this % val)}
* is the final element.
* @throws ArithmeticException if {@code val} is zero.
*/
public BigInteger[] divideAndRemainder(BigInteger val) {
if (val.mag.length < BURNIKEL_ZIEGLER_THRESHOLD ||
mag.length - val.mag.length < BURNIKEL_ZIEGLER_OFFSET) {
return divideAndRemainderKnuth(val);
} else {
return divideAndRemainderBurnikelZiegler(val);
}
}
/** Long division */
private BigInteger[] divideAndRemainderKnuth(BigInteger val) {
BigInteger[] result = new BigInteger[2];
MutableBigInteger q = new MutableBigInteger(),
a = new MutableBigInteger(this.mag),
b = new MutableBigInteger(val.mag);
MutableBigInteger r = a.divideKnuth(b, q);
result[0] = q.toBigInteger(this.signum == val.signum ? 1 : -1);
result[1] = r.toBigInteger(this.signum);
return result;
}
/**
* Returns a BigInteger whose value is {@code (this % val)}.
*
* @param val value by which this BigInteger is to be divided, and the
* remainder computed.
* @return {@code this % val}
* @throws ArithmeticException if {@code val} is zero.
*/
public BigInteger remainder(BigInteger val) {
if (val.mag.length < BURNIKEL_ZIEGLER_THRESHOLD ||
mag.length - val.mag.length < BURNIKEL_ZIEGLER_OFFSET) {
return remainderKnuth(val);
} else {
return remainderBurnikelZiegler(val);
}
}
/** Long division */
private BigInteger remainderKnuth(BigInteger val) {
MutableBigInteger q = new MutableBigInteger(),
a = new MutableBigInteger(this.mag),
b = new MutableBigInteger(val.mag);
return a.divideKnuth(b, q).toBigInteger(this.signum);
}
/**
* Calculates {@code this / val} using the Burnikel-Ziegler algorithm.
* @param val the divisor
* @return {@code this / val}
*/
private BigInteger divideBurnikelZiegler(BigInteger val) {
return divideAndRemainderBurnikelZiegler(val)[0];
}
/**
* Calculates {@code this % val} using the Burnikel-Ziegler algorithm.
* @param val the divisor
* @return {@code this % val}
*/
private BigInteger remainderBurnikelZiegler(BigInteger val) {
return divideAndRemainderBurnikelZiegler(val)[1];
}
/**
* Computes {@code this / val} and {@code this % val} using the
* Burnikel-Ziegler algorithm.
* @param val the divisor
* @return an array containing the quotient and remainder
*/
private BigInteger[] divideAndRemainderBurnikelZiegler(BigInteger val) {
MutableBigInteger q = new MutableBigInteger();
MutableBigInteger r = new MutableBigInteger(this).divideAndRemainderBurnikelZiegler(new MutableBigInteger(val), q);
BigInteger qBigInt = q.isZero() ? ZERO : q.toBigInteger(signum*val.signum);
BigInteger rBigInt = r.isZero() ? ZERO : r.toBigInteger(signum);
return new BigInteger[] {qBigInt, rBigInt};
}
    /**
     * Returns a BigInteger whose value is <tt>(this<sup>exponent</sup>)</tt>.
     * Note that {@code exponent} is an integer rather than a BigInteger.
     *
     * @param  exponent exponent to which this BigInteger is to be raised.
     * @return <tt>this<sup>exponent</sup></tt>
     * @throws ArithmeticException {@code exponent} is negative.  (This would
     *         cause the operation to yield a non-integer value.)
     */
    public BigInteger pow(int exponent) {
        if (exponent < 0) {
            throw new ArithmeticException("Negative exponent");
        }
        if (signum == 0) {
            return (exponent == 0 ? ONE : this);
        }
        BigInteger partToSquare = this.abs();
        // Factor out powers of two from the base, as the exponentiation of
        // these can be done by left shifts only.
        // The remaining part can then be exponentiated faster.  The
        // powers of two will be multiplied back at the end.
        int powersOfTwo = partToSquare.getLowestSetBit();
        long bitsToShift = (long)powersOfTwo * exponent;
        // Guard against int overflow in the final left shift.
        if (bitsToShift > Integer.MAX_VALUE) {
            reportOverflow();
        }
        int remainingBits;
        // Factor the powers of two out quickly by shifting right, if needed.
        if (powersOfTwo > 0) {
            partToSquare = partToSquare.shiftRight(powersOfTwo);
            remainingBits = partToSquare.bitLength();
            if (remainingBits == 1) {  // Nothing left but +/- 1?
                if (signum < 0 && (exponent&1) == 1) {
                    return NEGATIVE_ONE.shiftLeft(powersOfTwo*exponent);
                } else {
                    return ONE.shiftLeft(powersOfTwo*exponent);
                }
            }
        } else {
            remainingBits = partToSquare.bitLength();
            if (remainingBits == 1) { // Nothing left but +/- 1?
                if (signum < 0 && (exponent&1) == 1) {
                    return NEGATIVE_ONE;
                } else {
                    return ONE;
                }
            }
        }
        // This is a quick way to approximate the size of the result,
        // similar to doing log2[n] * exponent.  This will give an upper bound
        // of how big the result can be, and which algorithm to use.
        long scaleFactor = (long)remainingBits * exponent;
        // Use slightly different algorithms for small and large operands.
        // See if the result will safely fit into a long. (Largest 2^63-1)
        if (partToSquare.mag.length == 1 && scaleFactor <= 62) {
            // Small number algorithm.  Everything fits into a long.
            // The result is negative only for a negative base raised to an
            // odd exponent.
            int newSign = (signum <0  && (exponent&1) == 1 ? -1 : 1);
            long result = 1;
            long baseToPow2 = partToSquare.mag[0] & LONG_MASK;
            int workingExponent = exponent;
            // Perform exponentiation using repeated squaring trick
            while (workingExponent != 0) {
                if ((workingExponent & 1) == 1) {
                    result = result * baseToPow2;
                }
                if ((workingExponent >>>= 1) != 0) {
                    baseToPow2 = baseToPow2 * baseToPow2;
                }
            }
            // Multiply back the powers of two (quickly, by shifting left)
            if (powersOfTwo > 0) {
                if (bitsToShift + scaleFactor <= 62) {  // Fits in long?
                    return valueOf((result << bitsToShift) * newSign);
                } else {
                    return valueOf(result*newSign).shiftLeft((int) bitsToShift);
                }
            }
            else {
                return valueOf(result*newSign);
            }
        } else {
            // Large number algorithm.  This is basically identical to
            // the algorithm above, but calls multiply() and square()
            // which may use more efficient algorithms for large numbers.
            BigInteger answer = ONE;
            int workingExponent = exponent;
            // Perform exponentiation using repeated squaring trick
            while (workingExponent != 0) {
                if ((workingExponent & 1) == 1) {
                    answer = answer.multiply(partToSquare);
                }
                if ((workingExponent >>>= 1) != 0) {
                    partToSquare = partToSquare.square();
                }
            }
            // Multiply back the (exponentiated) powers of two (quickly,
            // by shifting left)
            if (powersOfTwo > 0) {
                answer = answer.shiftLeft(powersOfTwo*exponent);
            }
            if (signum < 0 && (exponent&1) == 1) {
                return answer.negate();
            } else {
                return answer;
            }
        }
    }
/**
* Returns a BigInteger whose value is the greatest common divisor of
* {@code abs(this)} and {@code abs(val)}. Returns 0 if
* {@code this == 0 && val == 0}.
*
* @param val value with which the GCD is to be computed.
* @return {@code GCD(abs(this), abs(val))}
*/
public BigInteger gcd(BigInteger val) {
if (val.signum == 0)
return this.abs();
else if (this.signum == 0)
return val.abs();
MutableBigInteger a = new MutableBigInteger(this);
MutableBigInteger b = new MutableBigInteger(val);
MutableBigInteger result = a.hybridGCD(b);
return result.toBigInteger(1);
}
/**
* Package private method to return bit length for an integer.
*/
static int bitLengthForInt(int n) {
return 32 - Integer.numberOfLeadingZeros(n);
}
    /**
     * Left shift int array a up to len by n bits. Returns the array that
     * results from the shift since space may have to be reallocated.
     */
    private static int[] leftShift(int[] a, int len, int n) {
        int nInts = n >>> 5;           // whole-int portion of the shift
        int nBits = n&0x1F;            // residual bit portion of the shift
        int bitsInHighWord = bitLengthForInt(a[0]);
        // If shift can be done without recopy, do so
        if (n <= (32-bitsInHighWord)) {
            primitiveLeftShift(a, len, nBits);
            return a;
        } else { // Array must be resized
            if (nBits <= (32-bitsInHighWord)) {
                // The residual bit shift still fits in the current high
                // word: grow by nInts ints and shift in place.
                int result[] = new int[nInts+len];
                System.arraycopy(a, 0, result, 0, len);
                primitiveLeftShift(result, result.length, nBits);
                return result;
            } else {
                // The residual bit shift would overflow the high word: grow
                // by one extra int and realize the left shift by nBits as a
                // right shift by (32 - nBits) within the widened array.
                int result[] = new int[nInts+len+1];
                System.arraycopy(a, 0, result, 0, len);
                primitiveRightShift(result, result.length, 32 - nBits);
                return result;
            }
        }
    }
// shifts a up to len right n bits assumes no leading zeros, 0<n<32
static void primitiveRightShift(int[] a, int len, int n) {
int n2 = 32 - n;
for (int i=len-1, c=a[i]; i > 0; i--) {
int b = c;
c = a[i-1];
a[i] = (c << n2) | (b >>> n);
}
a[0] >>>= n;
}
// shifts a up to len left n bits assumes no leading zeros, 0<=n<32
static void primitiveLeftShift(int[] a, int len, int n) {
if (len == 0 || n == 0)
return;
int n2 = 32 - n;
for (int i=0, c=a[i], m=i+len-1; i < m; i++) {
int b = c;
c = a[i+1];
a[i] = (b << n) | (c >>> n2);
}
a[len-1] <<= n;
}
/**
* Calculate bitlength of contents of the first len elements an int array,
* assuming there are no leading zero ints.
*/
private static int bitLength(int[] val, int len) {
if (len == 0)
return 0;
return ((len - 1) << 5) + bitLengthForInt(val[0]);
}
/**
* Returns a BigInteger whose value is the absolute value of this
* BigInteger.
*
* @return {@code abs(this)}
*/
public BigInteger abs() {
return (signum >= 0 ? this : this.negate());
}
/**
* Returns a BigInteger whose value is {@code (-this)}.
*
* @return {@code -this}
*/
public BigInteger negate() {
return new BigInteger(this.mag, -this.signum);
}
/**
* Returns the signum function of this BigInteger.
*
* @return -1, 0 or 1 as the value of this BigInteger is negative, zero or
* positive.
*/
public int signum() {
return this.signum;
}
// Modular Arithmetic Operations
/**
* Returns a BigInteger whose value is {@code (this mod m}). This method
* differs from {@code remainder} in that it always returns a
* <i>non-negative</i> BigInteger.
*
* @param m the modulus.
* @return {@code this mod m}
* @throws ArithmeticException {@code m} ≤ 0
* @see #remainder
*/
public BigInteger mod(BigInteger m) {
if (m.signum <= 0)
throw new ArithmeticException("BigInteger: modulus not positive");
BigInteger result = this.remainder(m);
return (result.signum >= 0 ? result : result.add(m));
}
    /**
     * Returns a BigInteger whose value is
     * <tt>(this<sup>exponent</sup> mod m)</tt>.  (Unlike {@code pow}, this
     * method permits negative exponents.)
     *
     * @param  exponent the exponent.
     * @param  m the modulus.
     * @return <tt>this<sup>exponent</sup> mod m</tt>
     * @throws ArithmeticException {@code m} &le; 0 or the exponent is
     *         negative and this BigInteger is not <i>relatively
     *         prime</i> to {@code m}.
     * @see    #modInverse
     */
    public BigInteger modPow(BigInteger exponent, BigInteger m) {
        if (m.signum <= 0)
            throw new ArithmeticException("BigInteger: modulus not positive");
        // Trivial cases
        if (exponent.signum == 0)
            return (m.equals(ONE) ? ZERO : ONE);
        if (this.equals(ONE))
            return (m.equals(ONE) ? ZERO : ONE);
        if (this.equals(ZERO) && exponent.signum >= 0)
            return ZERO;
        if (this.equals(negConst[1]) && (!exponent.testBit(0)))
            return (m.equals(ONE) ? ZERO : ONE);
        // A negative exponent is handled by computing the positive power of
        // the base and inverting the result mod m at the end.
        boolean invertResult;
        if ((invertResult = (exponent.signum < 0)))
            exponent = exponent.negate();
        BigInteger base = (this.signum < 0 || this.compareTo(m) >= 0
                           ? this.mod(m) : this);
        BigInteger result;
        if (m.testBit(0)) { // odd modulus
            result = base.oddModPow(exponent, m);
        } else {
            /*
             * Even modulus.  Tear it into an "odd part" (m1) and power of two
             * (m2), exponentiate mod m1, manually exponentiate mod m2, and
             * use Chinese Remainder Theorem to combine results.
             */
            // Tear m apart into odd part (m1) and power of 2 (m2)
            int p = m.getLowestSetBit();   // Max pow of 2 that divides m
            BigInteger m1 = m.shiftRight(p);  // m/2**p
            BigInteger m2 = ONE.shiftLeft(p); // 2**p
            // Calculate new base from m1
            BigInteger base2 = (this.signum < 0 || this.compareTo(m1) >= 0
                                ? this.mod(m1) : this);
            // Calculate (base ** exponent) mod m1.
            BigInteger a1 = (m1.equals(ONE) ? ZERO :
                             base2.oddModPow(exponent, m1));
            // Calculate (this ** exponent) mod m2
            BigInteger a2 = base.modPow2(exponent, p);
            // Combine results using Chinese Remainder Theorem
            BigInteger y1 = m2.modInverse(m1);
            BigInteger y2 = m1.modInverse(m2);
            if (m.mag.length < MAX_MAG_LENGTH / 2) {
                result = a1.multiply(m2).multiply(y1).add(a2.multiply(m1).multiply(y2)).mod(m);
            } else {
                // For very large moduli, combine via MutableBigInteger to
                // avoid overflowing the intermediate products.
                MutableBigInteger t1 = new MutableBigInteger();
                new MutableBigInteger(a1.multiply(m2)).multiply(new MutableBigInteger(y1), t1);
                MutableBigInteger t2 = new MutableBigInteger();
                new MutableBigInteger(a2.multiply(m1)).multiply(new MutableBigInteger(y2), t2);
                t1.add(t2);
                MutableBigInteger q = new MutableBigInteger();
                result = t1.divide(new MutableBigInteger(m), q).toBigInteger();
            }
        }
        return (invertResult ? result.modInverse(m) : result);
    }
    // Thresholds (in exponent bit length) above which oddModPow moves to the
    // next larger window size; the final entry is a sentinel that is never
    // exceeded.
    static int[] bnExpModThreshTable = {7, 25, 81, 241, 673, 1793,
                                        Integer.MAX_VALUE}; // Sentinel
    /**
     * Returns a BigInteger whose value is x to the power of y mod z.
     * Assumes: z is odd && x < z.
     */
    private BigInteger oddModPow(BigInteger y, BigInteger z) {
    /*
     * The algorithm is adapted from Colin Plumb's C library.
     *
     * The window algorithm:
     * The idea is to keep a running product of b1 = n^(high-order bits of exp)
     * and then keep appending exponent bits to it.  The following patterns
     * apply to a 3-bit window (k = 3):
     * To append   0: square
     * To append   1: square, multiply by n^1
     * To append  10: square, multiply by n^1, square
     * To append  11: square, square, multiply by n^3
     * To append 100: square, multiply by n^1, square, square
     * To append 101: square, square, square, multiply by n^5
     * To append 110: square, square, multiply by n^3, square
     * To append 111: square, square, square, multiply by n^7
     *
     * Since each pattern involves only one multiply, the longer the pattern
     * the better, except that a 0 (no multiplies) can be appended directly.
     * We precompute a table of odd powers of n, up to 2^k, and can then
     * multiply k bits of exponent at a time.  Actually, assuming random
     * exponents, there is on average one zero bit between needs to
     * multiply (1/2 of the time there's none, 1/4 of the time there's 1,
     * 1/8 of the time, there's 2, 1/32 of the time, there's 3, etc.), so
     * you have to do one multiply per k+1 bits of exponent.
     *
     * The loop walks down the exponent, squaring the result buffer as
     * it goes.  There is a wbits+1 bit lookahead buffer, buf, that is
     * filled with the upcoming exponent bits.  (What is read after the
     * end of the exponent is unimportant, but it is filled with zero here.)
     * When the most-significant bit of this buffer becomes set, i.e.
     * (buf & tblmask) != 0, we have to decide what pattern to multiply
     * by, and when to do it.  We decide, remember to do it in future
     * after a suitable number of squarings have passed (e.g. a pattern
     * of "100" in the buffer requires that we multiply by n^1 immediately;
     * a pattern of "110" calls for multiplying by n^3 after one more
     * squaring), clear the buffer, and continue.
     *
     * When we start, there is one more optimization: the result buffer
     * is implicitly one, so squaring it or multiplying by it can be
     * optimized away.  Further, if we start with a pattern like "100"
     * in the lookahead window, rather than placing n into the buffer
     * and then starting to square it, we have already computed n^2
     * to compute the odd-powers table, so we can place that into
     * the buffer and save a squaring.
     *
     * This means that if you have a k-bit window, to compute n^z,
     * where z is the high k bits of the exponent, 1/2 of the time
     * it requires no squarings.  1/4 of the time, it requires 1
     * squaring, ... 1/2^(k-1) of the time, it requires k-2 squarings.
     * And the remaining 1/2^(k-1) of the time, the top k bits are a
     * 1 followed by k-1 0 bits, so it again only requires k-2
     * squarings, not k-1.  The average of these is 1.  Add that
     * to the one squaring we have to do to compute the table,
     * and you'll see that a k-bit window saves k-2 squarings
     * as well as reducing the multiplies.  (It actually doesn't
     * hurt in the case k = 1, either.)
     */
        // Special case for exponent of one
        if (y.equals(ONE))
            return this;
        // Special case for base of zero
        if (signum == 0)
            return ZERO;
        int[] base = mag.clone();
        int[] exp = y.mag;
        int[] mod = z.mag;
        int modLen = mod.length;
        // Select an appropriate window size
        int wbits = 0;
        int ebits = bitLength(exp, exp.length);
        // if exponent is 65537 (0x10001), use minimum window size
        if ((ebits != 17) || (exp[0] != 65537)) {
            while (ebits > bnExpModThreshTable[wbits]) {
                wbits++;
            }
        }
        // Calculate appropriate table size
        int tblmask = 1 << wbits;
        // Allocate table for precomputed odd powers of base in Montgomery form
        int[][] table = new int[tblmask][];
        for (int i=0; i < tblmask; i++)
            table[i] = new int[modLen];
        // Compute the modular inverse of the low word of the modulus,
        // negated, for use by Montgomery reduction.
        int inv = -MutableBigInteger.inverseMod32(mod[modLen-1]);
        // Convert base to Montgomery form: base * 2^(32*modLen) mod z.
        int[] a = leftShift(base, base.length, modLen << 5);
        MutableBigInteger q = new MutableBigInteger(),
                          a2 = new MutableBigInteger(a),
                          b2 = new MutableBigInteger(mod);
        MutableBigInteger r= a2.divide(b2, q);
        table[0] = r.toIntArray();
        // Pad table[0] with leading zeros so its length is at least modLen
        if (table[0].length < modLen) {
            int offset = modLen - table[0].length;
            int[] t2 = new int[modLen];
            for (int i=0; i < table[0].length; i++)
                t2[i+offset] = table[0][i];
            table[0] = t2;
        }
        // Set b to the square of the base
        int[] b = squareToLen(table[0], modLen, null);
        b = montReduce(b, mod, modLen, inv);
        // Set t to high half of b
        int[] t = Arrays.copyOf(b, modLen);
        // Fill in the table with odd powers of the base
        // (table[i] holds base^(2i+1) in Montgomery form).
        for (int i=1; i < tblmask; i++) {
            int[] prod = multiplyToLen(t, modLen, table[i-1], modLen, null);
            table[i] = montReduce(prod, mod, modLen, inv);
        }
        // Pre load the window that slides over the exponent
        int bitpos = 1 << ((ebits-1) & (32-1));
        int buf = 0;
        int elen = exp.length;
        int eIndex = 0;
        for (int i = 0; i <= wbits; i++) {
            buf = (buf << 1) | (((exp[eIndex] & bitpos) != 0)?1:0);
            bitpos >>>= 1;
            if (bitpos == 0) {
                eIndex++;
                bitpos = 1 << (32-1);
                elen--;
            }
        }
        int multpos = ebits;
        // The first iteration, which is hoisted out of the main loop
        ebits--;
        boolean isone = true;
        multpos = ebits - wbits;
        while ((buf & 1) == 0) {
            buf >>>= 1;
            multpos++;
        }
        int[] mult = table[buf >>> 1];
        buf = 0;
        if (multpos == ebits)
            isone = false;
        // The main loop
        while (true) {
            ebits--;
            // Advance the window
            buf <<= 1;
            if (elen != 0) {
                buf |= ((exp[eIndex] & bitpos) != 0) ? 1 : 0;
                bitpos >>>= 1;
                if (bitpos == 0) {
                    eIndex++;
                    bitpos = 1 << (32-1);
                    elen--;
                }
            }
            // Examine the window for pending multiplies
            if ((buf & tblmask) != 0) {
                multpos = ebits - wbits;
                while ((buf & 1) == 0) {
                    buf >>>= 1;
                    multpos++;
                }
                mult = table[buf >>> 1];
                buf = 0;
            }
            // Perform multiply
            if (ebits == multpos) {
                if (isone) {
                    b = mult.clone();
                    isone = false;
                } else {
                    t = b;
                    a = multiplyToLen(t, modLen, mult, modLen, a);
                    a = montReduce(a, mod, modLen, inv);
                    t = a; a = b; b = t;
                }
            }
            // Check if done
            if (ebits == 0)
                break;
            // Square the input
            if (!isone) {
                t = b;
                a = squareToLen(t, modLen, a);
                a = montReduce(a, mod, modLen, inv);
                t = a; a = b; b = t;
            }
        }
        // Convert result out of Montgomery form and return
        int[] t2 = new int[2*modLen];
        System.arraycopy(b, 0, t2, modLen, modLen);
        b = montReduce(t2, mod, modLen, inv);
        t2 = Arrays.copyOf(b, modLen);
        return new BigInteger(1, t2);
    }
    /**
     * Montgomery reduce n, modulo mod.  This reduces modulo mod and divides
     * by 2^(32*mlen). Adapted from Colin Plumb's C library.
     */
    private static int[] montReduce(int[] n, int[] mod, int mlen, int inv) {
        int c=0;
        int len = mlen;
        int offset=0;
        do {
            // Zero the lowest remaining word of n by adding the appropriate
            // multiple of mod; c accumulates the carries out of the top.
            int nEnd = n[n.length-1-offset];
            int carry = mulAdd(n, mod, offset, mlen, inv * nEnd);
            c += addOne(n, offset, mlen, carry);
            offset++;
        } while (--len > 0);
        // Fold accumulated top carries back in (subN returns a negative
        // borrow), then fully reduce the result below mod.
        while (c > 0)
            c += subN(n, mod, mlen);
        while (intArrayCmpToLen(n, mod, mlen) >= 0)
            subN(n, mod, mlen);
        return n;
    }
/*
* Returns -1, 0 or +1 as big-endian unsigned int array arg1 is less than,
* equal to, or greater than arg2 up to length len.
*/
private static int intArrayCmpToLen(int[] arg1, int[] arg2, int len) {
for (int i=0; i < len; i++) {
long b1 = arg1[i] & LONG_MASK;
long b2 = arg2[i] & LONG_MASK;
if (b1 < b2)
return -1;
if (b1 > b2)
return 1;
}
return 0;
}
/**
* Subtracts two numbers of same length, returning borrow.
*/
private static int subN(int[] a, int[] b, int len) {
long sum = 0;
while (--len >= 0) {
sum = (a[len] & LONG_MASK) -
(b[len] & LONG_MASK) + (sum >> 32);
a[len] = (int)sum;
}
return (int)(sum >> 32);
}
/**
* Multiply an array by one word k and add to result, return the carry
*/
static int mulAdd(int[] out, int[] in, int offset, int len, int k) {
long kLong = k & LONG_MASK;
long carry = 0;
offset = out.length-offset - 1;
for (int j=len-1; j >= 0; j--) {
long product = (in[j] & LONG_MASK) * kLong +
(out[offset] & LONG_MASK) + carry;
out[offset--] = (int)product;
carry = product >>> 32;
}
return (int)carry;
}
/**
* Add one word to the number a mlen words into a. Return the resulting
* carry.
*/
static int addOne(int[] a, int offset, int mlen, int carry) {
offset = a.length-1-mlen-offset;
long t = (a[offset] & LONG_MASK) + (carry & LONG_MASK);
a[offset] = (int)t;
if ((t >>> 32) == 0)
return 0;
while (--mlen >= 0) {
if (--offset < 0) { // Carry out of number
return 1;
} else {
a[offset]++;
if (a[offset] != 0)
return 0;
}
}
return 1;
}
/**
 * Returns a BigInteger whose value is (this ** exponent) mod (2**p)
 */
private BigInteger modPow2(BigInteger exponent, int p) {
/*
 * Perform exponentiation using repeated squaring trick, chopping off
 * high order bits as indicated by modulus.
 */
BigInteger result = ONE;
BigInteger baseToPow2 = this.mod2(p);
int expOffset = 0;
int limit = exponent.bitLength();
// For an odd base, exponent bits at index p-1 and above cannot change
// the result mod 2^p, so the scan can be capped at p-1 bits.
// (NOTE(review): relies on the multiplicative order of odd residues
// mod 2^p dividing 2^(p-2) — standard number-theoretic fact.)
if (this.testBit(0))
limit = (p-1) < limit ? (p-1) : limit;
// Classic right-to-left binary exponentiation, reducing mod 2^p
// after every multiply/square to keep operands small.
while (expOffset < limit) {
if (exponent.testBit(expOffset))
result = result.multiply(baseToPow2).mod2(p);
expOffset++;
if (expOffset < limit)
baseToPow2 = baseToPow2.square().mod2(p);
}
return result;
}
/**
 * Returns a BigInteger whose value is this mod(2**p).
 * Assumes that this {@code BigInteger >= 0} and {@code p > 0}.
 */
private BigInteger mod2(int p) {
// Already shorter than p bits: nothing to mask off.
if (bitLength() <= p)
return this;
// Copy remaining ints of mag
int numInts = (p + 31) >>> 5;
int[] mag = new int[numInts];
System.arraycopy(this.mag, (this.mag.length - numInts), mag, 0, numInts);
// Mask out any excess bits
int excessBits = (numInts << 5) - p;
mag[0] &= (1L << (32-excessBits)) - 1;
// If masking zeroed the top word, use the (signum, magnitude)
// constructor, which strips leading zeros; otherwise the trusted
// (magnitude, signum) constructor takes the array as-is.
return (mag[0] == 0 ? new BigInteger(1, mag) : new BigInteger(mag, 1));
}
/**
 * Returns a BigInteger whose value is {@code (this}<sup>-1</sup> {@code mod m)}.
 *
 * @param m the modulus.
 * @return {@code this}<sup>-1</sup> {@code mod m}.
 * @throws ArithmeticException {@code m} &le; 0, or this BigInteger
 *         has no multiplicative inverse mod m (that is, this BigInteger
 *         is not <i>relatively prime</i> to m).
 */
public BigInteger modInverse(BigInteger m) {
    if (m.signum != 1)
        throw new ArithmeticException("BigInteger: modulus not positive");
    // Every value is congruent to 0 mod 1.
    if (m.equals(ONE))
        return ZERO;

    // Reduce this into the range [0, m) before inverting.
    BigInteger base = this;
    if (signum < 0 || compareMagnitude(m) >= 0)
        base = this.mod(m);

    // 1 is trivially its own inverse.
    if (base.equals(ONE))
        return ONE;

    MutableBigInteger value = new MutableBigInteger(base);
    MutableBigInteger modulus = new MutableBigInteger(m);
    return value.mutableModInverse(modulus).toBigInteger(1);
}
// Shift Operations

/**
 * Returns a BigInteger whose value is {@code (this << n)}.
 * The shift distance, {@code n}, may be negative, in which case
 * this method performs a right shift.
 * (Computes <tt>floor(this * 2<sup>n</sup>)</tt>.)
 *
 * @param n shift distance, in bits.
 * @return {@code this << n}
 * @see #shiftRight
 */
public BigInteger shiftLeft(int n) {
    // Zero is unaffected by any shift.
    if (signum == 0)
        return ZERO;
    if (n == 0)
        return this;
    if (n < 0) {
        // (-n) may overflow for Integer.MIN_VALUE, but that is harmless:
        // shiftRightImpl treats its argument as unsigned.
        return shiftRightImpl(-n);
    }
    return new BigInteger(shiftLeft(mag, n), signum);
}
/**
 * Returns a magnitude array whose value is {@code (mag << n)}.
 * The shift distance, {@code n}, is considered unsigned.
 * (Computes <tt>this * 2<sup>n</sup></tt>.)
 *
 * @param mag magnitude, the most-significant int ({@code mag[0]}) must be non-zero.
 * @param n unsigned shift distance, in bits.
 * @return {@code mag << n}
 */
private static int[] shiftLeft(int[] mag, int n) {
// Split the shift into whole words (nInts) and residual bits (nBits).
int nInts = n >>> 5;
int nBits = n & 0x1f;
int magLen = mag.length;
int newMag[] = null;
if (nBits == 0) {
// Word-aligned shift: copy and leave nInts zero words at the end.
newMag = new int[magLen + nInts];
System.arraycopy(mag, 0, newMag, 0, magLen);
} else {
int i = 0;
int nBits2 = 32 - nBits;
// Bits shifted out of the top word; if nonzero the result needs
// one extra word.
int highBits = mag[0] >>> nBits2;
if (highBits != 0) {
newMag = new int[magLen + nInts + 1];
newMag[i++] = highBits;
} else {
newMag = new int[magLen + nInts];
}
int j=0;
// Each output word combines the low bits of mag[j] with the high
// bits of mag[j+1]. Relies on Java's left-to-right operand
// evaluation: j++ is applied before mag[j] on the right is read.
while (j < magLen-1)
newMag[i++] = mag[j++] << nBits | mag[j] >>> nBits2;
newMag[i] = mag[j] << nBits;
}
return newMag;
}
/**
 * Returns a BigInteger whose value is {@code (this >> n)}. Sign
 * extension is performed. The shift distance, {@code n}, may be
 * negative, in which case this method performs a left shift.
 * (Computes <tt>floor(this / 2<sup>n</sup>)</tt>.)
 *
 * @param n shift distance, in bits.
 * @return {@code this >> n}
 * @see #shiftLeft
 */
public BigInteger shiftRight(int n) {
    // Zero is unaffected by any shift.
    if (signum == 0)
        return ZERO;
    if (n == 0)
        return this;
    if (n < 0) {
        // (-n) may overflow for Integer.MIN_VALUE, but that is harmless:
        // shiftLeft treats its argument as unsigned.
        return new BigInteger(shiftLeft(mag, -n), signum);
    }
    return shiftRightImpl(n);
}
/**
 * Returns a BigInteger whose value is {@code (this >> n)}. The shift
 * distance, {@code n}, is considered unsigned.
 * (Computes <tt>floor(this * 2<sup>-n</sup>)</tt>.)
 *
 * @param n unsigned shift distance, in bits.
 * @return {@code this >> n}
 */
private BigInteger shiftRightImpl(int n) {
// Split the shift into whole words (nInts) and residual bits (nBits).
int nInts = n >>> 5;
int nBits = n & 0x1f;
int magLen = mag.length;
int newMag[] = null;
// Special case: entire contents shifted off the end
if (nInts >= magLen)
return (signum >= 0 ? ZERO : negConst[1]);
if (nBits == 0) {
// Word-aligned: just drop the low nInts words.
int newMagLen = magLen - nInts;
newMag = Arrays.copyOf(mag, newMagLen);
} else {
int i = 0;
// Surviving high bits of the top word; if zero the result is one
// word shorter.
int highBits = mag[0] >>> nBits;
if (highBits != 0) {
newMag = new int[magLen - nInts];
newMag[i++] = highBits;
} else {
newMag = new int[magLen - nInts -1];
}
int nBits2 = 32 - nBits;
int j=0;
// Combine adjacent words; relies on left-to-right evaluation so
// j++ takes effect before the second read of mag[j].
while (j < magLen - nInts - 1)
newMag[i++] = (mag[j++] << nBits2) | (mag[j] >>> nBits);
}
// The magnitude shift above truncates toward zero; for a negative
// value, floor division requires rounding away from zero whenever any
// one-bit was discarded, i.e. incrementing the magnitude.
if (signum < 0) {
// Find out whether any one-bits were shifted off the end.
boolean onesLost = false;
for (int i=magLen-1, j=magLen-nInts; i >= j && !onesLost; i--)
onesLost = (mag[i] != 0);
if (!onesLost && nBits != 0)
onesLost = (mag[magLen - nInts - 1] << (32 - nBits) != 0);
if (onesLost)
newMag = javaIncrement(newMag);
}
return new BigInteger(newMag, signum);
}
/**
 * Increments the big-endian magnitude {@code val} in place by one,
 * returning {@code val} itself, or a freshly allocated, one-word-longer
 * array when the increment carries out of every word.
 */
int[] javaIncrement(int[] val) {
    // Propagate the carry from the least-significant word upward,
    // stopping as soon as an increment does not wrap around to zero.
    for (int i = val.length - 1; i >= 0; i--) {
        if (++val[i] != 0)
            return val;
    }
    // Every word wrapped: the result needs one extra high-order word.
    int[] grown = new int[val.length + 1];
    grown[0] = 1;
    return grown;
}
// Bitwise Operations

/**
 * Returns a BigInteger whose value is {@code (this & val)}. (This
 * method returns a negative BigInteger if and only if this and val are
 * both negative.)
 *
 * @param val value to be AND'ed with this BigInteger.
 * @return {@code this & val}
 */
public BigInteger and(BigInteger val) {
    int len = Math.max(intLength(), val.intLength());
    int[] result = new int[len];
    // Combine the two's-complement word streams, most-significant first.
    for (int i = 0; i < len; i++) {
        int idx = len - i - 1;
        result[i] = getInt(idx) & val.getInt(idx);
    }
    return valueOf(result);
}
/**
 * Returns a BigInteger whose value is {@code (this | val)}. (This method
 * returns a negative BigInteger if and only if either this or val is
 * negative.)
 *
 * @param val value to be OR'ed with this BigInteger.
 * @return {@code this | val}
 */
public BigInteger or(BigInteger val) {
    int len = Math.max(intLength(), val.intLength());
    int[] result = new int[len];
    // Combine the two's-complement word streams, most-significant first.
    for (int i = 0; i < len; i++) {
        int idx = len - i - 1;
        result[i] = getInt(idx) | val.getInt(idx);
    }
    return valueOf(result);
}
/**
 * Returns a BigInteger whose value is {@code (this ^ val)}. (This method
 * returns a negative BigInteger if and only if exactly one of this and
 * val are negative.)
 *
 * @param val value to be XOR'ed with this BigInteger.
 * @return {@code this ^ val}
 */
public BigInteger xor(BigInteger val) {
    int len = Math.max(intLength(), val.intLength());
    int[] result = new int[len];
    // Combine the two's-complement word streams, most-significant first.
    for (int i = 0; i < len; i++) {
        int idx = len - i - 1;
        result[i] = getInt(idx) ^ val.getInt(idx);
    }
    return valueOf(result);
}
/**
 * Returns a BigInteger whose value is {@code (~this)}. (This method
 * returns a negative value if and only if this BigInteger is
 * non-negative.)
 *
 * @return {@code ~this}
 */
public BigInteger not() {
    int len = intLength();
    int[] result = new int[len];
    // Invert every two's-complement word, most-significant first.
    for (int i = 0; i < len; i++)
        result[i] = ~getInt(len - i - 1);
    return valueOf(result);
}
/**
 * Returns a BigInteger whose value is {@code (this & ~val)}. This
 * method, which is equivalent to {@code and(val.not())}, is provided as
 * a convenience for masking operations. (This method returns a negative
 * BigInteger if and only if {@code this} is negative and {@code val} is
 * positive.)
 *
 * @param val value to be complemented and AND'ed with this BigInteger.
 * @return {@code this & ~val}
 */
public BigInteger andNot(BigInteger val) {
    int len = Math.max(intLength(), val.intLength());
    int[] result = new int[len];
    // Clear in this every bit that is set in val.
    for (int i = 0; i < len; i++) {
        int idx = len - i - 1;
        result[i] = getInt(idx) & ~val.getInt(idx);
    }
    return valueOf(result);
}
// Single Bit Operations

/**
 * Returns {@code true} if and only if the designated bit is set.
 * (Computes {@code ((this & (1<<n)) != 0)}.)
 *
 * @param n index of bit to test.
 * @return {@code true} if and only if the designated bit is set.
 * @throws ArithmeticException {@code n} is negative.
 */
public boolean testBit(int n) {
    if (n < 0)
        throw new ArithmeticException("Negative bit address");
    // Fetch the two's-complement word containing bit n and probe it.
    int word = getInt(n >>> 5);
    return ((word >>> (n & 31)) & 1) != 0;
}
/**
 * Returns a BigInteger whose value is equivalent to this BigInteger
 * with the designated bit set. (Computes {@code (this | (1<<n))}.)
 *
 * @param n index of bit to set.
 * @return {@code this | (1<<n)}
 * @throws ArithmeticException {@code n} is negative.
 */
public BigInteger setBit(int n) {
    if (n < 0)
        throw new ArithmeticException("Negative bit address");
    int intNum = n >>> 5;
    // Size the buffer so it holds both the current value and bit n,
    // plus a sign word.
    int len = Math.max(intLength(), intNum + 2);
    int[] words = new int[len];
    // Copy the two's-complement representation, most-significant first.
    for (int i = 0; i < len; i++)
        words[len - i - 1] = getInt(i);
    words[len - intNum - 1] |= (1 << (n & 31));
    return valueOf(words);
}
/**
 * Returns a BigInteger whose value is equivalent to this BigInteger
 * with the designated bit cleared.
 * (Computes {@code (this & ~(1<<n))}.)
 *
 * @param n index of bit to clear.
 * @return {@code this & ~(1<<n)}
 * @throws ArithmeticException {@code n} is negative.
 */
public BigInteger clearBit(int n) {
    if (n < 0)
        throw new ArithmeticException("Negative bit address");
    int intNum = n >>> 5;
    // Size the buffer so it holds both the current value and bit n,
    // plus a sign word.
    int len = Math.max(intLength(), ((n + 1) >>> 5) + 1);
    int[] words = new int[len];
    // Copy the two's-complement representation, most-significant first.
    for (int i = 0; i < len; i++)
        words[len - i - 1] = getInt(i);
    words[len - intNum - 1] &= ~(1 << (n & 31));
    return valueOf(words);
}
/**
 * Returns a BigInteger whose value is equivalent to this BigInteger
 * with the designated bit flipped.
 * (Computes {@code (this ^ (1<<n))}.)
 *
 * @param n index of bit to flip.
 * @return {@code this ^ (1<<n)}
 * @throws ArithmeticException {@code n} is negative.
 */
public BigInteger flipBit(int n) {
    if (n < 0)
        throw new ArithmeticException("Negative bit address");
    int intNum = n >>> 5;
    // Size the buffer so it holds both the current value and bit n,
    // plus a sign word.
    int len = Math.max(intLength(), intNum + 2);
    int[] words = new int[len];
    // Copy the two's-complement representation, most-significant first.
    for (int i = 0; i < len; i++)
        words[len - i - 1] = getInt(i);
    words[len - intNum - 1] ^= (1 << (n & 31));
    return valueOf(words);
}
/**
 * Returns the index of the rightmost (lowest-order) one bit in this
 * BigInteger (the number of zero bits to the right of the rightmost
 * one bit). Returns -1 if this BigInteger contains no one bits.
 * (Computes {@code (this == 0? -1 : log2(this & -this))}.)
 *
 * @return index of the rightmost one bit in this BigInteger.
 */
public int getLowestSetBit() {
// The cache field stores the answer biased by +2 so that the field's
// default value 0 means "not computed yet" (valid answers are >= -1).
@SuppressWarnings("deprecation") int lsb = lowestSetBit - 2;
if (lsb == -2) { // lowestSetBit not initialized yet
lsb = 0;
if (signum == 0) {
lsb -= 1;
} else {
// Search for lowest order nonzero int
int i,b;
for (i=0; (b = getInt(i)) == 0; i++)
;
lsb += (i << 5) + Integer.numberOfTrailingZeros(b);
}
// Racy but benign: concurrent callers compute the same value.
lowestSetBit = lsb + 2;
}
return lsb;
}
// Miscellaneous Bit Operations
/**
 * Returns the number of bits in the minimal two's-complement
 * representation of this BigInteger, <i>excluding</i> a sign bit.
 * For positive BigIntegers, this is equivalent to the number of bits in
 * the ordinary binary representation. (Computes
 * {@code (ceil(log2(this < 0 ? -this : this+1)))}.)
 *
 * @return number of bits in the minimal two's-complement
 * representation of this BigInteger, <i>excluding</i> a sign bit.
 */
public int bitLength() {
// The cache field stores the answer biased by +1 so that the field's
// default value 0 means "not computed yet".
@SuppressWarnings("deprecation") int n = bitLength - 1;
if (n == -1) { // bitLength not initialized yet
int[] m = mag;
int len = m.length;
if (len == 0) {
n = 0; // offset by one to initialize
} else {
// Calculate the bit length of the magnitude
int magBitLength = ((len - 1) << 5) + bitLengthForInt(mag[0]);
if (signum < 0) {
// Check if magnitude is a power of two
// (e.g. -8 needs only 4 bits: 1000, sign bit reused)
boolean pow2 = (Integer.bitCount(mag[0]) == 1);
for (int i=1; i< len && pow2; i++)
pow2 = (mag[i] == 0);
n = (pow2 ? magBitLength -1 : magBitLength);
} else {
n = magBitLength;
}
}
// Racy but benign: concurrent callers compute the same value.
bitLength = n + 1;
}
return n;
}
/**
 * Returns the number of bits in the two's complement representation
 * of this BigInteger that differ from its sign bit. This method is
 * useful when implementing bit-vector style sets atop BigIntegers.
 *
 * @return number of bits in the two's complement representation
 * of this BigInteger that differ from its sign bit.
 */
public int bitCount() {
// The cache field stores the answer biased by +1 so that the field's
// default value 0 means "not computed yet".
@SuppressWarnings("deprecation") int bc = bitCount - 1;
if (bc == -1) { // bitCount not initialized yet
bc = 0; // offset by one to initialize
// Count the bits in the magnitude
for (int i=0; i < mag.length; i++)
bc += Integer.bitCount(mag[i]);
if (signum < 0) {
// For a negative value the two's complement is ~mag + 1; the
// adjustment below converts the magnitude popcount accordingly.
// Count the trailing zeros in the magnitude
int magTrailingZeroCount = 0, j;
for (j=mag.length-1; mag[j] == 0; j--)
magTrailingZeroCount += 32;
magTrailingZeroCount += Integer.numberOfTrailingZeros(mag[j]);
bc += magTrailingZeroCount - 1;
}
// Racy but benign: concurrent callers compute the same value.
bitCount = bc + 1;
}
return bc;
}
// Primality Testing

/**
 * Returns {@code true} if this BigInteger is probably prime,
 * {@code false} if it's definitely composite. If
 * {@code certainty} is &le; 0, {@code true} is
 * returned.
 *
 * @param certainty a measure of the uncertainty that the caller is
 *        willing to tolerate: if the call returns {@code true}
 *        the probability that this BigInteger is prime exceeds
 *        (1 - 1/2<sup>{@code certainty}</sup>). The execution time of
 *        this method is proportional to the value of this parameter.
 * @return {@code true} if this BigInteger is probably prime,
 *         {@code false} if it's definitely composite.
 */
public boolean isProbablePrime(int certainty) {
    if (certainty <= 0)
        return true;
    BigInteger w = this.abs();
    // 2 is the only even prime.
    if (w.equals(TWO))
        return true;
    // Reject 1 and every other even value outright.
    if (w.equals(ONE) || !w.testBit(0))
        return false;
    return w.primeToCertainty(certainty, null);
}
// Comparison Operations

/**
 * Compares this BigInteger with the specified BigInteger. This
 * method is provided in preference to individual methods for each
 * of the six boolean comparison operators ({@literal <}, ==,
 * {@literal >}, {@literal >=}, !=, {@literal <=}). The suggested
 * idiom for performing these comparisons is: {@code
 * (x.compareTo(y)} &lt;<i>op</i>&gt; {@code 0)}, where
 * &lt;<i>op</i>&gt; is one of the six comparison operators.
 *
 * @param val BigInteger to which this BigInteger is to be compared.
 * @return -1, 0 or 1 as this BigInteger is numerically less than, equal
 *         to, or greater than {@code val}.
 */
public int compareTo(BigInteger val) {
    // Different signs decide the comparison immediately.
    if (signum != val.signum)
        return signum > val.signum ? 1 : -1;
    // Same sign: compare magnitudes, inverting the result when both
    // values are negative.
    if (signum > 0)
        return compareMagnitude(val);
    if (signum < 0)
        return val.compareMagnitude(this);
    return 0;
}
/**
 * Compares the magnitude array of this BigInteger with the specified
 * BigInteger's. This is the version of compareTo ignoring sign.
 *
 * @param val BigInteger whose magnitude array to be compared.
 * @return -1, 0 or 1 as this magnitude array is less than, equal to or
 *         greater than the magnitude array for the specified BigInteger's.
 */
final int compareMagnitude(BigInteger val) {
    int[] m1 = mag;
    int[] m2 = val.mag;
    // Magnitudes have no leading zeros, so a longer array is larger.
    if (m1.length != m2.length)
        return (m1.length < m2.length) ? -1 : 1;
    // Equal lengths: compare word by word, unsigned, big-endian.
    for (int i = 0; i < m1.length; i++) {
        long w1 = m1[i] & LONG_MASK;
        long w2 = m2[i] & LONG_MASK;
        if (w1 != w2)
            return (w1 < w2) ? -1 : 1;
    }
    return 0;
}
/**
 * Version of compareMagnitude that compares magnitude with long value.
 * val can't be Long.MIN_VALUE.
 */
final int compareMagnitude(long val) {
// Long.MIN_VALUE is excluded because -val below would overflow.
assert val != Long.MIN_VALUE;
int[] m1 = mag;
int len = m1.length;
// Any long magnitude fits in at most two 32-bit words.
if (len > 2) {
return 1;
}
if (val < 0) {
val = -val;
}
int highWord = (int)(val >>> 32);
if (highWord == 0) {
// val fits in one word.
if (len < 1)
return -1;
if (len > 1)
return 1;
int a = m1[0];
int b = (int)val;
if (a != b) {
return ((a & LONG_MASK) < (b & LONG_MASK))? -1 : 1;
}
return 0;
} else {
// val needs two words: compare high word first, then low word.
if (len < 2)
return -1;
int a = m1[0];
int b = highWord;
if (a != b) {
return ((a & LONG_MASK) < (b & LONG_MASK))? -1 : 1;
}
a = m1[1];
b = (int)val;
if (a != b) {
return ((a & LONG_MASK) < (b & LONG_MASK))? -1 : 1;
}
return 0;
}
}
/**
 * Compares this BigInteger with the specified Object for equality.
 *
 * @param x Object to which this BigInteger is to be compared.
 * @return {@code true} if and only if the specified Object is a
 *         BigInteger whose value is numerically equal to this BigInteger.
 */
public boolean equals(Object x) {
    // This test is just an optimization, which may or may not help
    if (x == this)
        return true;
    if (!(x instanceof BigInteger))
        return false;

    BigInteger other = (BigInteger) x;
    // Sign and magnitude length must agree before the word compare.
    if (other.signum != signum || other.mag.length != mag.length)
        return false;

    for (int i = 0; i < mag.length; i++) {
        if (mag[i] != other.mag[i])
            return false;
    }
    return true;
}
/**
 * Returns the minimum of this BigInteger and {@code val}.
 *
 * @param val value with which the minimum is to be computed.
 * @return the BigInteger whose value is the lesser of this BigInteger and
 *         {@code val}.  If they are equal, either may be returned.
 */
public BigInteger min(BigInteger val) {
    if (compareTo(val) < 0)
        return this;
    return val;
}
/**
 * Returns the maximum of this BigInteger and {@code val}.
 *
 * @param val value with which the maximum is to be computed.
 * @return the BigInteger whose value is the greater of this and
 *         {@code val}.  If they are equal, either may be returned.
 */
public BigInteger max(BigInteger val) {
    if (compareTo(val) > 0)
        return this;
    return val;
}
// Hash Function

/**
 * Returns the hash code for this BigInteger.
 *
 * @return hash code for this BigInteger.
 */
public int hashCode() {
    int h = 0;
    // Standard 31-based polynomial hash over the unsigned magnitude
    // words, then folded with the sign so x and -x hash differently.
    for (int word : mag)
        h = (int) (31 * h + (word & LONG_MASK));
    return h * signum;
}
/**
 * Returns the String representation of this BigInteger in the
 * given radix. If the radix is outside the range from {@link
 * Character#MIN_RADIX} to {@link Character#MAX_RADIX} inclusive,
 * it will default to 10 (as is the case for
 * {@code Integer.toString}). The digit-to-character mapping
 * provided by {@code Character.forDigit} is used, and a minus
 * sign is prepended if appropriate. (This representation is
 * compatible with the {@link #BigInteger(String, int) (String,
 * int)} constructor.)
 *
 * @param radix radix of the String representation.
 * @return String representation of this BigInteger in the given radix.
 * @see Integer#toString
 * @see Character#forDigit
 * @see #BigInteger(java.lang.String, int)
 */
public String toString(int radix) {
    if (signum == 0)
        return "0";
    // Out-of-range radix silently falls back to decimal, matching
    // Integer.toString.
    if (radix < Character.MIN_RADIX || radix > Character.MAX_RADIX)
        radix = 10;

    // Small magnitudes use the simple quadratic conversion.
    if (mag.length <= SCHOENHAGE_BASE_CONVERSION_THRESHOLD)
        return smallToString(radix);

    // Otherwise use the recursive Schoenhage conversion, which requires
    // a positive argument; the sign is re-attached afterwards.
    StringBuilder sb = new StringBuilder();
    if (signum < 0) {
        toString(this.negate(), sb, radix, 0);
        sb.insert(0, '-');
    } else {
        toString(this, sb, radix, 0);
    }
    return sb.toString();
}
/** This method is used to perform toString when arguments are small.
 *
 * <p>Repeatedly divides the magnitude by {@code longRadix[radix]}
 * (the largest power of radix that fits in a long), converting one
 * group of up to {@code digitsPerLong[radix]} digits per division,
 * then stitches the groups together with zero-padding. Quadratic in
 * the number of words, hence the size threshold in the caller.
 */
private String smallToString(int radix) {
if (signum == 0) {
return "0";
}
// Compute upper bound on number of digit groups and allocate space
int maxNumDigitGroups = (4*mag.length + 6)/7;
String digitGroup[] = new String[maxNumDigitGroups];
// Translate number to string, a digit group at a time
BigInteger tmp = this.abs();
int numGroups = 0;
while (tmp.signum != 0) {
BigInteger d = longRadix[radix];
MutableBigInteger q = new MutableBigInteger(),
a = new MutableBigInteger(tmp.mag),
b = new MutableBigInteger(d.mag);
MutableBigInteger r = a.divide(b, q);
BigInteger q2 = q.toBigInteger(tmp.signum * d.signum);
BigInteger r2 = r.toBigInteger(tmp.signum * d.signum);
// Each remainder fits in a long, so Long.toString can render it.
digitGroup[numGroups++] = Long.toString(r2.longValue(), radix);
tmp = q2;
}
// Put sign (if any) and first digit group into result buffer
StringBuilder buf = new StringBuilder(numGroups*digitsPerLong[radix]+1);
if (signum < 0) {
buf.append('-');
}
buf.append(digitGroup[numGroups-1]);
// Append remaining digit groups padded with leading zeros
for (int i=numGroups-2; i >= 0; i--) {
// Prepend (any) leading zeros for this digit group
int numLeadingZeros = digitsPerLong[radix]-digitGroup[i].length();
if (numLeadingZeros != 0) {
buf.append(zeros[numLeadingZeros]);
}
buf.append(digitGroup[i]);
}
return buf.toString();
}
/**
 * Converts the specified BigInteger to a string and appends to
 * {@code sb}. This implements the recursive Schoenhage algorithm
 * for base conversions.
 * <p/>
 * See Knuth, Donald, _The Art of Computer Programming_, Vol. 2,
 * Answers to Exercises (4.4) Question 14.
 *
 * @param u      The number to convert to a string.
 * @param sb     The StringBuilder that will be appended to in place.
 * @param radix  The base to convert to.
 * @param digits The minimum number of digits to pad to.
 */
private static void toString(BigInteger u, StringBuilder sb, int radix,
int digits) {
/* If we're smaller than a certain threshold, use the smallToString
method, padding with leading zeroes when necessary. */
if (u.mag.length <= SCHOENHAGE_BASE_CONVERSION_THRESHOLD) {
String s = u.smallToString(radix);
// Pad with internal zeros if necessary.
// Don't pad if we're at the beginning of the string.
if ((s.length() < digits) && (sb.length() > 0)) {
for (int i=s.length(); i < digits; i++) { // May be a faster way to
sb.append('0');                    // do this?
}
}
sb.append(s);
return;
}
int b, n;
b = u.bitLength();
// Calculate a value for n in the equation radix^(2^n) = u
// and subtract 1 from that value. This is used to find the
// cache index that contains the best value to divide u.
n = (int) Math.round(Math.log(b * LOG_TWO / logCache[radix]) / LOG_TWO - 1.0);
BigInteger v = getRadixConversionCache(radix, n);
BigInteger[] results;
// Split u at a power-of-radix boundary: the quotient supplies the
// high-order digits, the remainder the low-order 2^n digits.
results = u.divideAndRemainder(v);
int expectedDigits = 1 << n;
// Now recursively build the two halves of each number.
// The remainder half is zero-padded to exactly expectedDigits.
toString(results[0], sb, radix, digits-expectedDigits);
toString(results[1], sb, radix, expectedDigits);
}
/**
 * Returns the value radix^(2^exponent) from the cache.
 * If this value doesn't already exist in the cache, it is added.
 * <p/>
 * This could be changed to a more complicated caching method using
 * {@code Future}.
 */
private static BigInteger getRadixConversionCache(int radix, int exponent) {
BigInteger[] cacheLine = powerCache[radix]; // volatile read
if (exponent < cacheLine.length) {
return cacheLine[exponent];
}
// Cache miss: extend a private copy of the line by repeated squaring.
int oldLength = cacheLine.length;
cacheLine = Arrays.copyOf(cacheLine, exponent + 1);
for (int i = oldLength; i <= exponent; i++) {
cacheLine[i] = cacheLine[i - 1].pow(2);
}
// Publish via copy-and-swap of the outer array. Concurrent callers
// may race and one extension may be lost, but every published line is
// fully built, so readers always see consistent values.
BigInteger[][] pc = powerCache; // volatile read again
if (exponent >= pc[radix].length) {
pc = pc.clone();
pc[radix] = cacheLine;
powerCache = pc; // volatile write, publish
}
return cacheLine[exponent];
}
/* zeros[i] is a string of i consecutive zeros, for 0 <= i <= 63.
 * Used to left-pad digit groups during radix conversion; 63 covers the
 * longest group any supported radix produces. */
private static String zeros[] = new String[64];
static {
zeros[63] =
"000000000000000000000000000000000000000000000000000000000000000";
// Each shorter entry is a prefix of the longest one.
for (int i=0; i < 63; i++)
zeros[i] = zeros[63].substring(0, i);
}
/**
 * Returns the decimal String representation of this BigInteger.
 * The digit-to-character mapping provided by
 * {@code Character.forDigit} is used, and a minus sign is
 * prepended if appropriate. (This representation is compatible
 * with the {@link #BigInteger(String) (String)} constructor, and
 * allows for String concatenation with Java's + operator.)
 *
 * @return decimal String representation of this BigInteger.
 * @see Character#forDigit
 * @see #BigInteger(java.lang.String)
 */
public String toString() {
// Delegate to the general radix conversion with base 10.
return toString(10);
}
/**
 * Returns a byte array containing the two's-complement
 * representation of this BigInteger. The byte array will be in
 * <i>big-endian</i> byte-order: the most significant byte is in
 * the zeroth element. The array will contain the minimum number
 * of bytes required to represent this BigInteger, including at
 * least one sign bit, which is {@code (ceil((this.bitLength() +
 * 1)/8))}. (This representation is compatible with the
 * {@link #BigInteger(byte[]) (byte[])} constructor.)
 *
 * @return a byte array containing the two's-complement representation of
 *         this BigInteger.
 * @see #BigInteger(byte[])
 */
public byte[] toByteArray() {
// +1 guarantees room for the sign bit.
int byteLen = bitLength()/8 + 1;
byte[] byteArray = new byte[byteLen];
// Fill from the least-significant byte backwards, pulling a fresh
// two's-complement word via getInt every 4 bytes (bytesCopied tracks
// how many bytes of the current word have been consumed).
for (int i=byteLen-1, bytesCopied=4, nextInt=0, intIndex=0; i >= 0; i--) {
if (bytesCopied == 4) {
nextInt = getInt(intIndex++);
bytesCopied = 1;
} else {
nextInt >>>= 8;
bytesCopied++;
}
byteArray[i] = (byte)nextInt;
}
return byteArray;
}
/**
 * Converts this BigInteger to an {@code int}. This
 * conversion is analogous to a
 * <i>narrowing primitive conversion</i> from {@code long} to
 * {@code int} as defined in section 5.1.3 of
 * <cite>The Java&trade; Language Specification</cite>:
 * if this BigInteger is too big to fit in an
 * {@code int}, only the low-order 32 bits are returned.
 * Note that this conversion can lose information about the
 * overall magnitude of the BigInteger value as well as return a
 * result with the opposite sign.
 *
 * @return this BigInteger converted to an {@code int}.
 * @see #intValueExact()
 */
public int intValue() {
    // The lowest two's-complement word is exactly the narrowed value.
    return getInt(0);
}
/**
 * Converts this BigInteger to a {@code long}. This
 * conversion is analogous to a
 * <i>narrowing primitive conversion</i> from {@code long} to
 * {@code int} as defined in section 5.1.3 of
 * <cite>The Java&trade; Language Specification</cite>:
 * if this BigInteger is too big to fit in a
 * {@code long}, only the low-order 64 bits are returned.
 * Note that this conversion can lose information about the
 * overall magnitude of the BigInteger value as well as return a
 * result with the opposite sign.
 *
 * @return this BigInteger converted to a {@code long}.
 * @see #longValueExact()
 */
public long longValue() {
    // Assemble the low 64 bits from the two lowest two's-complement
    // words (getInt sign-extends past the magnitude, so this is exact).
    long high = getInt(1) & LONG_MASK;
    long low = getInt(0) & LONG_MASK;
    return (high << 32) | low;
}
/**
 * Converts this BigInteger to a {@code float}. This
 * conversion is similar to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code float} as defined in section 5.1.3 of
 * <cite>The Java&trade; Language Specification</cite>:
 * if this BigInteger has too great a magnitude
 * to represent as a {@code float}, it will be converted to
 * {@link Float#NEGATIVE_INFINITY} or {@link
 * Float#POSITIVE_INFINITY} as appropriate. Note that even when
 * the return value is finite, this conversion can lose
 * information about the precision of the BigInteger value.
 *
 * @return this BigInteger converted to a {@code float}.
 */
public float floatValue() {
if (signum == 0) {
return 0.0f;
}
int exponent = ((mag.length - 1) << 5) + bitLengthForInt(mag[0]) - 1;
// exponent == floor(log2(abs(this)))
// Values that fit in a long convert exactly via the long path.
if (exponent < Long.SIZE - 1) {
return longValue();
} else if (exponent > Float.MAX_EXPONENT) {
return signum > 0 ? Float.POSITIVE_INFINITY : Float.NEGATIVE_INFINITY;
}
/*
 * We need the top SIGNIFICAND_WIDTH bits, including the "implicit"
 * one bit. To make rounding easier, we pick out the top
 * SIGNIFICAND_WIDTH + 1 bits, so we have one to help us round up or
 * down. twiceSignifFloor will contain the top SIGNIFICAND_WIDTH + 1
 * bits, and signifFloor the top SIGNIFICAND_WIDTH.
 *
 * It helps to consider the real number signif = abs(this) *
 * 2^(SIGNIFICAND_WIDTH - 1 - exponent).
 */
int shift = exponent - FloatConsts.SIGNIFICAND_WIDTH;
int twiceSignifFloor;
// twiceSignifFloor will be == abs().shiftRight(shift).intValue()
// We do the shift into an int directly to improve performance.
int nBits = shift & 0x1f;
int nBits2 = 32 - nBits;
if (nBits == 0) {
twiceSignifFloor = mag[0];
} else {
twiceSignifFloor = mag[0] >>> nBits;
if (twiceSignifFloor == 0) {
// Top word contributed nothing; take bits spanning mag[0]/mag[1].
twiceSignifFloor = (mag[0] << nBits2) | (mag[1] >>> nBits);
}
}
int signifFloor = twiceSignifFloor >> 1;
signifFloor &= FloatConsts.SIGNIF_BIT_MASK; // remove the implied bit
/*
 * We round up if either the fractional part of signif is strictly
 * greater than 0.5 (which is true if the 0.5 bit is set and any lower
 * bit is set), or if the fractional part of signif is >= 0.5 and
 * signifFloor is odd (which is true if both the 0.5 bit and the 1 bit
 * are set). This is equivalent to the desired HALF_EVEN rounding.
 */
boolean increment = (twiceSignifFloor & 1) != 0
&& ((signifFloor & 1) != 0 || abs().getLowestSetBit() < shift);
int signifRounded = increment ? signifFloor + 1 : signifFloor;
// Assemble IEEE 754 bits: biased exponent field then significand.
int bits = ((exponent + FloatConsts.EXP_BIAS))
<< (FloatConsts.SIGNIFICAND_WIDTH - 1);
bits += signifRounded;
/*
 * If signifRounded == 2^24, we'd need to set all of the significand
 * bits to zero and add 1 to the exponent. This is exactly the behavior
 * we get from just adding signifRounded to bits directly. If the
 * exponent is Float.MAX_EXPONENT, we round up (correctly) to
 * Float.POSITIVE_INFINITY.
 */
bits |= signum & FloatConsts.SIGN_BIT_MASK;
return Float.intBitsToFloat(bits);
}
/**
 * Converts this BigInteger to a {@code double}. This
 * conversion is similar to the
 * <i>narrowing primitive conversion</i> from {@code double} to
 * {@code float} as defined in section 5.1.3 of
 * <cite>The Java&trade; Language Specification</cite>:
 * if this BigInteger has too great a magnitude
 * to represent as a {@code double}, it will be converted to
 * {@link Double#NEGATIVE_INFINITY} or {@link
 * Double#POSITIVE_INFINITY} as appropriate. Note that even when
 * the return value is finite, this conversion can lose
 * information about the precision of the BigInteger value.
 *
 * @return this BigInteger converted to a {@code double}.
 */
public double doubleValue() {
if (signum == 0) {
return 0.0;
}
int exponent = ((mag.length - 1) << 5) + bitLengthForInt(mag[0]) - 1;
// exponent == floor(log2(abs(this)))
// Values that fit in a long convert exactly via the long path.
if (exponent < Long.SIZE - 1) {
return longValue();
} else if (exponent > Double.MAX_EXPONENT) {
return signum > 0 ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
}
/*
 * We need the top SIGNIFICAND_WIDTH bits, including the "implicit"
 * one bit. To make rounding easier, we pick out the top
 * SIGNIFICAND_WIDTH + 1 bits, so we have one to help us round up or
 * down. twiceSignifFloor will contain the top SIGNIFICAND_WIDTH + 1
 * bits, and signifFloor the top SIGNIFICAND_WIDTH.
 *
 * It helps to consider the real number signif = abs(this) *
 * 2^(SIGNIFICAND_WIDTH - 1 - exponent).
 */
int shift = exponent - DoubleConsts.SIGNIFICAND_WIDTH;
long twiceSignifFloor;
// twiceSignifFloor will be == abs().shiftRight(shift).longValue()
// We do the shift into a long directly to improve performance.
int nBits = shift & 0x1f;
int nBits2 = 32 - nBits;
int highBits;
int lowBits;
if (nBits == 0) {
highBits = mag[0];
lowBits = mag[1];
} else {
highBits = mag[0] >>> nBits;
lowBits = (mag[0] << nBits2) | (mag[1] >>> nBits);
if (highBits == 0) {
// Top word contributed nothing; slide down one word.
highBits = lowBits;
lowBits = (mag[1] << nBits2) | (mag[2] >>> nBits);
}
}
twiceSignifFloor = ((highBits & LONG_MASK) << 32)
| (lowBits & LONG_MASK);
long signifFloor = twiceSignifFloor >> 1;
signifFloor &= DoubleConsts.SIGNIF_BIT_MASK; // remove the implied bit
/*
 * We round up if either the fractional part of signif is strictly
 * greater than 0.5 (which is true if the 0.5 bit is set and any lower
 * bit is set), or if the fractional part of signif is >= 0.5 and
 * signifFloor is odd (which is true if both the 0.5 bit and the 1 bit
 * are set). This is equivalent to the desired HALF_EVEN rounding.
 */
boolean increment = (twiceSignifFloor & 1) != 0
&& ((signifFloor & 1) != 0 || abs().getLowestSetBit() < shift);
long signifRounded = increment ? signifFloor + 1 : signifFloor;
// Assemble IEEE 754 bits: biased exponent field then significand.
long bits = (long) ((exponent + DoubleConsts.EXP_BIAS))
<< (DoubleConsts.SIGNIFICAND_WIDTH - 1);
bits += signifRounded;
/*
 * If signifRounded == 2^53, we'd need to set all of the significand
 * bits to zero and add 1 to the exponent. This is exactly the behavior
 * we get from just adding signifRounded to bits directly. If the
 * exponent is Double.MAX_EXPONENT, we round up (correctly) to
 * Double.POSITIVE_INFINITY.
 */
bits |= signum & DoubleConsts.SIGN_BIT_MASK;
return Double.longBitsToDouble(bits);
}
/**
 * Returns a copy of the input array stripped of any leading zero words.
 * Always allocates, even when nothing is stripped.
 */
private static int[] stripLeadingZeroInts(int val[]) {
    // Locate the first nonzero word.
    int keep = 0;
    while (keep < val.length && val[keep] == 0)
        keep++;
    return java.util.Arrays.copyOfRange(val, keep, val.length);
}
/**
 * Returns the input array stripped of any leading zero ints.
 * The caller is trusted, so when nothing needs stripping the
 * original array is returned without copying.
 */
private static int[] trustedStripLeadingZeroInts(int val[]) {
    final int vlen = val.length;
    int keep = 0;
    // Advance past the run of leading zero elements, if any.
    while (keep < vlen && val[keep] == 0) {
        keep++;
    }
    if (keep == 0) {
        // No leading zeros: hand the caller's array straight back.
        return val;
    }
    return java.util.Arrays.copyOfRange(val, keep, vlen);
}
/**
 * Returns a copy of the input array stripped of any leading zero bytes.
 * The surviving bytes are repacked big-endian into an int array, four
 * bytes per int (the most significant int may hold fewer).
 */
private static int[] stripLeadingZeroBytes(byte a[]) {
    int byteLength = a.length;
    int keep;
    // Find first nonzero byte
    for (keep = 0; keep < byteLength && a[keep] == 0; keep++)
        ;
    // Allocate new array and copy relevant part of input array.
    // Round the surviving byte count up to whole 32-bit words.
    int intLength = ((byteLength - keep) + 3) >>> 2;
    int[] result = new int[intLength];
    int b = byteLength - 1;
    // Fill from the least significant int upward, consuming input
    // bytes from the tail of the array.
    for (int i = intLength-1; i >= 0; i--) {
        // Start the word with the least significant remaining byte...
        result[i] = a[b--] & 0xff;
        int bytesRemaining = b - keep + 1;
        int bytesToTransfer = Math.min(3, bytesRemaining);
        // ...then OR in up to three more bytes, each shifted into place.
        for (int j=8; j <= (bytesToTransfer << 3); j += 8)
            result[i] |= ((a[b--] & 0xff) << j);
    }
    return result;
}
/**
 * Takes an array a representing a negative 2's-complement number and
 * returns the minimal (no leading zero bytes) unsigned whose value is -a.
 * The negation is performed as one's complement plus one, packed into ints.
 */
private static int[] makePositive(byte a[]) {
    int keep, k;
    int byteLength = a.length;
    // Find first non-sign (0xff) byte of input
    for (keep=0; keep < byteLength && a[keep] == -1; keep++)
        ;
    /* Allocate output array. If all non-sign bytes are 0x00, we must
     * allocate space for one extra output byte. */
    for (k=keep; k < byteLength && a[k] == 0; k++)
        ;
    int extraByte = (k == byteLength) ? 1 : 0;
    // Round the surviving byte count (plus any carry byte) up to whole words.
    int intLength = ((byteLength - keep + extraByte) + 3) >>> 2;
    int result[] = new int[intLength];
    /* Copy one's complement of input into output, leaving extra
     * byte (if it exists) == 0x00 */
    int b = byteLength - 1;
    for (int i = intLength-1; i >= 0; i--) {
        result[i] = a[b--] & 0xff;
        int numBytesToTransfer = Math.min(3, b-keep+1);
        if (numBytesToTransfer < 0)
            numBytesToTransfer = 0;
        for (int j=8; j <= 8*numBytesToTransfer; j += 8)
            result[i] |= ((a[b--] & 0xff) << j);
        // Mask indicates which bits must be complemented
        int mask = -1 >>> (8*(3-numBytesToTransfer));
        result[i] = ~result[i] & mask;
    }
    // Add one to one's complement to generate two's complement
    for (int i=result.length-1; i >= 0; i--) {
        result[i] = (int)((result[i] & LONG_MASK) + 1);
        if (result[i] != 0)
            break;  // no carry out of this word, so the increment is done
    }
    return result;
}
/**
 * Takes an array a representing a negative 2's-complement number and
 * returns the minimal (no leading zero ints) unsigned whose value is -a.
 * The negation is performed as one's complement plus one.
 */
private static int[] makePositive(int a[]) {
    int keep, j;
    // Find first non-sign (0xffffffff) int of input
    for (keep=0; keep < a.length && a[keep] == -1; keep++)
        ;
    /* Allocate output array. If all non-sign ints are 0x00, we must
     * allocate space for one extra output int. */
    for (j=keep; j < a.length && a[j] == 0; j++)
        ;
    int extraInt = (j == a.length ? 1 : 0);
    int result[] = new int[a.length - keep + extraInt];
    /* Copy one's complement of input into output, leaving extra
     * int (if it exists) == 0x00 */
    for (int i = keep; i < a.length; i++)
        result[i - keep + extraInt] = ~a[i];
    // Add one to one's complement to generate two's complement
    // (keep incrementing while each word wraps around to zero).
    for (int i=result.length-1; ++result[i] == 0; i--)
        ;
    return result;
}
/*
 * The following two arrays are used for fast String conversions. Both
 * are indexed by radix. The first is the number of digits of the given
 * radix that can fit in a Java long without "going negative", i.e., the
 * highest integer n such that radix**n < 2**63. The second is the
 * "long radix" that tears each number into "long digits", each of which
 * consists of the number of digits in the corresponding element in
 * digitsPerLong (longRadix[i] = i**digitPerLong[i]). Both arrays have
 * nonsense values in their 0 and 1 elements, as radixes 0 and 1 are not
 * used.
 */
// These are immutable lookup tables, so declare them final to prevent
// accidental reassignment and document the intent to the JIT/reader.
private static final int digitsPerLong[] = {0, 0,
    62, 39, 31, 27, 24, 22, 20, 19, 18, 18, 17, 17, 16, 16, 15, 15, 15, 14,
    14, 14, 14, 13, 13, 13, 13, 13, 13, 12, 12, 12, 12, 12, 12, 12, 12};
private static final BigInteger longRadix[] = {null, null,
    valueOf(0x4000000000000000L), valueOf(0x383d9170b85ff80bL),
    valueOf(0x4000000000000000L), valueOf(0x6765c793fa10079dL),
    valueOf(0x41c21cb8e1000000L), valueOf(0x3642798750226111L),
    valueOf(0x1000000000000000L), valueOf(0x12bf307ae81ffd59L),
    valueOf( 0xde0b6b3a7640000L), valueOf(0x4d28cb56c33fa539L),
    valueOf(0x1eca170c00000000L), valueOf(0x780c7372621bd74dL),
    valueOf(0x1e39a5057d810000L), valueOf(0x5b27ac993df97701L),
    valueOf(0x1000000000000000L), valueOf(0x27b95e997e21d9f1L),
    valueOf(0x5da0e1e53c5c8000L), valueOf( 0xb16a458ef403f19L),
    valueOf(0x16bcc41e90000000L), valueOf(0x2d04b7fdd9c0ef49L),
    valueOf(0x5658597bcaa24000L), valueOf( 0x6feb266931a75b7L),
    valueOf( 0xc29e98000000000L), valueOf(0x14adf4b7320334b9L),
    valueOf(0x226ed36478bfa000L), valueOf(0x383d9170b85ff80bL),
    valueOf(0x5a3c23e39c000000L), valueOf( 0x4e900abb53e6b71L),
    valueOf( 0x7600ec618141000L), valueOf( 0xaee5720ee830681L),
    valueOf(0x1000000000000000L), valueOf(0x172588ad4f5f0981L),
    valueOf(0x211e44f7d02c1000L), valueOf(0x2ee56725f06e5c71L),
    valueOf(0x41c21cb8e1000000L)};
/*
 * These two arrays are the integer analogue of above.
 */
// Immutable lookup tables; declared final so they cannot be reassigned.
private static final int digitsPerInt[] = {0, 0, 30, 19, 15, 13, 11,
    11, 10, 9, 9, 8, 8, 8, 8, 7, 7, 7, 7, 7, 7, 7, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 5};
private static final int intRadix[] = {0, 0,
    0x40000000, 0x4546b3db, 0x40000000, 0x48c27395, 0x159fd800,
    0x75db9c97, 0x40000000, 0x17179149, 0x3b9aca00, 0xcc6db61,
    0x19a10000, 0x309f1021, 0x57f6c100, 0xa2f1b6f, 0x10000000,
    0x18754571, 0x247dbc80, 0x3547667b, 0x4c4b4000, 0x6b5a6e1d,
    0x6c20a40, 0x8d2d931, 0xb640000, 0xe8d4a51, 0x1269ae40,
    0x17179149, 0x1cb91000, 0x23744899, 0x2b73a840, 0x34e63b41,
    0x40000000, 0x4cfa3cc1, 0x5c13d840, 0x6d91b519, 0x39aa400
};
/**
 * These routines provide access to the two's complement representation
 * of BigIntegers.
 */
/**
 * Returns the length of the two's complement representation in ints,
 * including space for at least one sign bit.
 */
private int intLength() {
    int magnitudeBits = bitLength();
    // Divide by 32 to get whole ints, then leave room for a sign bit.
    return (magnitudeBits >>> 5) + 1;
}
/** Returns the sign bit: 1 for a negative value, otherwise 0. */
private int signBit() {
    if (signum < 0) {
        return 1;
    }
    return 0;
}

/** Returns an int made entirely of sign bits: -1 if negative, else 0. */
private int signInt() {
    if (signum < 0) {
        return -1;
    }
    return 0;
}
/**
 * Returns the specified int of the little-endian two's complement
 * representation (int 0 is the least significant). The int number can
 * be arbitrarily high (values are logically preceded by infinitely many
 * sign ints).
 */
private int getInt(int n) {
    if (n < 0) {
        return 0;
    }
    if (n >= mag.length) {
        // Beyond the magnitude: every higher word is a sign extension.
        return signInt();
    }
    int word = mag[mag.length - n - 1];
    if (signum >= 0) {
        return word;
    }
    // Negative value: words at or below the first nonzero word are negated,
    // words above it are simply complemented (the +1 carry stops earlier).
    return (n <= firstNonzeroIntNum()) ? -word : ~word;
}
/**
 * Returns the index of the int that contains the first nonzero int in the
 * little-endian binary representation of the magnitude (int 0 is the
 * least significant). If the magnitude is zero, return value is undefined.
 * The result is cached in {@code firstNonzeroIntNum}, offset by two so
 * that the field's default value of 0 means "not yet computed".
 */
private int firstNonzeroIntNum() {
    int fn = firstNonzeroIntNum - 2;
    if (fn == -2) { // cache miss: compute and remember the answer
        int mlen = mag.length;
        int i = mlen - 1;
        // Scan from the least significant end for the first nonzero word.
        while (i >= 0 && mag[i] == 0) {
            i--;
        }
        fn = mlen - i - 1;
        firstNonzeroIntNum = fn + 2; // offset by two to initialize
    }
    return fn;
}
/** use serialVersionUID from JDK 1.1. for interoperability */
private static final long serialVersionUID = -8287574255936472291L;
/**
 * Serializable fields for BigInteger.
 *
 * The magnitude is serialized as a byte array (not the internal int[])
 * for compatibility with the historical stream format; see readObject
 * and writeObject below.
 *
 * @serialField signum int
 *              signum of this BigInteger.
 * @serialField magnitude int[]
 *              magnitude array of this BigInteger.
 * @serialField bitCount int
 *              number of bits in this BigInteger
 * @serialField bitLength int
 *              the number of bits in the minimal two's-complement
 *              representation of this BigInteger
 * @serialField lowestSetBit int
 *              lowest set bit in the twos complement representation
 */
private static final ObjectStreamField[] serialPersistentFields = {
    new ObjectStreamField("signum", Integer.TYPE),
    new ObjectStreamField("magnitude", byte[].class),
    new ObjectStreamField("bitCount", Integer.TYPE),
    new ObjectStreamField("bitLength", Integer.TYPE),
    new ObjectStreamField("firstNonzeroByteNum", Integer.TYPE),
    new ObjectStreamField("lowestSetBit", Integer.TYPE)
};
/**
 * Reconstitute the {@code BigInteger} instance from a stream (that is,
 * deserialize it). The magnitude is read in as an array of bytes
 * for historical reasons, but it is converted to an array of ints
 * and the byte array is discarded.
 * Note:
 * The current convention is to initialize the cache fields, bitCount,
 * bitLength and lowestSetBit, to 0 rather than some other marker value.
 * Therefore, no explicit action to set these fields needs to be taken in
 * readObject because those fields already have a 0 value by default since
 * defaultReadObject is not being used.
 */
private void readObject(java.io.ObjectInputStream s)
    throws java.io.IOException, ClassNotFoundException {
    /*
     * In order to maintain compatibility with previous serialized forms,
     * the magnitude of a BigInteger is serialized as an array of bytes.
     * The magnitude field is used as a temporary store for the byte array
     * that is deserialized. The cached computation fields should be
     * transient but are serialized for compatibility reasons.
     */
    // prepare to read the alternate persistent fields
    ObjectInputStream.GetField fields = s.readFields();
    // Read the alternate persistent fields that we care about
    int sign = fields.get("signum", -2);
    byte[] magnitude = (byte[])fields.get("magnitude", null);
    // Validate signum: only -1, 0 and 1 are legal; anything else means
    // the stream is corrupted (or the field was absent entirely).
    if (sign < -1 || sign > 1) {
        String message = "BigInteger: Invalid signum value";
        if (fields.defaulted("signum"))
            message = "BigInteger: Signum not present in stream";
        throw new java.io.StreamCorruptedException(message);
    }
    int[] mag = stripLeadingZeroBytes(magnitude);
    // A zero magnitude must pair with a zero signum, and vice versa.
    if ((mag.length == 0) != (sign == 0)) {
        String message = "BigInteger: signum-magnitude mismatch";
        if (fields.defaulted("magnitude"))
            message = "BigInteger: Magnitude not present in stream";
        throw new java.io.StreamCorruptedException(message);
    }
    // Commit final fields via Unsafe
    UnsafeHolder.putSign(this, sign);
    // Calculate mag field from magnitude and discard magnitude
    UnsafeHolder.putMag(this, mag);
    if (mag.length >= MAX_MAG_LENGTH) {
        try {
            checkRange();
        } catch (ArithmeticException e) {
            throw new java.io.StreamCorruptedException("BigInteger: Out of the supported range");
        }
    }
}
// Support for resetting final fields while deserializing. A holder class
// is used so the Unsafe machinery is only initialized if deserialization
// actually occurs.
private static class UnsafeHolder {
    private static final sun.misc.Unsafe unsafe;
    // Field offsets of BigInteger.signum and BigInteger.mag, resolved once.
    private static final long signumOffset;
    private static final long magOffset;
    static {
        try {
            unsafe = sun.misc.Unsafe.getUnsafe();
            signumOffset = unsafe.objectFieldOffset
                (BigInteger.class.getDeclaredField("signum"));
            magOffset = unsafe.objectFieldOffset
                (BigInteger.class.getDeclaredField("mag"));
        } catch (Exception ex) {
            // Reflection failure here is unrecoverable; fail class init.
            throw new ExceptionInInitializerError(ex);
        }
    }
    // Writes the final signum field of a deserialized instance.
    static void putSign(BigInteger bi, int sign) {
        unsafe.putIntVolatile(bi, signumOffset, sign);
    }
    // Writes the final mag field of a deserialized instance.
    static void putMag(BigInteger bi, int[] magnitude) {
        unsafe.putObjectVolatile(bi, magOffset, magnitude);
    }
}
/**
 * Save the {@code BigInteger} instance to a stream.
 * The magnitude of a BigInteger is serialized as a byte array for
 * historical reasons.
 *
 * @serialData two necessary fields are written as well as obsolete
 *             fields for compatibility with older versions.
 */
private void writeObject(ObjectOutputStream s) throws IOException {
    // set the values of the Serializable fields
    ObjectOutputStream.PutField fields = s.putFields();
    fields.put("signum", signum);
    fields.put("magnitude", magSerializedForm());
    // The values written for cached fields are compatible with older
    // versions, but are ignored in readObject so don't otherwise matter.
    fields.put("bitCount", -1);
    fields.put("bitLength", -1);
    fields.put("lowestSetBit", -2);
    fields.put("firstNonzeroByteNum", -2);
    // save them
    s.writeFields();
}
/**
 * Returns the mag array as an array of bytes (big-endian, minimal length).
 * Used by writeObject to produce the historical serialized form.
 */
private byte[] magSerializedForm() {
    int len = mag.length;
    // Bit length of the magnitude: 32 bits per full word plus the
    // significant bits of the most significant word.
    int bitLen = (len == 0 ? 0 : ((len - 1) << 5) + bitLengthForInt(mag[0]));
    int byteLen = (bitLen + 7) >>> 3;
    byte[] result = new byte[byteLen];
    // Walk the output from least significant byte to most, peeling four
    // bytes out of each magnitude int before loading the next one.
    for (int i = byteLen - 1, bytesCopied = 4, intIndex = len - 1, nextInt = 0;
         i >= 0; i--) {
        if (bytesCopied == 4) {
            nextInt = mag[intIndex--];
            bytesCopied = 1;
        } else {
            nextInt >>>= 8;
            bytesCopied++;
        }
        result[i] = (byte)nextInt;
    }
    return result;
}
/**
 * Converts this {@code BigInteger} to a {@code long}, checking
 * for lost information. If the value of this {@code BigInteger}
 * is out of the range of the {@code long} type, then an
 * {@code ArithmeticException} is thrown.
 *
 * @return this {@code BigInteger} converted to a {@code long}.
 * @throws ArithmeticException if the value of {@code this} will
 *         not exactly fit in a {@code long}.
 * @see BigInteger#longValue
 * @since  1.8
 */
public long longValueExact() {
    // More than two magnitude words, or more than 63 bits, cannot fit.
    if (mag.length > 2 || bitLength() > 63) {
        throw new ArithmeticException("BigInteger out of long range");
    }
    return longValue();
}
/**
 * Converts this {@code BigInteger} to an {@code int}, checking
 * for lost information. If the value of this {@code BigInteger}
 * is out of the range of the {@code int} type, then an
 * {@code ArithmeticException} is thrown.
 *
 * @return this {@code BigInteger} converted to an {@code int}.
 * @throws ArithmeticException if the value of {@code this} will
 *         not exactly fit in a {@code int}.
 * @see BigInteger#intValue
 * @since  1.8
 */
public int intValueExact() {
    // More than one magnitude word, or more than 31 bits, cannot fit.
    if (mag.length > 1 || bitLength() > 31) {
        throw new ArithmeticException("BigInteger out of int range");
    }
    return intValue();
}
/**
 * Converts this {@code BigInteger} to a {@code short}, checking
 * for lost information. If the value of this {@code BigInteger}
 * is out of the range of the {@code short} type, then an
 * {@code ArithmeticException} is thrown.
 *
 * @return this {@code BigInteger} converted to a {@code short}.
 * @throws ArithmeticException if the value of {@code this} will
 *         not exactly fit in a {@code short}.
 * @see BigInteger#shortValue
 * @since  1.8
 */
public short shortValueExact() {
    // Must first fit in an int before the short range check is meaningful.
    if (mag.length <= 1 && bitLength() <= 31) {
        int value = intValue();
        if (Short.MIN_VALUE <= value && value <= Short.MAX_VALUE) {
            return shortValue();
        }
    }
    throw new ArithmeticException("BigInteger out of short range");
}
/**
 * Converts this {@code BigInteger} to a {@code byte}, checking
 * for lost information. If the value of this {@code BigInteger}
 * is out of the range of the {@code byte} type, then an
 * {@code ArithmeticException} is thrown.
 *
 * @return this {@code BigInteger} converted to a {@code byte}.
 * @throws ArithmeticException if the value of {@code this} will
 *         not exactly fit in a {@code byte}.
 * @see BigInteger#byteValue
 * @since  1.8
 */
public byte byteValueExact() {
    // Must first fit in an int before the byte range check is meaningful.
    if (mag.length <= 1 && bitLength() <= 31) {
        int value = intValue();
        if (Byte.MIN_VALUE <= value && value <= Byte.MAX_VALUE) {
            return byteValue();
        }
    }
    throw new ArithmeticException("BigInteger out of byte range");
}
}
|
package com.tuling.config;
import org.springframework.context.annotation.DeferredImportSelector;
import org.springframework.core.type.AnnotationMetadata;
import java.util.Arrays;
/**
 * {@link DeferredImportSelector} that delegates import resolution to a
 * {@link Group}: Spring calls {@link #getImportGroup()}, instantiates
 * {@link MyGroup}, and asks the group for the imports instead of calling
 * {@link #selectImports(AnnotationMetadata)} directly.
 *
 * @author dunxinlong
 * @email dunxl1010@163.com
 * @since 4/5/21 8:16 PM
 **/
public class MyDeferredImportSelect implements DeferredImportSelector {

    @Override
    public Class<? extends Group> getImportGroup() {
        return MyGroup.class;
    }

    /**
     * Not consulted when an import group is supplied; Spring uses
     * {@link MyGroup#selectImports()} instead, so this returns nothing.
     */
    @Override
    public String[] selectImports(AnnotationMetadata importingClassMetadata) {
        return new String[0];
    }

    public static class MyGroup implements Group {

        // Metadata of the importing class, captured in process(...) and
        // replayed into the Entry below. Private: only this group reads it.
        private AnnotationMetadata metadata;

        @Override
        public void process(AnnotationMetadata metadata, DeferredImportSelector selector) {
            this.metadata = metadata;
        }

        @Override
        public Iterable<Entry> selectImports() {
            return Arrays.asList(new Entry(metadata, "com.tuling.entity.User"));
        }
    }
}
|
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2005 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.objectweb.asm;
/**
 * A label represents a position in the bytecode of a method. Labels are used
 * for jump, goto, and switch instructions, and for try catch blocks.
 *
 * @author Eric Bruneton
 */
public class Label {

    /**
     * The line number corresponding to this label, if known.
     */
    int line;

    /**
     * Indicates if the position of this label is known.
     */
    boolean resolved;

    /**
     * The position of this label in the code, if known.
     */
    int position;

    /**
     * If the label position has been updated, after instruction resizing.
     */
    boolean resized;

    /**
     * Number of forward references to this label, times two.
     */
    private int referenceCount;

    /**
     * Information about forward references. Each forward reference is
     * described by two consecutive integers in this array: the first one is the
     * position of the first byte of the bytecode instruction that contains the
     * forward reference, while the second is the position of the first byte of
     * the forward reference itself. In fact the sign of the first integer
     * indicates if this reference uses 2 or 4 bytes, and its absolute value
     * gives the position of the bytecode instruction.
     */
    private int[] srcAndRefPositions;

    /*
     * Fields for the control flow graph analysis algorithm (used to compute the
     * maximum stack size). A control flow graph contains one node per "basic
     * block", and one edge per "jump" from one basic block to another. Each
     * node (i.e., each basic block) is represented by the Label object that
     * corresponds to the first instruction of this basic block. Each node also
     * stores the list of its successors in the graph, as a linked list of Edge
     * objects.
     */

    /**
     * The stack size at the beginning of this basic block. This size is
     * initially unknown. It is computed by the control flow analysis algorithm
     * (see {@link MethodWriter#visitMaxs visitMaxs}).
     */
    int beginStackSize;

    /**
     * The (relative) maximum stack size corresponding to this basic block. This
     * size is relative to the stack size at the beginning of the basic block,
     * i.e., the true maximum stack size is equal to {@link #beginStackSize
     * beginStackSize} + {@link #maxStackSize maxStackSize}.
     */
    int maxStackSize;

    /**
     * The successors of this node in the control flow graph. These successors
     * are stored in a linked list of {@link Edge Edge} objects, linked to each
     * other by their {@link Edge#next} field.
     */
    Edge successors;

    /**
     * The next basic block in the basic block stack. See
     * {@link MethodWriter#visitMaxs visitMaxs}.
     */
    Label next;

    /**
     * <tt>true</tt> if this basic block has been pushed in the basic block
     * stack. See {@link MethodWriter#visitMaxs visitMaxs}.
     */
    boolean pushed;

    // ------------------------------------------------------------------------
    // Constructor
    // ------------------------------------------------------------------------

    /**
     * Constructs a new label.
     */
    public Label() {
    }

    // ------------------------------------------------------------------------
    // Methods to compute offsets and to manage forward references
    // ------------------------------------------------------------------------

    /**
     * Returns the offset corresponding to this label. This offset is computed
     * from the start of the method's bytecode. <i>This method is intended for
     * {@link Attribute} sub classes, and is normally not needed by class
     * generators or adapters.</i>
     *
     * @return the offset corresponding to this label.
     * @throws IllegalStateException if this label is not resolved yet.
     */
    public int getOffset() {
        if (!resolved) {
            throw new IllegalStateException("Label offset position has not been resolved yet");
        }
        return position;
    }

    /**
     * Puts a reference to this label in the bytecode of a method. If the
     * position of the label is known, the offset is computed and written
     * directly. Otherwise, a null offset is written and a new forward reference
     * is declared for this label.
     *
     * @param owner the code writer that calls this method.
     * @param out the bytecode of the method.
     * @param source the position of first byte of the bytecode instruction that
     *        contains this label.
     * @param wideOffset <tt>true</tt> if the reference must be stored in 4
     *        bytes, or <tt>false</tt> if it must be stored with 2 bytes.
     * @throws IllegalArgumentException if this label has not been created by
     *         the given code writer.
     */
    void put(
        final MethodWriter owner,
        final ByteVector out,
        final int source,
        final boolean wideOffset)
    {
        if (resolved) {
            if (wideOffset) {
                out.putInt(position - source);
            } else {
                out.putShort(position - source);
            }
        } else {
            if (wideOffset) {
                // A negative source marks the pending reference as 4 bytes wide.
                addReference(-1 - source, out.length);
                out.putInt(-1);
            } else {
                addReference(source, out.length);
                out.putShort(-1);
            }
        }
    }

    /**
     * Adds a forward reference to this label. This method must be called only
     * for a true forward reference, i.e. only if this label is not resolved
     * yet. For backward references, the offset of the reference can be, and
     * must be, computed and stored directly.
     *
     * @param sourcePosition the position of the referencing instruction. This
     *        position will be used to compute the offset of this forward
     *        reference.
     * @param referencePosition the position where the offset for this forward
     *        reference must be stored.
     */
    private void addReference(
        final int sourcePosition,
        final int referencePosition)
    {
        if (srcAndRefPositions == null) {
            srcAndRefPositions = new int[6];
        }
        if (referenceCount >= srcAndRefPositions.length) {
            // Grow the array by three (source, reference) pairs at a time.
            int[] a = new int[srcAndRefPositions.length + 6];
            System.arraycopy(srcAndRefPositions,
                    0,
                    a,
                    0,
                    srcAndRefPositions.length);
            srcAndRefPositions = a;
        }
        srcAndRefPositions[referenceCount++] = sourcePosition;
        srcAndRefPositions[referenceCount++] = referencePosition;
    }

    /**
     * Resolves all forward references to this label. This method must be called
     * when this label is added to the bytecode of the method, i.e. when its
     * position becomes known. This method fills in the blanks that were left
     * in the bytecode by each forward reference previously added to this label.
     *
     * @param owner the code writer that calls this method.
     * @param position the position of this label in the bytecode.
     * @param data the bytecode of the method.
     * @return <tt>true</tt> if a blank that was left for this label was too
     *         small to store the offset. In such a case the corresponding jump
     *         instruction is replaced with a pseudo instruction (using unused
     *         opcodes) using an unsigned two bytes offset. These pseudo
     *         instructions will need to be replaced with true instructions with
     *         wider offsets (4 bytes instead of 2). This is done in
     *         {@link MethodWriter#resizeInstructions}.
     * @throws IllegalArgumentException if this label has already been resolved,
     *         or if it has not been created by the given code writer.
     */
    boolean resolve(
        final MethodWriter owner,
        final int position,
        final byte[] data)
    {
        boolean needUpdate = false;
        this.resolved = true;
        this.position = position;
        int i = 0;
        while (i < referenceCount) {
            int source = srcAndRefPositions[i++];
            int reference = srcAndRefPositions[i++];
            int offset;
            if (source >= 0) {
                offset = position - source;
                if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE) {
                    /*
                     * changes the opcode of the jump instruction, in order to
                     * be able to find it later (see resizeInstructions in
                     * MethodWriter). These temporary opcodes are similar to
                     * jump instruction opcodes, except that the 2 bytes offset
                     * is unsigned (and can therefore represent values from 0 to
                     * 65535, which is sufficient since the size of a method is
                     * limited to 65535 bytes).
                     */
                    int opcode = data[reference - 1] & 0xFF;
                    if (opcode <= Opcodes.JSR) {
                        // changes IFEQ ... JSR to opcodes 202 to 217
                        data[reference - 1] = (byte) (opcode + 49);
                    } else {
                        // changes IFNULL and IFNONNULL to opcodes 218 and 219
                        data[reference - 1] = (byte) (opcode + 20);
                    }
                    needUpdate = true;
                }
                data[reference++] = (byte) (offset >>> 8);
                data[reference] = (byte) offset;
            } else {
                // Negative source means a 4-byte (wide) reference; recover the
                // original source position encoded as -1 - source.
                offset = position + source + 1;
                data[reference++] = (byte) (offset >>> 24);
                data[reference++] = (byte) (offset >>> 16);
                data[reference++] = (byte) (offset >>> 8);
                data[reference] = (byte) offset;
            }
        }
        return needUpdate;
    }

    // ------------------------------------------------------------------------
    // Overridden Object methods
    // ------------------------------------------------------------------------

    /**
     * Returns a string representation of this label.
     *
     * @return a string representation of this label.
     */
    @Override
    public String toString() {
        return "L" + System.identityHashCode(this);
    }
}
|
package ex1;
import ex1.testin.CanCry;
public class Animal {

    /** Asks the given crier to make its sound. */
    void makecry(CanCry crier) {
        crier.cry();
    }

    public static void main(String[] args) {
        Dog dog = new Dog();
        Cat cat = new Cat();
        Animal trainer = new Animal();
        trainer.makecry(dog);
        trainer.makecry(cat);
    }
}
|
package com.webcheckers.ui.HtmlRoutes;
import static spark.Spark.halt;
import com.webcheckers.application.GameCenter;
import com.webcheckers.application.PlayerLobby;
import com.webcheckers.model.Player.Player;
import com.webcheckers.ui.WebServer;
import java.util.Objects;
import java.util.logging.Logger;
import spark.Request;
import spark.Response;
import spark.Route;
import spark.TemplateEngine;
/**
* Abstract class for HtmlRoutes.
*
* @author <a href='mailto:axf5592@rit.edu'>Andrew Festa</a>
*/
public abstract class HtmlRoute implements Route {
//
// Attributes
//
final GameCenter gameCenter;
final PlayerLobby playerLobby;
final TemplateEngine templateEngine;
final Logger LOG = Logger.getLogger(this.getClass().getName());
/**
* Create the Spark Route (UI controller) for the HTTP request.
*
* @param gameCenter the {@link GameCenter} for tracking all ongoing games
* @param playerLobby the default {@link PlayerLobby} for tracking all players
* @param templateEngine the {@link TemplateEngine} used for rendering page HTML.
* @throws NullPointerException when the {@code gameCenter}, {@code playerLobby}, or {@code
* templateEngine} parameter is null
*/
public HtmlRoute(GameCenter gameCenter, PlayerLobby playerLobby, final TemplateEngine templateEngine)
throws NullPointerException {
Objects.requireNonNull(playerLobby, "playerLobby must not be null");
Objects.requireNonNull(gameCenter, "gameCenter must not be null");
Objects.requireNonNull(templateEngine, "templateEngine must not be null");
this.playerLobby = playerLobby;
this.gameCenter = gameCenter;
this.templateEngine = templateEngine;
}
/**
* Abstract method for handling HTTP requests.
*
* @param request the HTTP request
* @param response the HTTP response
* @return a message indicating whether the request was successful
*/
public abstract Object handle(Request request, Response response);
boolean checkValidPlayerName(String playerName){
boolean playerStored = playerName != null;
Player currPlayer = playerLobby.getPlayer(playerName);
boolean playerExits = currPlayer != null;
return playerStored && playerExits;
}
}
|
package com.tipdm.framework.persist;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.repository.NoRepositoryBean;
import org.springframework.data.repository.PagingAndSortingRepository;
import java.io.Serializable;
/**
 * 基础Dao (base repository shared by all entity repositories; combines
 * paging/sorting CRUD with JPA Specification queries).
 *
 * @author yarn
 *
 * @param <T> 实体类 泛型 (entity type)
 * @param <ID> id (primary key type)
 */
@NoRepositoryBean
public interface BaseRepository<T, ID extends Serializable> extends
        PagingAndSortingRepository<T, ID>, JpaSpecificationExecutor<T> {

    /**
     * Returns a page of entities according to the given paging/sorting request.
     * Redeclared here (the modifier {@code public} is redundant in an
     * interface and has been dropped) so callers of BaseRepository see it
     * directly.
     *
     * @param pageable paging and sorting information
     * @return the requested page of entities
     */
    Page<T> findAll(Pageable pageable);
}
|
/*
* Copyright 2012 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.eureka.registry;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import com.netflix.appinfo.AmazonInfo;
import com.netflix.appinfo.AmazonInfo.MetaDataKey;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.DataCenterInfo;
import com.netflix.appinfo.DataCenterInfo.Name;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.appinfo.InstanceInfo.InstanceStatus;
import com.netflix.appinfo.LeaseInfo;
import com.netflix.discovery.EurekaClient;
import com.netflix.discovery.EurekaClientConfig;
import com.netflix.discovery.shared.Application;
import com.netflix.discovery.shared.Applications;
import com.netflix.eureka.registry.rule.DownOrStartingRule;
import com.netflix.eureka.registry.rule.FirstMatchWinsCompositeRule;
import com.netflix.eureka.registry.rule.InstanceStatusOverrideRule;
import com.netflix.eureka.registry.rule.LeaseExistsRule;
import com.netflix.eureka.registry.rule.OverrideExistsRule;
import com.netflix.eureka.resources.CurrentRequestVersion;
import com.netflix.eureka.EurekaServerConfig;
import com.netflix.eureka.Version;
import com.netflix.eureka.cluster.PeerEurekaNode;
import com.netflix.eureka.cluster.PeerEurekaNodes;
import com.netflix.eureka.lease.Lease;
import com.netflix.eureka.resources.ASGResource.ASGStatus;
import com.netflix.eureka.resources.ServerCodecs;
import com.netflix.eureka.util.MeasuredRate;
import com.netflix.servo.DefaultMonitorRegistry;
import com.netflix.servo.annotations.DataSourceType;
import com.netflix.servo.monitor.Monitors;
import com.netflix.servo.monitor.Stopwatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* Handles replication of all operations to {@link AbstractInstanceRegistry} to peer
* <em>Eureka</em> nodes to keep them all in sync.
*
* <p>
* Primary operations that are replicated are the
* <em>Registers,Renewals,Cancels,Expirations and Status Changes</em>
* </p>
*
* <p>
* When the eureka server starts up it tries to fetch all the registry
* information from the peer eureka nodes. If for some reason this operation
* fails, the server does not allow the user to get the registry information for
* a period specified in
* {@link com.netflix.eureka.EurekaServerConfig#getWaitTimeInMsWhenSyncEmpty()}.
* </p>
*
* <p>
* One important thing to note about <em>renewals</em>. If the renewal drops more
* than the specified threshold as specified in
* {@link com.netflix.eureka.EurekaServerConfig#getRenewalPercentThreshold()} within a period of
* {@link com.netflix.eureka.EurekaServerConfig#getRenewalThresholdUpdateIntervalMs()}, eureka
* perceives this as a danger and stops expiring instances.
* </p>
*
* @author Karthik Ranganathan, Greg Kim
*
*/
@Singleton
public class PeerAwareInstanceRegistryImpl extends AbstractInstanceRegistry implements PeerAwareInstanceRegistry {
    private static final Logger logger = LoggerFactory.getLogger(PeerAwareInstanceRegistryImpl.class);

    private static final String US_EAST_1 = "us-east-1";
    private static final int PRIME_PEER_NODES_RETRY_MS = 30000;

    // Wall-clock time at which openForTraffic() completed; used together with
    // peerInstancesTransferEmptyOnStartup to gate registry access right after startup.
    private long startupTime = 0;
    // Stays true when the startup sync from peers returned zero instances.
    private boolean peerInstancesTransferEmptyOnStartup = true;

    /**
     * The operations that are replicated to peer nodes. Each action carries its
     * own Servo timer so replication latency can be monitored per action.
     */
    public enum Action {
        Heartbeat, Register, Cancel, StatusUpdate, DeleteStatusOverride;

        private com.netflix.servo.monitor.Timer timer = Monitors.newTimer(this.name());

        public com.netflix.servo.monitor.Timer getTimer() {
            return this.timer;
        }
    }

    // Orders applications lexically by name; used by getSortedApplications().
    private static final Comparator<Application> APP_COMPARATOR = new Comparator<Application>() {
        public int compare(Application l, Application r) {
            return l.getName().compareTo(r.getName());
        }
    };

    // Counts replication events received in the last minute (sliding one-minute window).
    private final MeasuredRate numberOfReplicationsLastMin;

    protected final EurekaClient eurekaClient;
    protected volatile PeerEurekaNodes peerEurekaNodes;

    private final InstanceStatusOverrideRule instanceStatusOverrideRule;

    // Daemon timer that periodically recomputes the renewal threshold.
    private Timer timer = new Timer(
            "ReplicaAwareInstanceRegistry - RenewalThresholdUpdater", true);

    @Inject
    public PeerAwareInstanceRegistryImpl(
            EurekaServerConfig serverConfig,
            EurekaClientConfig clientConfig,
            ServerCodecs serverCodecs,
            EurekaClient eurekaClient
    ) {
        super(serverConfig, clientConfig, serverCodecs);
        this.eurekaClient = eurekaClient;
        // One-minute measurement window for replication counting.
        this.numberOfReplicationsLastMin = new MeasuredRate(1000 * 60 * 1);
        // We first check if the instance is STARTING or DOWN, then we check explicit overrides,
        // then we check the status of a potentially existing lease.
        this.instanceStatusOverrideRule = new FirstMatchWinsCompositeRule(new DownOrStartingRule(),
                new OverrideExistsRule(overriddenInstanceStatusMap), new LeaseExistsRule());
    }

    @Override
    protected InstanceStatusOverrideRule getInstanceInfoOverrideRule() {
        return this.instanceStatusOverrideRule;
    }

    @Override
    public void init(PeerEurekaNodes peerEurekaNodes) throws Exception {
        this.numberOfReplicationsLastMin.start();
        this.peerEurekaNodes = peerEurekaNodes;
        initializedResponseCache();
        // Schedules a periodic task (every 15 minutes by default) that recounts the
        // registerable instances. If the count fetched from peer eureka servers exceeds
        // the local count, the expected renews-per-minute is recomputed, keeping this
        // server's self-preservation threshold in sync with its peers.
        scheduleRenewalThresholdUpdateTask();
        initRemoteRegionRegistry();

        try {
            Monitors.registerObject(this);
        } catch (Throwable e) {
            logger.warn("Cannot register the JMX monitor for the InstanceRegistry :", e);
        }
    }

    /**
     * Perform all cleanup and shutdown operations.
     */
    @Override
    public void shutdown() {
        try {
            DefaultMonitorRegistry.getInstance().unregister(Monitors.newObjectMonitor(this));
        } catch (Throwable t) {
            logger.error("Cannot shutdown monitor registry", t);
        }
        try {
            peerEurekaNodes.shutdown();
        } catch (Throwable t) {
            logger.error("Cannot shutdown ReplicaAwareInstanceRegistry", t);
        }
        numberOfReplicationsLastMin.stop();

        super.shutdown();
    }

    /**
     * Schedule the task that updates <em>renewal threshold</em> periodically.
     * The renewal threshold would be used to determine if the renewals drop
     * dramatically because of network partition and to protect expiring too
     * many instances at a time.
     */
    private void scheduleRenewalThresholdUpdateTask() {
        timer.schedule(new TimerTask() {
                           @Override
                           public void run() {
                               updateRenewalThreshold();
                           }
                       }, serverConfig.getRenewalThresholdUpdateIntervalMs(), // default: 15 minutes
                serverConfig.getRenewalThresholdUpdateIntervalMs());
    }

    /**
     * Populates the registry information from a peer eureka node. This
     * operation fails over to other nodes until the list is exhausted if the
     * communication fails.
     *
     * <p>Copies the registry from a neighboring eureka server and registers
     * locally any instance not yet present. The number of instances pulled from
     * peers is returned and is later used as this server's initial instance
     * count (see {@link #openForTraffic}).
     *
     * @return the number of instances successfully registered from peers.
     */
    @Override
    public int syncUp() {
        // Copy entire entry from neighboring DS node
        int count = 0;
        // serverConfig.getRegistrySyncRetries() defaults to 5 retries.
        for (int i = 0; ((i < serverConfig.getRegistrySyncRetries()) && (count == 0)); i++) {
            if (i > 0) { // Previous attempt yielded nothing: the local eureka client may not
                         // have fetched the registry from a neighboring server yet.
                try {
                    // Sleep for a while to give the embedded eureka client time to register
                    // with a neighboring server and fetch the full registry from it.
                    Thread.sleep(serverConfig.getRegistrySyncRetryWaitMs());
                } catch (InterruptedException e) {
                    logger.warn("Interrupted during registry transfer..");
                    break;
                }
            }
            // Read the locally cached registry. While it is still empty, count stays 0
            // and the retry loop above keeps going.
            Applications apps = eurekaClient.getApplications();
            for (Application app : apps.getRegisteredApplications()) {
                for (InstanceInfo instance : app.getInstances()) {
                    try {
                        if (isRegisterable(instance)) {
                            register(instance, instance.getLeaseInfo().getDurationInSecs(), true);
                            count++;
                        }
                    } catch (Throwable t) {
                        logger.error("During DS init copy", t);
                    }
                }
            }
        }
        return count;
    }

    @Override
    public void openForTraffic(ApplicationInfoManager applicationInfoManager, int count) {
        // Renewals happen every 30 seconds and for a minute it should be a factor of 2.
        // Each instance heartbeats every 30 seconds, so one instance is expected to send
        // 2 heartbeats per minute; the expected renews per minute is therefore count * 2.
        // NOTE(review): the factor 2 hard-codes the 30s heartbeat interval — if the client
        // renewal interval is reconfigured this becomes wrong. A cleaner formula would be
        // count * (60 / heartbeatIntervalSecs).
        this.expectedNumberOfRenewsPerMin = count * 2;
        // Initialize numberOfRenewsPerMinThreshold: the minimum number of heartbeats
        // expected per minute before self-preservation kicks in.
        // By default this is count * 2 * 0.85.
        this.numberOfRenewsPerMinThreshold =
                (int) (this.expectedNumberOfRenewsPerMin * serverConfig.getRenewalPercentThreshold()); // default 0.85
        logger.info("Got " + count + " instances from neighboring DS node");
        logger.info("Renew threshold is: " + numberOfRenewsPerMinThreshold);
        this.startupTime = System.currentTimeMillis();
        if (count > 0) {
            this.peerInstancesTransferEmptyOnStartup = false;
        }
        DataCenterInfo.Name selfName = applicationInfoManager.getInfo().getDataCenterInfo().getName();
        boolean isAws = Name.Amazon == selfName;
        if (isAws && serverConfig.shouldPrimeAwsReplicaConnections()) {
            logger.info("Priming AWS connections for all replicas..");
            primeAwsReplicas(applicationInfoManager);
        }
        logger.info("Changing status to UP");
        applicationInfoManager.setInstanceStatus(InstanceStatus.UP);
        // Starts the background task that detects failed instances and evicts them
        // (automatic failure detection / instance expiry).
        // NOTE(review): arguably this scheduling belongs in the registry's own
        // initialization rather than in openForTraffic().
        super.postInit();
    }

    /**
     * Prime connections for Aws replicas.
     * <p>
     * Sometimes when the eureka servers comes up, AWS firewall may not allow
     * the network connections immediately. This will cause the outbound
     * connections to fail, but the inbound connections continue to work. What
     * this means is the clients would have switched to this node (after EIP
     * binding) and so the other eureka nodes will expire all instances that
     * have been switched because of the lack of outgoing heartbeats from this
     * instance.
     * </p>
     * <p>
     * The best protection in this scenario is to block and wait until we are
     * able to ping all eureka nodes successfully atleast once. Until then we
     * won't open up the traffic.
     * </p>
     */
    private void primeAwsReplicas(ApplicationInfoManager applicationInfoManager) {
        boolean areAllPeerNodesPrimed = false;
        while (!areAllPeerNodesPrimed) {
            String peerHostName = null;
            try {
                Application eurekaApps = this.getApplication(applicationInfoManager.getInfo().getAppName(), false);
                if (eurekaApps == null) {
                    areAllPeerNodesPrimed = true;
                    logger.info("No peers needed to prime.");
                    return;
                }
                for (PeerEurekaNode node : peerEurekaNodes.getPeerEurekaNodes()) {
                    for (InstanceInfo peerInstanceInfo : eurekaApps.getInstances()) {
                        LeaseInfo leaseInfo = peerInstanceInfo.getLeaseInfo();
                        // If the lease is expired - do not worry about priming
                        // (expiry time plus a 2-minute grace period).
                        if (System.currentTimeMillis() > (leaseInfo
                                .getRenewalTimestamp() + (leaseInfo
                                .getDurationInSecs() * 1000))
                                + (2 * 60 * 1000)) {
                            continue;
                        }
                        peerHostName = peerInstanceInfo.getHostName();
                        logger.info("Trying to send heartbeat for the eureka server at {} to make sure the " +
                                "network channels are open", peerHostName);
                        // Only try to contact the eureka nodes that are in this instance's registry - because
                        // the other instances may be legitimately down
                        if (peerHostName.equalsIgnoreCase(new URI(node.getServiceUrl()).getHost())) {
                            node.heartbeat(
                                    peerInstanceInfo.getAppName(),
                                    peerInstanceInfo.getId(),
                                    peerInstanceInfo,
                                    null,
                                    true);
                        }
                    }
                }
                areAllPeerNodesPrimed = true;
            } catch (Throwable e) {
                logger.error("Could not contact " + peerHostName, e);
                try {
                    Thread.sleep(PRIME_PEER_NODES_RETRY_MS);
                } catch (InterruptedException e1) {
                    logger.warn("Interrupted while priming : ", e1);
                    areAllPeerNodesPrimed = true;
                }
            }
        }
    }

    /**
     * Checks to see if the registry access is allowed or the server is in a
     * situation where it does not all getting registry information. The server
     * does not return registry information for a period specified in
     * {@link EurekaServerConfig#getWaitTimeInMsWhenSyncEmpty()}, if it cannot
     * get the registry information from the peer eureka nodes at start up.
     *
     * @return false - if the instances count from a replica transfer returned
     *         zero and if the wait time has not elapsed, otherwise returns true
     */
    @Override
    public boolean shouldAllowAccess(boolean remoteRegionRequired) {
        if (this.peerInstancesTransferEmptyOnStartup) {
            if (!(System.currentTimeMillis() > this.startupTime + serverConfig.getWaitTimeInMsWhenSyncEmpty())) {
                return false;
            }
        }
        if (remoteRegionRequired) {
            for (RemoteRegionRegistry remoteRegionRegistry : this.regionNameVSRemoteRegistry.values()) {
                if (!remoteRegionRegistry.isReadyForServingData()) {
                    return false;
                }
            }
        }
        return true;
    }

    // Convenience overload: access requires remote regions to be ready as well.
    public boolean shouldAllowAccess() {
        return shouldAllowAccess(true);
    }

    /**
     * @deprecated use {@link com.netflix.eureka.cluster.PeerEurekaNodes#getPeerEurekaNodes()} directly.
     *
     * Gets the list of peer eureka nodes which is the list to replicate
     * information to.
     *
     * @return the list of replica nodes.
     */
    @Deprecated
    public List<PeerEurekaNode> getReplicaNodes() {
        return Collections.unmodifiableList(peerEurekaNodes.getPeerEurekaNodes());
    }

    /*
     * (non-Javadoc)
     *
     * @see com.netflix.eureka.registry.InstanceRegistry#cancel(java.lang.String,
     * java.lang.String, long, boolean)
     */
    @Override
    public boolean cancel(final String appName, final String id,
                          final boolean isReplication) {
        if (super.cancel(appName, id, isReplication)) {
            replicateToPeers(Action.Cancel, appName, id, null, null, isReplication);
            synchronized (lock) {
                if (this.expectedNumberOfRenewsPerMin > 0) {
                    // Since the client wants to cancel it, reduce the threshold (1 for 30 seconds, 2 for a minute)
                    // NOTE(review): same hard-coded 30s-heartbeat assumption as in openForTraffic().
                    this.expectedNumberOfRenewsPerMin = this.expectedNumberOfRenewsPerMin - 2;
                    // An instance went away: recompute the expected heartbeats per minute.
                    this.numberOfRenewsPerMinThreshold =
                            (int) (this.expectedNumberOfRenewsPerMin * serverConfig.getRenewalPercentThreshold());
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Registers the information about the {@link InstanceInfo} and replicates
     * this information to all peer eureka nodes. If this is replication event
     * from other replica nodes then it is not replicated.
     *
     * @param info
     *            the {@link InstanceInfo} to be registered and replicated.
     * @param isReplication
     *            true if this is a replication event from other replica nodes,
     *            false otherwise.
     */
    @Override
    public void register(final InstanceInfo info, final boolean isReplication) {
        int leaseDuration = Lease.DEFAULT_DURATION_IN_SECS;
        if (info.getLeaseInfo() != null && info.getLeaseInfo().getDurationInSecs() > 0) {
            leaseDuration = info.getLeaseInfo().getDurationInSecs();
        }
        // Perform the actual local registration.
        super.register(info, leaseDuration, isReplication);
        // Replicate this registration to all the other eureka servers.
        replicateToPeers(Action.Register, info.getAppName(), info.getId(), info, null, isReplication);
    }

    /*
     * (non-Javadoc)
     *
     * @see com.netflix.eureka.registry.InstanceRegistry#renew(java.lang.String,
     * java.lang.String, long, boolean)
     */
    public boolean renew(final String appName, final String id, final boolean isReplication) {
        if (super.renew(appName, id, isReplication)) {
            replicateToPeers(Action.Heartbeat, appName, id, null, null, isReplication);
            return true;
        }
        return false;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.netflix.eureka.registry.InstanceRegistry#statusUpdate(java.lang.String,
     * java.lang.String, com.netflix.appinfo.InstanceInfo.InstanceStatus,
     * java.lang.String, boolean)
     */
    @Override
    public boolean statusUpdate(final String appName, final String id,
                                final InstanceStatus newStatus, String lastDirtyTimestamp,
                                final boolean isReplication) {
        if (super.statusUpdate(appName, id, newStatus, lastDirtyTimestamp, isReplication)) {
            replicateToPeers(Action.StatusUpdate, appName, id, null, newStatus, isReplication);
            return true;
        }
        return false;
    }

    @Override
    public boolean deleteStatusOverride(String appName, String id,
                                        InstanceStatus newStatus,
                                        String lastDirtyTimestamp,
                                        boolean isReplication) {
        if (super.deleteStatusOverride(appName, id, newStatus, lastDirtyTimestamp, isReplication)) {
            replicateToPeers(Action.DeleteStatusOverride, appName, id, null, null, isReplication);
            return true;
        }
        return false;
    }

    /**
     * Replicate the <em>ASG status</em> updates to peer eureka nodes. If this
     * event is a replication from other nodes, then it is not replicated to
     * other nodes.
     *
     * @param asgName the asg name for which the status needs to be replicated.
     * @param newStatus the {@link ASGStatus} information that needs to be replicated.
     * @param isReplication true if this is a replication event from other nodes, false otherwise.
     */
    @Override
    public void statusUpdate(final String asgName, final ASGStatus newStatus, final boolean isReplication) {
        // If this is replicated from another node, do not try to replicate again.
        if (isReplication) {
            return;
        }
        for (final PeerEurekaNode node : peerEurekaNodes.getPeerEurekaNodes()) {
            replicateASGInfoToReplicaNodes(asgName, newStatus, node);
        }
    }

    @Override
    public boolean isLeaseExpirationEnabled() {
        // isSelfPreservationModeEnabled() defaults to true.
        if (!isSelfPreservationModeEnabled()) {
            // The self preservation mode is disabled, hence allowing the instances to expire.
            return true;
        }
        // numberOfRenewsPerMinThreshold --> how many heartbeats per minute this server
        //   expects to receive (say, 100).
        // getNumOfRenewsInLastMin() --> how many heartbeats all instances actually sent
        //   in the last minute (say, 102).
        // If last minute's heartbeats (102) > the expected 100, return true, i.e. it is
        //   safe to expire failed instances.
        // If last minute's heartbeats were too few (say, 20) < the expected 100, return
        //   false: self-preservation kicks in and expiry is suppressed.
        return numberOfRenewsPerMinThreshold > 0 && getNumOfRenewsInLastMin() > numberOfRenewsPerMinThreshold;
    }

    /**
     * Checks to see if the self-preservation mode is enabled.
     *
     * <p>
     * The self-preservation mode is enabled if the expected number of renewals
     * per minute {@link #getNumOfRenewsInLastMin()} is lesser than the expected
     * threshold which is determined by {@link #getNumOfRenewsPerMinThreshold()}
     * . Eureka perceives this as a danger and stops expiring instances as this
     * is most likely because of a network event. The mode is disabled only when
     * the renewals get back to above the threshold or if the flag
     * {@link EurekaServerConfig#shouldEnableSelfPreservation()} is set to
     * false.
     * </p>
     *
     * @return true if the self-preservation mode is enabled, false otherwise.
     */
    @Override
    public boolean isSelfPreservationModeEnabled() {
        return serverConfig.shouldEnableSelfPreservation();
    }

    @Override
    public InstanceInfo getNextServerFromEureka(String virtualHostname, boolean secure) {
        // Not supported by this registry implementation; always returns null.
        return null;
    }

    /**
     * Updates the <em>renewal threshold</em> based on the current number of
     * renewals. The threshold is a percentage as specified in
     * {@link EurekaServerConfig#getRenewalPercentThreshold()} of renewals
     * received per minute {@link #getNumOfRenewsInLastMin()}.
     *
     * <p>If the number of instances fetched from peer eureka servers exceeds the
     * current expectation, the expected heartbeats per minute are recomputed to
     * stay in sync with the other eureka servers.
     */
    private void updateRenewalThreshold() {
        try {
            // Acting as a eureka client, fetch the registry from the other eureka
            // servers and merge it into the local view.
            Applications apps = eurekaClient.getApplications();
            int count = 0;
            // Count the registerable instances pulled from the other servers.
            for (Application app : apps.getRegisteredApplications()) {
                for (InstanceInfo instance : app.getInstances()) {
                    if (this.isRegisterable(instance)) {
                        ++count;
                    }
                }
            }
            synchronized (lock) {
                // Update threshold only if the threshold is greater than the
                // current expected threshold of if the self preservation is disabled.
                // I.e. if the instance count fetched from peers implies more heartbeats
                // than currently expected, recompute the per-minute expectation.
                if ((count * 2) > (serverConfig.getRenewalPercentThreshold() * numberOfRenewsPerMinThreshold)
                        || (!this.isSelfPreservationModeEnabled())) {
                    this.expectedNumberOfRenewsPerMin = count * 2;
                    this.numberOfRenewsPerMinThreshold = (int) ((count * 2) * serverConfig.getRenewalPercentThreshold());
                }
            }
            logger.info("Current renewal threshold is : {}", numberOfRenewsPerMinThreshold);
        } catch (Throwable e) {
            logger.error("Cannot update renewal threshold", e);
        }
    }

    /**
     * Gets the list of all {@link Applications} from the registry in sorted
     * lexical order of {@link Application#getName()}.
     *
     * @return the list of {@link Applications} in lexical order.
     */
    @Override
    public List<Application> getSortedApplications() {
        List<Application> apps = new ArrayList<Application>(getApplications().getRegisteredApplications());
        Collections.sort(apps, APP_COMPARATOR);
        return apps;
    }

    /**
     * Gets the number of <em>renewals</em> in the last minute.
     *
     * @return a long value representing the number of <em>renewals</em> in the last minute.
     */
    @com.netflix.servo.annotations.Monitor(name = "numOfReplicationsInLastMin",
            description = "Number of total replications received in the last minute",
            type = com.netflix.servo.annotations.DataSourceType.GAUGE)
    public long getNumOfReplicationsInLastMin() {
        return numberOfReplicationsLastMin.getCount();
    }

    /**
     * Checks if the number of renewals is lesser than threshold.
     *
     * @return 0 if the renewals are greater than threshold, 1 otherwise.
     */
    @com.netflix.servo.annotations.Monitor(name = "isBelowRenewThreshold", description = "0 = false, 1 = true",
            type = com.netflix.servo.annotations.DataSourceType.GAUGE)
    @Override
    public int isBelowRenewThresold() {
        // Only report "below threshold" after the initial sync-empty wait time has passed,
        // so a freshly started server does not immediately trip the gauge.
        if ((getNumOfRenewsInLastMin() <= numberOfRenewsPerMinThreshold)
                &&
                ((this.startupTime > 0) && (System.currentTimeMillis() > this.startupTime + (serverConfig.getWaitTimeInMsWhenSyncEmpty())))) {
            return 1;
        } else {
            return 0;
        }
    }

    /**
     * Checks if an instance is registerable in this region. Instances from other regions are rejected.
     *
     * @param instanceInfo th instance info information of the instance
     * @return true, if it can be registered in this server, false otherwise.
     */
    public boolean isRegisterable(InstanceInfo instanceInfo) {
        DataCenterInfo datacenterInfo = instanceInfo.getDataCenterInfo();
        String serverRegion = clientConfig.getRegion();
        if (AmazonInfo.class.isInstance(datacenterInfo)) {
            AmazonInfo info = AmazonInfo.class.cast(instanceInfo.getDataCenterInfo());
            String availabilityZone = info.get(MetaDataKey.availabilityZone);
            // Can be null for dev environments in non-AWS data center
            if (availabilityZone == null && US_EAST_1.equalsIgnoreCase(serverRegion)) {
                return true;
            } else if ((availabilityZone != null) && (availabilityZone.contains(serverRegion))) {
                // If in the same region as server, then consider it registerable
                return true;
            }
        }
        return true; // Everything non-amazon is registrable.
    }

    /**
     * Replicates all eureka actions to peer eureka nodes except for replication
     * traffic to this node.
     */
    private void replicateToPeers(Action action, String appName, String id,
                                  InstanceInfo info /* optional */,
                                  InstanceStatus newStatus /* optional */, boolean isReplication) {
        // isReplication is false when the request came straight from a client of this
        // server; after handling a register/heartbeat/etc. locally, this server forwards
        // the operation to its peers with isReplication set to true, so the receiving
        // peers do not replicate it again (which would cause an endless loop).
        Stopwatch tracer = action.getTimer().start();
        try {
            if (isReplication) {
                numberOfReplicationsLastMin.increment();
            }
            // If it is a replication already, do not replicate again as this will create a poison replication
            // NOTE(review): the EMPTY_LIST check is a reference comparison; it only
            // matches if peerEurekaNodes was literally assigned that constant.
            if (peerEurekaNodes == Collections.EMPTY_LIST || isReplication) {
                return;
            }

            for (final PeerEurekaNode node : peerEurekaNodes.getPeerEurekaNodes()) {
                // If the url represents this host, do not replicate to yourself.
                if (peerEurekaNodes.isThisMyUrl(node.getServiceUrl())) {
                    continue;
                }
                // Forward the action to this peer.
                replicateInstanceActionsToPeers(action, appName, id, info, newStatus, node);
            }
        } finally {
            tracer.stop();
        }
    }

    /**
     * Replicates all instance changes to peer eureka nodes except for
     * replication traffic to this node.
     */
    private void replicateInstanceActionsToPeers(Action action, String appName,
                                                 String id, InstanceInfo info, InstanceStatus newStatus,
                                                 PeerEurekaNode node) {
        try {
            InstanceInfo infoFromRegistry = null;
            CurrentRequestVersion.set(Version.V2);
            switch (action) {
                case Cancel:
                    node.cancel(appName, id);
                    break;
                case Heartbeat:
                    InstanceStatus overriddenStatus = overriddenInstanceStatusMap.get(id);
                    infoFromRegistry = getInstanceByAppAndId(appName, id, false);
                    node.heartbeat(appName, id, infoFromRegistry, overriddenStatus, false);
                    break;
                case Register:
                    node.register(info);
                    break;
                case StatusUpdate:
                    infoFromRegistry = getInstanceByAppAndId(appName, id, false);
                    node.statusUpdate(appName, id, newStatus, infoFromRegistry);
                    break;
                case DeleteStatusOverride:
                    infoFromRegistry = getInstanceByAppAndId(appName, id, false);
                    node.deleteStatusOverride(appName, id, infoFromRegistry);
                    break;
            }
        } catch (Throwable t) {
            logger.error("Cannot replicate information to {} for action {}", node.getServiceUrl(), action.name(), t);
        }
    }

    /**
     * Replicates all ASG status changes to peer eureka nodes except for
     * replication traffic to this node.
     */
    private void replicateASGInfoToReplicaNodes(final String asgName,
                                                final ASGStatus newStatus, final PeerEurekaNode node) {
        CurrentRequestVersion.set(Version.V2);
        try {
            node.statusUpdate(asgName, newStatus);
        } catch (Throwable e) {
            logger.error("Cannot replicate ASG status information to {}", node.getServiceUrl(), e);
        }
    }

    @Override
    @com.netflix.servo.annotations.Monitor(name = "localRegistrySize",
            description = "Current registry size", type = DataSourceType.GAUGE)
    public long getLocalRegistrySize() {
        return super.getLocalRegistrySize();
    }
}
|
package barqsoft.footballscores;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import barqsoft.footballscores.service.myFetchService;
/**
* A placeholder fragment containing a simple view.
*/
/**
 * A fragment showing the list of football scores for a single date.
 *
 * <p>On creation it kicks off {@link myFetchService} to refresh score data and
 * wires a {@link CursorLoader} (keyed by {@link #SCORES_LOADER}) that queries
 * the scores table for the date set via {@link #setFragmentDate(String)}.
 */
public class MainScreenFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor>
{
    public scoresAdapter mAdapter;
    public static final int SCORES_LOADER = 0;
    // Single-element array so it can be passed directly as loader selection args.
    private String[] fragmentdate = new String[1];

    public MainScreenFragment()
    {
    }

    /** Starts the background service that fetches fresh score data. */
    private void update_scores()
    {
        Intent service_start = new Intent(getActivity(), myFetchService.class);
        getActivity().startService(service_start);
    }

    /**
     * Sets the date whose scores this fragment displays.
     *
     * @param date date string used as the loader's selection argument
     */
    public void setFragmentDate(String date)
    {
        fragmentdate[0] = date;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             final Bundle savedInstanceState) {
        update_scores();
        View rootView = inflater.inflate(R.layout.fragment_main, container, false);
        final ListView score_list = (ListView) rootView.findViewById(R.id.scores_list);
        // Start with a null cursor; onLoadFinished() swaps in the real data.
        mAdapter = new scoresAdapter(getActivity(), null, 0);
        score_list.setAdapter(mAdapter);
        getLoaderManager().initLoader(SCORES_LOADER, null, this);
        // Restore the previously selected match so its detail view stays expanded.
        mAdapter.detail_match_id = MainActivity.selected_match_id;
        score_list.setOnItemClickListener(new AdapterView.OnItemClickListener()
        {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id)
            {
                ViewHolder selected = (ViewHolder) view.getTag();
                mAdapter.detail_match_id = selected.match_id;
                MainActivity.selected_match_id = (int) selected.match_id;
                mAdapter.notifyDataSetChanged();
            }
        });
        return rootView;
    }

    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle)
    {
        return new CursorLoader(getActivity(), DatabaseContract.scores_table.buildScoreWithDate(),
                null, null, fragmentdate, null);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor)
    {
        // Fix: the previous implementation walked the entire cursor in a dead
        // counting loop before handing it to the adapter, leaving the cursor
        // positioned past its last row for no benefit. Hand it over directly.
        mAdapter.swapCursor(cursor);
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader)
    {
        // Loader's data is no longer valid; detach it from the adapter.
        mAdapter.swapCursor(null);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.startup;
import java.util.Set;
import javax.servlet.Servlet;
import javax.servlet.ServletContainerInitializer;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRegistration;
/**
 * Test-support {@link ServletContainerInitializer}: on container start-up it
 * registers a {@link TesterServlet} under the name {@code "TesterServlet2"}
 * and maps it to the path {@code "/TesterServlet2"}.
 */
public class TesterServletContainerInitializer2 implements
        ServletContainerInitializer {

    @Override
    public void onStartup(Set<Class<?>> c, ServletContext ctx)
            throws ServletException {
        ServletRegistration.Dynamic registration =
                ctx.addServlet("TesterServlet2", new TesterServlet());
        registration.addMapping("/TesterServlet2");
    }
}
|
/*
* Copyright 2010-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.kotlin.resolve;
import com.google.common.collect.Lists;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import kotlin.Pair;
import kotlin.TuplesKt;
import kotlin.collections.CollectionsKt;
import kotlin.collections.SetsKt;
import kotlin.jvm.functions.Function0;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.builtins.FunctionTypesKt;
import org.jetbrains.kotlin.builtins.KotlinBuiltIns;
import org.jetbrains.kotlin.config.LanguageFeature;
import org.jetbrains.kotlin.config.LanguageVersionSettings;
import org.jetbrains.kotlin.descriptors.*;
import org.jetbrains.kotlin.descriptors.annotations.AnnotationSplitter;
import org.jetbrains.kotlin.descriptors.annotations.AnnotationUseSiteTarget;
import org.jetbrains.kotlin.descriptors.annotations.Annotations;
import org.jetbrains.kotlin.descriptors.annotations.CompositeAnnotations;
import org.jetbrains.kotlin.descriptors.impl.*;
import org.jetbrains.kotlin.diagnostics.Errors;
import org.jetbrains.kotlin.incremental.components.NoLookupLocation;
import org.jetbrains.kotlin.lexer.KtTokens;
import org.jetbrains.kotlin.name.Name;
import org.jetbrains.kotlin.psi.*;
import org.jetbrains.kotlin.psi.psiUtil.PsiUtilsKt;
import org.jetbrains.kotlin.resolve.calls.components.InferenceSession;
import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfo;
import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfoFactory;
import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowValueFactory;
import org.jetbrains.kotlin.resolve.calls.util.UnderscoreUtilKt;
import org.jetbrains.kotlin.resolve.extensions.SyntheticResolveExtension;
import org.jetbrains.kotlin.resolve.lazy.ForceResolveUtil;
import org.jetbrains.kotlin.resolve.lazy.descriptors.LazyTypeAliasDescriptor;
import org.jetbrains.kotlin.resolve.scopes.*;
import org.jetbrains.kotlin.resolve.scopes.receivers.ExpressionReceiver;
import org.jetbrains.kotlin.resolve.scopes.receivers.TransientReceiver;
import org.jetbrains.kotlin.resolve.scopes.utils.ScopeUtilsKt;
import org.jetbrains.kotlin.resolve.source.KotlinSourceElementKt;
import org.jetbrains.kotlin.storage.StorageManager;
import org.jetbrains.kotlin.types.*;
import org.jetbrains.kotlin.types.checker.KotlinTypeChecker;
import org.jetbrains.kotlin.types.expressions.*;
import org.jetbrains.kotlin.types.typeUtil.TypeUtilsKt;
import java.util.*;
import static org.jetbrains.kotlin.descriptors.annotations.AnnotationUseSiteTarget.*;
import static org.jetbrains.kotlin.diagnostics.Errors.*;
import static org.jetbrains.kotlin.lexer.KtTokens.*;
import static org.jetbrains.kotlin.resolve.BindingContext.CONSTRUCTOR;
import static org.jetbrains.kotlin.resolve.BindingContext.TYPE_ALIAS;
import static org.jetbrains.kotlin.resolve.DescriptorUtils.*;
import static org.jetbrains.kotlin.resolve.ModifiersChecker.resolveMemberModalityFromModifiers;
import static org.jetbrains.kotlin.resolve.ModifiersChecker.resolveVisibilityFromModifiers;
public class DescriptorResolver {
// Collaborating resolvers and services injected via the constructor; all are
// immutable after construction.
private final TypeResolver typeResolver;
private final AnnotationResolver annotationResolver;
private final StorageManager storageManager;
private final KotlinBuiltIns builtIns;
private final SupertypeLoopChecker supertypeLoopsResolver;
private final VariableTypeAndInitializerResolver variableTypeAndInitializerResolver;
private final ExpressionTypingServices expressionTypingServices;
private final OverloadChecker overloadChecker;
private final LanguageVersionSettings languageVersionSettings;
private final FunctionsTypingVisitor functionsTypingVisitor;
private final DestructuringDeclarationResolver destructuringDeclarationResolver;
private final ModifiersChecker modifiersChecker;
private final WrappedTypeFactory wrappedTypeFactory;
// Derived from the Project in the constructor rather than injected directly.
private final SyntheticResolveExtension syntheticResolveExtension;
private final TypeApproximator typeApproximator;
private final DeclarationReturnTypeSanitizer declarationReturnTypeSanitizer;
private final DataFlowValueFactory dataFlowValueFactory;
private final Iterable<DeclarationSignatureAnonymousTypeTransformer> anonymousTypeTransformers;
/**
 * Wires the resolver with its collaborators. All parameters are stored as-is
 * except {@code project}, which is only used to look up the
 * {@link SyntheticResolveExtension} instance.
 */
public DescriptorResolver(
        @NotNull AnnotationResolver annotationResolver,
        @NotNull KotlinBuiltIns builtIns,
        @NotNull StorageManager storageManager,
        @NotNull TypeResolver typeResolver,
        @NotNull SupertypeLoopChecker supertypeLoopsResolver,
        @NotNull VariableTypeAndInitializerResolver variableTypeAndInitializerResolver,
        @NotNull ExpressionTypingServices expressionTypingServices,
        @NotNull OverloadChecker overloadChecker,
        @NotNull LanguageVersionSettings languageVersionSettings,
        @NotNull FunctionsTypingVisitor functionsTypingVisitor,
        @NotNull DestructuringDeclarationResolver destructuringDeclarationResolver,
        @NotNull ModifiersChecker modifiersChecker,
        @NotNull WrappedTypeFactory wrappedTypeFactory,
        @NotNull Project project,
        @NotNull TypeApproximator approximator,
        @NotNull DeclarationReturnTypeSanitizer declarationReturnTypeSanitizer,
        @NotNull DataFlowValueFactory dataFlowValueFactory,
        @NotNull Iterable<DeclarationSignatureAnonymousTypeTransformer> anonymousTypeTransformers
) {
    this.annotationResolver = annotationResolver;
    this.builtIns = builtIns;
    this.storageManager = storageManager;
    this.typeResolver = typeResolver;
    this.supertypeLoopsResolver = supertypeLoopsResolver;
    this.variableTypeAndInitializerResolver = variableTypeAndInitializerResolver;
    this.expressionTypingServices = expressionTypingServices;
    this.overloadChecker = overloadChecker;
    this.languageVersionSettings = languageVersionSettings;
    this.functionsTypingVisitor = functionsTypingVisitor;
    this.destructuringDeclarationResolver = destructuringDeclarationResolver;
    this.modifiersChecker = modifiersChecker;
    this.wrappedTypeFactory = wrappedTypeFactory;
    this.syntheticResolveExtension = SyntheticResolveExtension.Companion.getInstance(project);
    // Consistency fix: use the explicit `this.` qualifier like every other assignment above.
    this.typeApproximator = approximator;
    this.declarationReturnTypeSanitizer = declarationReturnTypeSanitizer;
    this.dataFlowValueFactory = dataFlowValueFactory;
    this.anonymousTypeTransformers = anonymousTypeTransformers;
}
/**
 * Resolves the supertype list of {@code classDescriptor}.
 *
 * <p>Order of the result: declared supertypes first (error types dropped), then the
 * implicit {@code Enum<E>} supertype is prepended for enum classes that declare no
 * class supertype, then plugin-contributed synthetic supertypes, and finally a
 * default supertype ({@code Any}, annotation type, or the enum class for entries)
 * if nothing else applies.
 *
 * @param correspondingClassOrObject source PSI; {@code null} means "no declared
 *        supertype entries" (e.g. synthetic declarations)
 */
public List<KotlinType> resolveSupertypes(
        @NotNull LexicalScope scope,
        @NotNull ClassDescriptor classDescriptor,
        @Nullable KtPureClassOrObject correspondingClassOrObject,
        BindingTrace trace
) {
    List<KotlinType> supertypes = Lists.newArrayList();
    List<KtSuperTypeListEntry> delegationSpecifiers =
            correspondingClassOrObject == null ? Collections.emptyList() : correspondingClassOrObject.getSuperTypeListEntries();
    Collection<KotlinType> declaredSupertypes = resolveSuperTypeListEntries(
            scope,
            delegationSpecifiers,
            typeResolver, trace, false);

    for (KotlinType declaredSupertype : declaredSupertypes) {
        addValidSupertype(supertypes, declaredSupertype);
    }

    // Enum classes implicitly extend Enum<E> unless a class supertype is already present.
    if (classDescriptor.getKind() == ClassKind.ENUM_CLASS && !containsClass(supertypes)) {
        supertypes.add(0, builtIns.getEnumType(classDescriptor.getDefaultType()));
    }

    // Let compiler plugins contribute additional supertypes.
    syntheticResolveExtension.addSyntheticSupertypes(classDescriptor, supertypes);

    if (supertypes.isEmpty()) {
        addValidSupertype(supertypes, getDefaultSupertype(classDescriptor));
    }

    return supertypes;
}
// Appends the supertype to the accumulator, silently dropping error types so that
// unresolved supertype references never appear in the final supertype list.
private static void addValidSupertype(List<KotlinType> supertypes, KotlinType declaredSupertype) {
    if (KotlinTypeKt.isError(declaredSupertype)) return;
    supertypes.add(declaredSupertype);
}
// Returns true when at least one of the given types denotes a non-interface
// classifier (class, enum class, object, ...).
private static boolean containsClass(Collection<KotlinType> result) {
    for (KotlinType supertype : result) {
        ClassifierDescriptor classifier = supertype.getConstructor().getDeclarationDescriptor();
        if (!(classifier instanceof ClassDescriptor)) continue;
        if (((ClassDescriptor) classifier).getKind() != ClassKind.INTERFACE) return true;
    }
    return false;
}
/**
 * Picks the implicit supertype used when a declaration lists none:
 * the enum class itself for enum entries, {@code Annotation} for annotation
 * classes, and {@code Any} for everything else.
 */
@NotNull
private KotlinType getDefaultSupertype(@NotNull ClassDescriptor classDescriptor) {
    switch (classDescriptor.getKind()) {
        case ENUM_ENTRY:
            // An enum entry's containing declaration is the enum class.
            return ((ClassDescriptor) classDescriptor.getContainingDeclaration()).getDefaultType();
        case ANNOTATION_CLASS:
            return builtIns.getAnnotationType();
        default:
            return builtIns.getAnyType();
    }
}
/**
 * Resolves each entry of a supertype list to a {@code KotlinType}.
 *
 * <p>Dynamic supertypes are reported and skipped; an entry without a type
 * reference yields an error type (the parser has already reported that case).
 * For each valid entry, nullable-supertype and projection-in-argument checks
 * are performed as a side effect.
 */
private static Collection<KotlinType> resolveSuperTypeListEntries(
        LexicalScope extensibleScope,
        List<KtSuperTypeListEntry> delegationSpecifiers,
        @NotNull TypeResolver resolver,
        BindingTrace trace,
        boolean checkBounds
) {
    if (delegationSpecifiers.isEmpty()) {
        return Collections.emptyList();
    }
    Collection<KotlinType> result = Lists.newArrayList();
    for (KtSuperTypeListEntry delegationSpecifier : delegationSpecifiers) {
        KtTypeReference typeReference = delegationSpecifier.getTypeReference();
        if (typeReference != null) {
            KotlinType supertype = resolver.resolveType(extensibleScope, typeReference, trace, checkBounds);
            if (DynamicTypesKt.isDynamic(supertype)) {
                // `dynamic` cannot be inherited from; report and do not add to the result.
                trace.report(DYNAMIC_SUPERTYPE.on(typeReference));
            }
            else {
                result.add(supertype);
                // Strip `?` marks (reporting NULLABLE_SUPERTYPE on the innermost one)
                // before checking for projections in immediate arguments.
                KtTypeElement bareSuperType = checkNullableSupertypeAndStripQuestionMarks(trace, typeReference.getTypeElement());
                checkProjectionsInImmediateArguments(trace, bareSuperType, supertype);
            }
        }
        else {
            result.add(ErrorUtils.createErrorType("No type reference"));
        }
    }
    return result;
}
/**
 * Unwraps any chain of {@code ?} marks around a supertype element, reporting
 * NULLABLE_SUPERTYPE exactly once — on the innermost mark; the outer, redundant
 * marks get a separate 'redundant' warning elsewhere.
 *
 * @return the bare type element with all question marks stripped, possibly null
 */
@Nullable
private static KtTypeElement checkNullableSupertypeAndStripQuestionMarks(@NotNull BindingTrace trace, @Nullable KtTypeElement typeElement) {
    KtTypeElement current = typeElement;
    while (current instanceof KtNullableType) {
        KtNullableType outerNullable = (KtNullableType) current;
        current = outerNullable.getInnerType();
        boolean innermostMark = current != null && !(current instanceof KtNullableType);
        if (innermostMark) {
            trace.report(NULLABLE_SUPERTYPE.on(outerNullable));
        }
    }
    return current;
}
/**
 * Reports variance projections ({@code in}/{@code out}/{@code *}) used in the
 * immediate type arguments of a supertype, both for explicitly written arguments
 * and for arguments hidden behind a type-alias expansion.
 *
 * @param typeElement the written supertype element (question marks already stripped)
 * @param type the resolved supertype
 */
private static void checkProjectionsInImmediateArguments(
        @NotNull BindingTrace trace,
        @Nullable KtTypeElement typeElement,
        @NotNull KotlinType type
) {
    if (typeElement == null) return;

    boolean hasProjectionsInWrittenArguments = false;
    if (typeElement instanceof KtUserType) {
        KtUserType userType = (KtUserType) typeElement;
        List<KtTypeProjection> typeArguments = userType.getTypeArguments();
        for (KtTypeProjection typeArgument : typeArguments) {
            if (typeArgument.getProjectionKind() != KtProjectionKind.NONE) {
                trace.report(PROJECTION_IN_IMMEDIATE_ARGUMENT_TO_SUPERTYPE.on(typeArgument));
                hasProjectionsInWrittenArguments = true;
            }
        }
    }

    // If we have an abbreviated type (written with a type alias), it still can contain type projections in top-level arguments.
    if (!KotlinTypeKt.isError(type) && SpecialTypesKt.getAbbreviatedType(type) != null && !hasProjectionsInWrittenArguments) {
        // Only interface inheritance should be checked here.
        // Corresponding check for classes is performed for type alias constructor calls in CandidateResolver.
        if (TypeUtilsKt.isInterface(type) && TypeUtilsKt.containsTypeProjectionsInTopLevelArguments(type)) {
            trace.report(EXPANDED_TYPE_CANNOT_BE_INHERITED.on(typeElement, type));
        }
    }
}
/**
 * Computes the visibility a declaration gets when no visibility modifier is written:
 * members inherit visibility when marked {@code override}, locals inside
 * functions/property accessors are LOCAL, everything else gets the language default.
 */
public static DescriptorVisibility getDefaultVisibility(KtModifierListOwner modifierListOwner, DeclarationDescriptor containingDescriptor) {
    if (containingDescriptor instanceof ClassDescriptor) {
        KtModifierList modifiers = modifierListOwner.getModifierList();
        boolean isOverride = modifiers != null && modifiers.hasModifier(OVERRIDE_KEYWORD);
        return isOverride ? DescriptorVisibilities.INHERITED : DescriptorVisibilities.DEFAULT_VISIBILITY;
    }
    if (containingDescriptor instanceof FunctionDescriptor || containingDescriptor instanceof PropertyDescriptor) {
        return DescriptorVisibilities.LOCAL;
    }
    return DescriptorVisibilities.DEFAULT_VISIBILITY;
}
/**
 * Computes the modality a member gets when no modality modifier is written.
 * Interface members without a body are ABSTRACT; non-private interface members
 * with a body are OPEN; every other case is FINAL.
 */
public static Modality getDefaultModality(DeclarationDescriptor containingDescriptor, DescriptorVisibility visibility, boolean isBodyPresent) {
    if (!(containingDescriptor instanceof ClassDescriptor)) {
        return Modality.FINAL;
    }
    boolean inInterface = ((ClassDescriptor) containingDescriptor).getKind() == ClassKind.INTERFACE;
    if (inInterface && !isBodyPresent) {
        return Modality.ABSTRACT;
    }
    return inInterface && !DescriptorVisibilities.isPrivate(visibility) ? Modality.OPEN : Modality.FINAL;
}
/**
 * Resolves a descriptor for one value parameter of a function/constructor.
 *
 * <p>Handles {@code vararg} parameters (the variable's type becomes the matching
 * array type while the element type is kept separately), parameter annotations,
 * and destructuring lambda parameters ({@code { (a, b) -> ... }}), whose component
 * variables are resolved lazily on first access.
 *
 * @param index zero-based position in the owner's parameter list
 * @param type the declared parameter type (the element type in the vararg case)
 */
@NotNull
public ValueParameterDescriptorImpl resolveValueParameterDescriptor(
        @NotNull LexicalScope scope,
        @NotNull FunctionDescriptor owner,
        @NotNull KtParameter valueParameter,
        int index,
        @NotNull KotlinType type,
        @NotNull BindingTrace trace,
        @NotNull Annotations additionalAnnotations
) {
    KotlinType varargElementType = null;
    KotlinType variableType = type;
    if (valueParameter.hasModifier(VARARG_KEYWORD)) {
        // `vararg x: T` — the parameter variable has type Array<out T> (or the
        // primitive array type), while T itself is the vararg element type.
        varargElementType = type;
        variableType = getVarargParameterType(type);
    }

    Annotations valueParameterAnnotations = resolveValueParameterAnnotations(scope, valueParameter, trace, additionalAnnotations);

    KtDestructuringDeclaration destructuringDeclaration = valueParameter.getDestructuringDeclaration();

    Function0<List<VariableDescriptor>> destructuringVariables;
    if (destructuringDeclaration != null) {
        if (!languageVersionSettings.supportsFeature(LanguageFeature.DestructuringLambdaParameters)) {
            trace.report(Errors.UNSUPPORTED_FEATURE.on(valueParameter,
                                                       TuplesKt.to(LanguageFeature.DestructuringLambdaParameters, languageVersionSettings)));
        }

        // Deferred: the component variables are only resolved when first requested.
        destructuringVariables = () -> {
            assert owner.getDispatchReceiverParameter() == null
                    : "Destructuring declarations are only be parsed for lambdas, and they must not have a dispatch receiver";
            LexicalScope scopeForDestructuring =
                    ScopeUtilsKt.createScopeForDestructuring(scope, owner.getExtensionReceiverParameter());

            List<VariableDescriptor> result =
                    destructuringDeclarationResolver.resolveLocalVariablesFromDestructuringDeclaration(
                            scope,
                            destructuringDeclaration, new TransientReceiver(type), /* initializer = */ null,
                            ExpressionTypingContext.newContext(
                                    trace, scopeForDestructuring, DataFlowInfoFactory.EMPTY, TypeUtils.NO_EXPECTED_TYPE,
                                    languageVersionSettings, dataFlowValueFactory
                            )
                    );

            modifiersChecker.withTrace(trace).checkModifiersForDestructuringDeclaration(destructuringDeclaration);
            return result;
        };
    }
    else {
        destructuringVariables = null;
    }

    Name parameterName;

    if (destructuringDeclaration == null) {
        // NB: val/var for parameter is only allowed in primary constructors where single underscore names are still prohibited.
        // The problem with val/var is that when lazy resolve try to find their descriptor, it searches through the member scope
        // of containing class where, it can not find a descriptor with special name.
        // Thus, to preserve behavior, we don't use a special name for val/var.
        parameterName = !valueParameter.hasValOrVar() && UnderscoreUtilKt.isSingleUnderscore(valueParameter)
                        ? Name.special("<anonymous parameter " + index + ">")
                        : KtPsiUtil.safeName(valueParameter.getName());
    }
    else {
        parameterName = Name.special("<name for destructuring parameter " + index + ">");
    }

    ValueParameterDescriptorImpl valueParameterDescriptor = ValueParameterDescriptorImpl.createWithDestructuringDeclarations(
            owner,
            null,
            index,
            valueParameterAnnotations,
            parameterName,
            variableType,
            valueParameter.hasDefaultValue(),
            valueParameter.hasModifier(CROSSINLINE_KEYWORD),
            valueParameter.hasModifier(NOINLINE_KEYWORD),
            varargElementType,
            KotlinSourceElementKt.toSourceElement(valueParameter),
            destructuringVariables
    );

    trace.record(BindingContext.VALUE_PARAMETER, valueParameter, valueParameterDescriptor);
    return valueParameterDescriptor;
}
/**
 * Resolves the annotations applicable to a value parameter and merges them with
 * {@code additionalAnnotations}. For a `val`/`var` constructor parameter only the
 * annotations targeted at the parameter itself are kept here; the rest belong to
 * the generated property.
 */
@NotNull
private Annotations resolveValueParameterAnnotations(
        @NotNull LexicalScope scope,
        @NotNull KtParameter parameter,
        @NotNull BindingTrace trace,
        @NotNull Annotations additionalAnnotations
) {
    KtModifierList modifiers = parameter.getModifierList();
    if (modifiers == null) {
        return additionalAnnotations;
    }

    Annotations resolved = annotationResolver.resolveAnnotationsWithoutArguments(scope, modifiers, trace);
    if (parameter.hasValOrVar()) {
        AnnotationSplitter splitter = new AnnotationSplitter(storageManager, resolved, SetsKt.setOf(CONSTRUCTOR_PARAMETER));
        return new CompositeAnnotations(splitter.getAnnotationsForTarget(CONSTRUCTOR_PARAMETER), additionalAnnotations);
    }
    return new CompositeAnnotations(resolved, additionalAnnotations);
}
/**
 * Maps a vararg element type to the type of the parameter variable:
 * the specialized primitive array (IntArray, ...) when one exists,
 * otherwise {@code Array<out T>}.
 */
@NotNull
private KotlinType getVarargParameterType(@NotNull KotlinType elementType) {
    KotlinType specializedArray = builtIns.getPrimitiveArrayKotlinTypeByPrimitiveKotlinType(elementType);
    return specializedArray != null ? specializedArray : builtIns.getArrayType(Variance.OUT_VARIANCE, elementType);
}
/**
 * Resolves the type parameters of a declaration and registers each resulting
 * descriptor in {@code extensibleScope} so the declaration's own header can
 * reference them.
 */
public List<TypeParameterDescriptorImpl> resolveTypeParametersForDescriptor(
        DeclarationDescriptor containingDescriptor,
        LexicalWritableScope extensibleScope,
        LexicalScope scopeForAnnotationsResolve,
        List<KtTypeParameter> typeParameters,
        BindingTrace trace
) {
    List<TypeParameterDescriptorImpl> resolved =
            resolveTypeParametersForDescriptor(containingDescriptor, scopeForAnnotationsResolve, typeParameters, trace);
    resolved.forEach(extensibleScope::addClassifierDescriptor);
    return resolved;
}
// Resolves each written type parameter to a descriptor, preserving declaration order.
// Only meaningful for functions, properties, and type aliases (classes are handled
// by lazy resolve elsewhere).
private List<TypeParameterDescriptorImpl> resolveTypeParametersForDescriptor(
        DeclarationDescriptor containingDescriptor,
        LexicalScope scopeForAnnotationsResolve,
        List<KtTypeParameter> typeParameters,
        BindingTrace trace
) {
    assert containingDescriptor instanceof FunctionDescriptor ||
           containingDescriptor instanceof PropertyDescriptor ||
           containingDescriptor instanceof TypeAliasDescriptor
            : "This method should be called for functions, properties, or type aliases, got " + containingDescriptor;

    List<TypeParameterDescriptorImpl> descriptors = new ArrayList<>(typeParameters.size());
    int index = 0;
    for (KtTypeParameter typeParameter : typeParameters) {
        descriptors.add(resolveTypeParameterForDescriptor(containingDescriptor, scopeForAnnotationsResolve, typeParameter, index, trace));
        index++;
    }
    return descriptors;
}
/**
 * Resolves one written type parameter into a (not yet initialized) descriptor.
 * Variance is disallowed for function/property/type-alias type parameters and is
 * reported here; upper bounds are added later by {@code resolveGenericBounds}.
 */
private TypeParameterDescriptorImpl resolveTypeParameterForDescriptor(
        DeclarationDescriptor containingDescriptor,
        LexicalScope scopeForAnnotationsResolve,
        KtTypeParameter typeParameter,
        int index,
        BindingTrace trace
) {
    if (typeParameter.getVariance() != Variance.INVARIANT) {
        trace.report(VARIANCE_ON_TYPE_PARAMETER_NOT_ALLOWED.on(typeParameter));
    }

    Annotations annotations =
            annotationResolver.resolveAnnotationsWithArguments(scopeForAnnotationsResolve, typeParameter.getModifierList(), trace);

    TypeParameterDescriptorImpl typeParameterDescriptor = TypeParameterDescriptorImpl.createForFurtherModification(
            containingDescriptor,
            annotations,
            typeParameter.hasModifier(KtTokens.REIFIED_KEYWORD),
            typeParameter.getVariance(),
            KtPsiUtil.safeName(typeParameter.getName()),
            index,
            KotlinSourceElementKt.toSourceElement(typeParameter),
            // Invoked when a cyclic upper bound is detected during supertype resolution;
            // for type aliases the cycle is reported elsewhere.
            type -> {
                if (!(containingDescriptor instanceof TypeAliasDescriptor)) {
                    trace.report(Errors.CYCLIC_GENERIC_UPPER_BOUND.on(typeParameter));
                }
                return null;
            },
            supertypeLoopsResolver,
            storageManager
    );
    trace.record(BindingContext.TYPE_PARAMETER, typeParameter, typeParameterDescriptor);
    return typeParameterDescriptor;
}
/**
 * Creates the implicit (parameterless) primary constructor for an object
 * declaration and records it in the trace, keyed by the explicit primary
 * constructor PSI if one exists, otherwise by the object element itself.
 */
@NotNull
public static ClassConstructorDescriptorImpl createAndRecordPrimaryConstructorForObject(
        @Nullable KtPureClassOrObject object,
        @NotNull ClassDescriptor classDescriptor,
        @NotNull BindingTrace trace
) {
    ClassConstructorDescriptorImpl constructor =
            DescriptorFactory.createPrimaryConstructorForObject(classDescriptor, KotlinSourceElementKt.toSourceElement(object));
    if (object instanceof PsiElement) {
        KtPrimaryConstructor declaredConstructor = object.getPrimaryConstructor();
        PsiElement recordKey = declaredConstructor != null ? declaredConstructor : (PsiElement) object;
        trace.record(CONSTRUCTOR, recordKey, constructor);
    }
    return constructor;
}
/**
 * A deferred request to validate one upper bound of a type parameter.
 * Requests are collected while bounds are being resolved and are checked in a
 * second pass ({@code checkUpperBoundTypes}) once all bounds are known.
 */
static final class UpperBoundCheckRequest {
    // Name of the type parameter the bound is attached to.
    public final Name typeParameterName;
    // Source element of the bound, used for diagnostics.
    public final KtTypeReference upperBound;
    // The resolved bound type.
    public final KotlinType upperBoundType;

    UpperBoundCheckRequest(Name typeParameterName, KtTypeReference upperBound, KotlinType upperBoundType) {
        this.typeParameterName = typeParameterName;
        this.upperBound = upperBound;
        this.upperBoundType = upperBoundType;
    }
}
/**
 * Resolves upper bounds for a declaration's type parameters, from both the inline
 * form ({@code <T : Bound>}) and {@code where}-clause constraints, then finalizes
 * each parameter (default upper bound, initialization) and runs conflict checks.
 *
 * <p>Multi-pass on purpose: all bounds must be attached before any parameter is
 * initialized, and conflict checks require initialized parameters.
 */
public void resolveGenericBounds(
        @NotNull KtTypeParameterListOwner declaration,
        @NotNull DeclarationDescriptor descriptor,
        LexicalScope scope,
        List<TypeParameterDescriptorImpl> parameters,
        BindingTrace trace
) {
    List<UpperBoundCheckRequest> upperBoundCheckRequests = Lists.newArrayList();

    List<KtTypeParameter> typeParameters = declaration.getTypeParameters();
    Map<Name, TypeParameterDescriptorImpl> parameterByName = new HashMap<>();
    // Pass 1: bounds written directly on the type parameters (`<T : Bound>`).
    for (int i = 0; i < typeParameters.size(); i++) {
        KtTypeParameter ktTypeParameter = typeParameters.get(i);
        TypeParameterDescriptorImpl typeParameterDescriptor = parameters.get(i);

        parameterByName.put(typeParameterDescriptor.getName(), typeParameterDescriptor);

        KtTypeReference extendsBound = ktTypeParameter.getExtendsBound();
        if (extendsBound != null) {
            KotlinType type = typeResolver.resolveType(scope, extendsBound, trace, false);
            typeParameterDescriptor.addUpperBound(type);
            upperBoundCheckRequests.add(new UpperBoundCheckRequest(ktTypeParameter.getNameAsName(), extendsBound, type));
        }
    }
    // Pass 2: bounds from the `where` clause; unknown subject names are tolerated here
    // (reported by checkNamesInConstraints below).
    for (KtTypeConstraint constraint : declaration.getTypeConstraints()) {
        KtSimpleNameExpression subjectTypeParameterName = constraint.getSubjectTypeParameterName();
        if (subjectTypeParameterName == null) {
            continue;
        }
        Name referencedName = subjectTypeParameterName.getReferencedNameAsName();
        TypeParameterDescriptorImpl typeParameterDescriptor = parameterByName.get(referencedName);
        KtTypeReference boundTypeReference = constraint.getBoundTypeReference();
        KotlinType bound = null;
        if (boundTypeReference != null) {
            bound = typeResolver.resolveType(scope, boundTypeReference, trace, false);
            upperBoundCheckRequests.add(new UpperBoundCheckRequest(referencedName, boundTypeReference, bound));
        }

        if (typeParameterDescriptor != null) {
            trace.record(BindingContext.REFERENCE_TARGET, subjectTypeParameterName, typeParameterDescriptor);
            if (bound != null) {
                typeParameterDescriptor.addUpperBound(bound);
            }
        }
    }
    // Pass 3: finalize every parameter before any check runs.
    for (TypeParameterDescriptorImpl parameter : parameters) {
        parameter.addDefaultUpperBound();
        parameter.setInitialized();
    }
    for (TypeParameterDescriptorImpl parameter : parameters) {
        checkConflictingUpperBounds(trace, parameter, typeParameters.get(parameter.getIndex()));
    }
    // NOTE(review): for KtClass declarations these checks are presumably performed
    // elsewhere in class resolution — confirm before relying on this condition.
    if (!(declaration instanceof KtClass)) {
        checkUpperBoundTypes(trace, upperBoundCheckRequests, declaration.hasModifier(KtTokens.OVERRIDE_KEYWORD));
        checkNamesInConstraints(declaration, descriptor, scope, trace);
    }
}
/**
 * Validates the collected upper-bound requests: reports repeated bounds, multiple
 * class (non-interface) bounds on the same type parameter, and per-bound issues
 * (final/dynamic/extension-function bounds) via {@code checkUpperBoundType}.
 */
public static void checkUpperBoundTypes(
        @NotNull BindingTrace trace,
        @NotNull List<UpperBoundCheckRequest> requests,
        boolean hasOverrideModifier
) {
    if (requests.isEmpty()) return;

    // Type parameters that already have a class (non-interface) bound.
    Set<Name> classBoundEncountered = new HashSet<>();
    // Every (parameter, bound constructor) pair seen so far, to detect repeats.
    Set<Pair<Name, TypeConstructor>> allBounds = new HashSet<>();

    for (UpperBoundCheckRequest request : requests) {
        Name typeParameterName = request.typeParameterName;
        KotlinType upperBound = request.upperBoundType;
        KtTypeReference upperBoundElement = request.upperBound;

        if (!KotlinTypeKt.isError(upperBound)) {
            if (!allBounds.add(new Pair<>(typeParameterName, upperBound.getConstructor()))) {
                trace.report(REPEATED_BOUND.on(upperBoundElement));
            }
            else {
                ClassDescriptor classDescriptor = TypeUtils.getClassDescriptor(upperBound);
                if (classDescriptor != null) {
                    ClassKind kind = classDescriptor.getKind();
                    if (kind == ClassKind.CLASS || kind == ClassKind.ENUM_CLASS || kind == ClassKind.OBJECT) {
                        if (!classBoundEncountered.add(typeParameterName)) {
                            trace.report(ONLY_ONE_CLASS_BOUND_ALLOWED.on(upperBoundElement));
                        }
                    }
                }
            }
        }

        // Per-bound checks run even for error types.
        checkUpperBoundType(upperBoundElement, upperBound, trace, hasOverrideModifier);
    }
}
/**
 * Reports CONFLICTING_UPPER_BOUNDS when a parameter's upper bounds intersect to
 * {@code Nothing}, i.e. no real type could ever satisfy all of them.
 */
public static void checkConflictingUpperBounds(
        @NotNull BindingTrace trace,
        @NotNull TypeParameterDescriptor parameter,
        @NotNull KtTypeParameter typeParameter
) {
    KotlinType boundsIntersection = TypeIntersector.getUpperBoundsAsType(parameter);
    if (KotlinBuiltIns.isNothing(boundsIntersection)) {
        trace.report(CONFLICTING_UPPER_BOUNDS.on(typeParameter, parameter));
    }
}
/**
 * Validates the subject names in {@code where}-clause constraints: each name must
 * refer to a type parameter declared by this very declaration. Other classifiers
 * and unresolved names are reported; bound types are (re-)resolved with bound
 * checking enabled so their own diagnostics are emitted.
 */
public void checkNamesInConstraints(
        @NotNull KtTypeParameterListOwner declaration,
        @NotNull DeclarationDescriptor descriptor,
        @NotNull LexicalScope scope,
        @NotNull BindingTrace trace
) {
    for (KtTypeConstraint constraint : declaration.getTypeConstraints()) {
        KtSimpleNameExpression nameExpression = constraint.getSubjectTypeParameterName();
        if (nameExpression == null) continue;

        Name name = nameExpression.getReferencedNameAsName();

        ClassifierDescriptor classifier = ScopeUtilsKt.findClassifier(scope, name, NoLookupLocation.FOR_NON_TRACKED_SCOPE);
        if (classifier instanceof TypeParameterDescriptor && classifier.getContainingDeclaration() == descriptor) continue;

        if (classifier != null) {
            // To tell the user that we look only for locally defined type parameters
            trace.report(NAME_IN_CONSTRAINT_IS_NOT_A_TYPE_PARAMETER.on(nameExpression, constraint, declaration));
            trace.record(BindingContext.REFERENCE_TARGET, nameExpression, classifier);
        }
        else {
            trace.report(UNRESOLVED_REFERENCE.on(nameExpression, nameExpression));
        }

        KtTypeReference boundTypeReference = constraint.getBoundTypeReference();
        if (boundTypeReference != null) {
            typeResolver.resolveType(scope, boundTypeReference, trace, true);
        }
    }
}
/**
 * Reports problems with a single upper bound: a bound that permits no subtypes
 * (unless the declaration is an override), a {@code dynamic} bound, or an
 * extension-function-type bound.
 */
public static void checkUpperBoundType(
        KtTypeReference upperBound,
        @NotNull KotlinType upperBoundType,
        BindingTrace trace,
        boolean hasOverrideModifier
) {
    if (!hasOverrideModifier) {
        // Only computed when relevant: a bound no type can extend is useless.
        if (!TypeUtils.canHaveSubtypes(KotlinTypeChecker.DEFAULT, upperBoundType)) {
            trace.report(FINAL_UPPER_BOUND.on(upperBound, upperBoundType));
        }
    }
    if (DynamicTypesKt.isDynamic(upperBoundType)) {
        trace.report(DYNAMIC_UPPER_BOUND.on(upperBound));
    }
    if (FunctionTypesKt.isExtensionFunctionType(upperBoundType)) {
        trace.report(UPPER_BOUND_IS_EXTENSION_FUNCTION_TYPE.on(upperBound));
    }
}
/**
 * Resolves a local variable introduced by a parameter (e.g. a catch or loop
 * parameter): first pins down its type, then builds the descriptor from it.
 */
@NotNull
public VariableDescriptor resolveLocalVariableDescriptor(
        @NotNull LexicalScope scope,
        @NotNull KtParameter parameter,
        BindingTrace trace
) {
    KotlinType parameterType = resolveParameterType(scope, parameter, trace);
    return resolveLocalVariableDescriptor(parameter, parameterType, trace, scope);
}
// Resolves the declared type of a parameter; a missing type reference yields an
// error type (the parser has already reported it), and `vararg` parameters get
// the corresponding array type.
private KotlinType resolveParameterType(LexicalScope scope, KtParameter parameter, BindingTrace trace) {
    KtTypeReference typeReference = parameter.getTypeReference();
    KotlinType declaredType = typeReference == null
            // Error is reported by the parser
            ? ErrorUtils.createErrorType("Annotation is absent")
            : typeResolver.resolveType(scope, typeReference, trace, true);
    return parameter.hasModifier(VARARG_KEYWORD) ? getVarargParameterType(declaredType) : declaredType;
}
/**
 * Builds a {@code LocalVariableDescriptor} for a parameter whose type is already
 * known, approximating the declared type for local use and recording the result
 * in the trace.
 */
public VariableDescriptor resolveLocalVariableDescriptor(
        @NotNull KtParameter parameter,
        @NotNull KotlinType type,
        BindingTrace trace,
        @NotNull LexicalScope scope
) {
    // Locals get an approximated (denotable) form of the declared type.
    UnwrappedType approximatedType = typeApproximator.approximateDeclarationType(type, true, languageVersionSettings);
    VariableDescriptor variableDescriptor = new LocalVariableDescriptor(
            scope.getOwnerDescriptor(),
            annotationResolver.resolveAnnotationsWithArguments(scope, parameter.getModifierList(), trace),
            KtPsiUtil.safeName(parameter.getName()),
            approximatedType,
            KotlinSourceElementKt.toSourceElement(parameter)
    );
    trace.record(BindingContext.VALUE_PARAMETER, parameter, variableDescriptor);
    // Type annotations also should be resolved
    ForceResolveUtil.forceResolveAllContents(type.getAnnotations());
    return variableDescriptor;
}
/**
 * Resolves a {@code typealias} declaration into a {@code TypeAliasDescriptor}.
 *
 * <p>Non-top-level aliases are reported but still resolved. Type parameters get
 * their own writable header scope; bounds on alias parameters are disallowed.
 * The underlying and expanded types are computed via recursion-tolerant lazy
 * values so a recursive alias degrades to an error type instead of looping.
 */
@NotNull
public TypeAliasDescriptor resolveTypeAliasDescriptor(
        @NotNull DeclarationDescriptor containingDeclaration,
        @NotNull LexicalScope scope,
        @NotNull KtTypeAlias typeAlias,
        @NotNull BindingTrace trace
) {
    if (!(containingDeclaration instanceof PackageFragmentDescriptor) &&
        !(containingDeclaration instanceof ScriptDescriptor)) {
        trace.report(TOPLEVEL_TYPEALIASES_ONLY.on(typeAlias));
    }

    KtModifierList modifierList = typeAlias.getModifierList();
    DescriptorVisibility visibility = resolveVisibilityFromModifiers(typeAlias, getDefaultVisibility(typeAlias, containingDeclaration));

    Annotations allAnnotations = annotationResolver.resolveAnnotationsWithArguments(scope, modifierList, trace);
    Name name = KtPsiUtil.safeName(typeAlias.getName());
    SourceElement sourceElement = KotlinSourceElementKt.toSourceElement(typeAlias);
    LazyTypeAliasDescriptor typeAliasDescriptor = LazyTypeAliasDescriptor.create(
            storageManager, trace, containingDeclaration, allAnnotations, name, sourceElement, visibility);

    List<TypeParameterDescriptorImpl> typeParameterDescriptors;
    LexicalScope scopeWithTypeParameters;
    {
        List<KtTypeParameter> typeParameters = typeAlias.getTypeParameters();
        if (typeParameters.isEmpty()) {
            scopeWithTypeParameters = scope;
            typeParameterDescriptors = Collections.emptyList();
        }
        else {
            // Header scope so `typealias Foo<T> = Bar<T>` can reference T on the RHS.
            LexicalWritableScope writableScope = new LexicalWritableScope(
                    scope, containingDeclaration, false, new TraceBasedLocalRedeclarationChecker(trace, overloadChecker),
                    LexicalScopeKind.TYPE_ALIAS_HEADER);
            typeParameterDescriptors = resolveTypeParametersForDescriptor(
                    typeAliasDescriptor, writableScope, scope, typeParameters, trace);
            writableScope.freeze();
            checkNoGenericBoundsOnTypeAliasParameters(typeAlias, trace);
            resolveGenericBounds(typeAlias, typeAliasDescriptor, writableScope, typeParameterDescriptors, trace);
            scopeWithTypeParameters = writableScope;
        }
    }

    KtTypeReference typeReference = typeAlias.getTypeReference();
    if (typeReference == null) {
        // `typealias Foo =` with a missing RHS: both types become error types.
        typeAliasDescriptor.initialize(
                typeParameterDescriptors,
                ErrorUtils.createErrorType(name.asString()),
                ErrorUtils.createErrorType(name.asString()));
    }
    else if (!languageVersionSettings.supportsFeature(LanguageFeature.TypeAliases)) {
        // Still resolve the RHS (for highlighting/navigation), but report the feature
        // as unsupported and initialize with error types.
        typeResolver.resolveAbbreviatedType(scopeWithTypeParameters, typeReference, trace);
        PsiElement typeAliasKeyword = typeAlias.getTypeAliasKeyword();
        trace.report(UNSUPPORTED_FEATURE.on(typeAliasKeyword != null ? typeAliasKeyword : typeAlias,
                                            TuplesKt.to(LanguageFeature.TypeAliases, languageVersionSettings)));
        typeAliasDescriptor.initialize(
                typeParameterDescriptors,
                ErrorUtils.createErrorType(name.asString()),
                ErrorUtils.createErrorType(name.asString()));
    }
    else {
        typeAliasDescriptor.initialize(
                typeParameterDescriptors,
                storageManager.createRecursionTolerantLazyValue(
                        () -> typeResolver.resolveAbbreviatedType(scopeWithTypeParameters, typeReference, trace),
                        ErrorUtils.createErrorType("Recursive type alias expansion for " + typeAliasDescriptor.getName().asString())
                ),
                storageManager.createRecursionTolerantLazyValue(
                        () -> typeResolver.resolveExpandedTypeForTypeAlias(typeAliasDescriptor),
                        ErrorUtils.createErrorType("Recursive type alias expansion for " + typeAliasDescriptor.getName().asString())
                )
        );
    }

    trace.record(TYPE_ALIAS, typeAlias, typeAliasDescriptor);
    return typeAliasDescriptor;
}
// Type alias parameters may not declare upper bounds; report each `T : Bound` found.
private static void checkNoGenericBoundsOnTypeAliasParameters(@NotNull KtTypeAlias typeAlias, @NotNull BindingTrace trace) {
    for (KtTypeParameter aliasParameter : typeAlias.getTypeParameters()) {
        KtTypeReference extendsBound = aliasParameter.getExtendsBound();
        if (extendsBound == null) continue;
        trace.report(BOUND_ON_TYPE_ALIAS_PARAMETER_NOT_ALLOWED.on(extendsBound));
    }
}
/**
 * Resolves one entry of a top-level/member destructuring declaration
 * ({@code val (a, b) = expr}) as a property: infers the entry's component type
 * from the initializer's {@code componentN()} and then delegates to the common
 * variable-as-property resolution.
 */
@NotNull
public PropertyDescriptor resolveDestructuringDeclarationEntryAsProperty(
        @NotNull DeclarationDescriptor containingDeclaration,
        @NotNull LexicalScope scopeForDeclarationResolution,
        @NotNull LexicalScope scopeForInitializerResolution,
        @NotNull KtDestructuringDeclarationEntry entry,
        @NotNull BindingTrace trace,
        @NotNull DataFlowInfo dataFlowInfo,
        @NotNull InferenceSession inferenceSession
) {
    KtDestructuringDeclaration destructuringDeclaration = (KtDestructuringDeclaration) entry.getParent();
    KtExpression initializer = destructuringDeclaration.getInitializer();

    ExpressionTypingContext context = ExpressionTypingContext.newContext(
            trace, scopeForDeclarationResolution, dataFlowInfo, TypeUtils.NO_EXPECTED_TYPE, languageVersionSettings, dataFlowValueFactory
    );

    ExpressionReceiver receiver = createReceiverForDestructuringDeclaration(destructuringDeclaration, context);

    // The entry's position determines which componentN() is resolved.
    int componentIndex = destructuringDeclaration.getEntries().indexOf(entry);
    KotlinType componentType = destructuringDeclarationResolver.resolveInitializer(entry, receiver, initializer, context, componentIndex);

    return resolveAsPropertyDescriptor(
            containingDeclaration,
            scopeForDeclarationResolution,
            scopeForInitializerResolution,
            entry,
            trace,
            dataFlowInfo,
            inferenceSession,
            VariableAsPropertyInfo.Companion.createFromDestructuringDeclarationEntry(componentType));
}
/**
 * Builds an expression receiver for the initializer of a destructuring declaration
 * ({@code val (a, b) = expr}), against which {@code componentN()} calls are resolved.
 *
 * @return the receiver for the initializer, or {@code null} when the declaration
 *         has no initializer or its type could not be inferred
 */
@Nullable
private ExpressionReceiver createReceiverForDestructuringDeclaration(
        @NotNull KtDestructuringDeclaration destructuringDeclaration,
        @NotNull ExpressionTypingContext context
) {
    KtExpression initializer = destructuringDeclaration.getInitializer();
    if (initializer == null) return null;

    KotlinType initializerType = expressionTypingServices.getTypeInfo(initializer, context).getType();
    if (initializerType == null) return null;

    return ExpressionReceiver.Companion.create(initializer, initializerType, context.trace.getBindingContext());
}
/**
 * Resolves a plain {@code val}/{@code var} declaration as a property. This is the
 * general variable-as-property case with the accessor/delegate information taken
 * directly from the property PSI.
 */
@NotNull
public PropertyDescriptor resolvePropertyDescriptor(
        @NotNull DeclarationDescriptor containingDeclaration,
        @NotNull LexicalScope scopeForDeclarationResolution,
        @NotNull LexicalScope scopeForInitializerResolution,
        @NotNull KtProperty property,
        @NotNull BindingTrace trace,
        @NotNull DataFlowInfo dataFlowInfo,
        @NotNull InferenceSession inferenceSession
) {
    VariableAsPropertyInfo propertyInfo = VariableAsPropertyInfo.Companion.createFromProperty(property);
    return resolveAsPropertyDescriptor(
            containingDeclaration,
            scopeForDeclarationResolution,
            scopeForInitializerResolution,
            property,
            trace,
            dataFlowInfo,
            inferenceSession,
            propertyInfo);
}
/**
 * Common resolution path for anything treated as a property: plain properties and
 * destructuring declaration entries.
 *
 * <p>Outline: resolve modifiers/annotations (split by use-site target), create the
 * property descriptor, set up type parameter scopes and the extension receiver,
 * determine the property type (declared, or inferred from the initializer/getter),
 * then resolve accessors and backing/delegate fields and record the result.
 */
@NotNull
private PropertyDescriptor resolveAsPropertyDescriptor(
        @NotNull DeclarationDescriptor container,
        @NotNull LexicalScope scopeForDeclarationResolution,
        @NotNull LexicalScope scopeForInitializerResolution,
        @NotNull KtVariableDeclaration variableDeclaration,
        @NotNull BindingTrace trace,
        @NotNull DataFlowInfo dataFlowInfo,
        @NotNull InferenceSession inferenceSession,
        @NotNull VariableAsPropertyInfo propertyInfo
) {
    KtModifierList modifierList = variableDeclaration.getModifierList();
    boolean isVar = variableDeclaration.isVar();

    DescriptorVisibility visibility = resolveVisibilityFromModifiers(variableDeclaration, getDefaultVisibility(variableDeclaration, container));
    Modality modality = container instanceof ClassDescriptor
                        ? resolveMemberModalityFromModifiers(variableDeclaration,
                                                            getDefaultModality(container, visibility, propertyInfo.getHasBody()),
                                                            trace.getBindingContext(), container)
                        : Modality.FINAL;

    Annotations allAnnotations = annotationResolver.resolveAnnotationsWithoutArguments(scopeForDeclarationResolution, modifierList, trace);

    // Use-site targets applicable to this declaration; setter targets only for `var`,
    // delegate field only when there is a `by` clause.
    Set<AnnotationUseSiteTarget> targetSet = EnumSet.of(PROPERTY, PROPERTY_GETTER, FIELD);
    if (isVar) {
        targetSet.add(PROPERTY_SETTER);
        targetSet.add(SETTER_PARAMETER);
    }
    if (variableDeclaration instanceof KtProperty && ((KtProperty) variableDeclaration).hasDelegate()) {
        targetSet.add(PROPERTY_DELEGATE_FIELD);
    }
    AnnotationSplitter annotationSplitter = new AnnotationSplitter(storageManager, allAnnotations, targetSet);

    Annotations propertyAnnotations = new CompositeAnnotations(CollectionsKt.listOf(
            annotationSplitter.getAnnotationsForTarget(PROPERTY),
            annotationSplitter.getOtherAnnotations())
    );

    PropertyDescriptorImpl propertyDescriptor = PropertyDescriptorImpl.create(
            container,
            propertyAnnotations,
            modality,
            visibility,
            isVar,
            KtPsiUtil.safeName(variableDeclaration.getName()),
            CallableMemberDescriptor.Kind.DECLARATION,
            KotlinSourceElementKt.toSourceElement(variableDeclaration),
            modifierList != null && modifierList.hasModifier(KtTokens.LATEINIT_KEYWORD),
            modifierList != null && modifierList.hasModifier(KtTokens.CONST_KEYWORD),
            // isExpect: explicit `expect` at top level, or membership in an expect class.
            modifierList != null && PsiUtilsKt.hasExpectModifier(modifierList) && container instanceof PackageFragmentDescriptor ||
            container instanceof ClassDescriptor && ((ClassDescriptor) container).isExpect(),
            modifierList != null && PsiUtilsKt.hasActualModifier(modifierList),
            modifierList != null && modifierList.hasModifier(KtTokens.EXTERNAL_KEYWORD),
            propertyInfo.getHasDelegate()
    );

    List<TypeParameterDescriptorImpl> typeParameterDescriptors;
    LexicalScope scopeForDeclarationResolutionWithTypeParameters;
    LexicalScope scopeForInitializerResolutionWithTypeParameters;
    KotlinType receiverType = null;

    {
        List<KtTypeParameter> typeParameters = variableDeclaration.getTypeParameters();
        if (typeParameters.isEmpty()) {
            scopeForDeclarationResolutionWithTypeParameters = scopeForDeclarationResolution;
            scopeForInitializerResolutionWithTypeParameters = scopeForInitializerResolution;
            typeParameterDescriptors = Collections.emptyList();
        }
        else {
            // Two parallel header scopes: declaration resolution checks redeclarations,
            // initializer resolution does not.
            LexicalWritableScope writableScopeForDeclarationResolution = new LexicalWritableScope(
                    scopeForDeclarationResolution, container, false, new TraceBasedLocalRedeclarationChecker(trace, overloadChecker),
                    LexicalScopeKind.PROPERTY_HEADER);
            LexicalWritableScope writableScopeForInitializerResolution = new LexicalWritableScope(
                    scopeForInitializerResolution, container, false, LocalRedeclarationChecker.DO_NOTHING.INSTANCE,
                    LexicalScopeKind.PROPERTY_HEADER);
            typeParameterDescriptors = resolveTypeParametersForDescriptor(
                    propertyDescriptor,
                    scopeForDeclarationResolution, typeParameters, trace);
            for (TypeParameterDescriptor descriptor : typeParameterDescriptors) {
                writableScopeForDeclarationResolution.addClassifierDescriptor(descriptor);
                writableScopeForInitializerResolution.addClassifierDescriptor(descriptor);
            }
            writableScopeForDeclarationResolution.freeze();
            writableScopeForInitializerResolution.freeze();
            resolveGenericBounds(variableDeclaration, propertyDescriptor, writableScopeForDeclarationResolution, typeParameterDescriptors, trace);
            scopeForDeclarationResolutionWithTypeParameters = writableScopeForDeclarationResolution;
            scopeForInitializerResolutionWithTypeParameters = writableScopeForInitializerResolution;
        }

        // Extension property receiver, e.g. `val List<Int>.sum: Int`.
        KtTypeReference receiverTypeRef = variableDeclaration.getReceiverTypeReference();
        if (receiverTypeRef != null) {
            receiverType = typeResolver.resolveType(scopeForDeclarationResolutionWithTypeParameters, receiverTypeRef, trace, true);
        }
    }

    ReceiverParameterDescriptor receiverDescriptor;
    if (receiverType != null) {
        AnnotationSplitter splitter = new AnnotationSplitter(storageManager, receiverType.getAnnotations(), EnumSet.of(RECEIVER));
        receiverDescriptor = DescriptorFactory.createExtensionReceiverParameterForCallable(
                propertyDescriptor, receiverType, splitter.getAnnotationsForTarget(RECEIVER)
        );
    }
    else {
        receiverDescriptor = null;
    }

    LexicalScope scopeForInitializer = ScopeUtils.makeScopeForPropertyInitializer(scopeForInitializerResolutionWithTypeParameters, propertyDescriptor);
    KotlinType propertyType = propertyInfo.getVariableType();
    // Declared/known type wins; otherwise try to infer from the initializer.
    KotlinType typeIfKnown = propertyType != null ? propertyType : variableTypeAndInitializerResolver.resolveTypeNullable(
            propertyDescriptor, scopeForInitializer,
            variableDeclaration, dataFlowInfo, inferenceSession,
            trace, /* local = */ false
    );

    PropertyGetterDescriptorImpl getter = resolvePropertyGetterDescriptor(
            scopeForDeclarationResolutionWithTypeParameters,
            variableDeclaration,
            propertyDescriptor,
            annotationSplitter,
            trace,
            typeIfKnown,
            propertyInfo.getPropertyGetter(),
            propertyInfo.getHasDelegate());

    // Last resort: the getter's return type (e.g. `val x get() = ...`).
    KotlinType type = typeIfKnown != null ? typeIfKnown : getter.getReturnType();

    assert type != null : "At least getter type must be initialized via resolvePropertyGetterDescriptor";

    variableTypeAndInitializerResolver.setConstantForVariableIfNeeded(
            propertyDescriptor, scopeForInitializer, variableDeclaration, dataFlowInfo, type, inferenceSession, trace
    );

    propertyDescriptor.setType(type, typeParameterDescriptors, getDispatchReceiverParameterIfNeeded(container), receiverDescriptor);

    PropertySetterDescriptor setter = resolvePropertySetterDescriptor(
            scopeForDeclarationResolutionWithTypeParameters,
            variableDeclaration,
            propertyDescriptor,
            annotationSplitter,
            trace,
            propertyInfo.getPropertySetter(),
            propertyInfo.getHasDelegate());

    propertyDescriptor.initialize(
            getter, setter,
            new FieldDescriptorImpl(annotationSplitter.getAnnotationsForTarget(FIELD), propertyDescriptor),
            new FieldDescriptorImpl(annotationSplitter.getAnnotationsForTarget(PROPERTY_DELEGATE_FIELD), propertyDescriptor)
    );

    trace.record(BindingContext.VARIABLE, variableDeclaration, propertyDescriptor);
    return propertyDescriptor;
}
@NotNull
/*package*/ static KotlinType transformAnonymousTypeIfNeeded(
        @NotNull DeclarationDescriptorWithVisibility descriptor,
        @NotNull KtDeclaration declaration,
        @NotNull KotlinType type,
        @NotNull BindingTrace trace,
        @NotNull Iterable<DeclarationSignatureAnonymousTypeTransformer> anonymousTypeTransformers
) {
    // Give registered transformers the first chance to rewrite the type; the first
    // non-null result wins and short-circuits the default anonymous-object handling.
    for (DeclarationSignatureAnonymousTypeTransformer transformer : anonymousTypeTransformers) {
        KotlinType transformedType = transformer.transformAnonymousType(descriptor, type);
        if (transformedType != null) {
            return transformedType;
        }
    }
    // Only non-local declarations whose type is an anonymous object need transformation;
    // local declarations may legitimately expose the anonymous type.
    ClassifierDescriptor classifier = type.getConstructor().getDeclarationDescriptor();
    if (classifier == null || !DescriptorUtils.isAnonymousObject(classifier) || DescriptorUtils.isLocal(descriptor)) {
        return type;
    }
    // A non-private member cannot leak an anonymous type: replace it with the single
    // supertype when unambiguous, otherwise report the ambiguity and keep the type.
    if (!DescriptorVisibilities.isPrivate(descriptor.getVisibility())) {
        if (type.getConstructor().getSupertypes().size() == 1) {
            return type.getConstructor().getSupertypes().iterator().next();
        }
        else {
            trace.report(AMBIGUOUS_ANONYMOUS_TYPE_INFERRED.on(declaration, type.getConstructor().getSupertypes()));
        }
    }
    return type;
}
/**
 * Resolves the setter descriptor for a property, or returns {@code null} when the
 * property is a {@code val} with no explicit setter.
 *
 * When an explicit {@code set(...)} accessor exists its modifiers, annotations,
 * return type and parameter are checked; otherwise a default setter is synthesized
 * for {@code var} properties.
 */
@Nullable
private PropertySetterDescriptor resolvePropertySetterDescriptor(
        @NotNull LexicalScope scopeWithTypeParameters,
        @NotNull KtVariableDeclaration property,
        @NotNull PropertyDescriptor propertyDescriptor,
        @NotNull AnnotationSplitter annotationSplitter,
        @NotNull BindingTrace trace,
        @Nullable KtPropertyAccessor setter,
        boolean hasDelegate
) {
    PropertySetterDescriptorImpl setterDescriptor = null;
    Annotations setterTargetedAnnotations = annotationSplitter.getAnnotationsForTarget(PROPERTY_SETTER);
    Annotations parameterTargetedAnnotations = annotationSplitter.getAnnotationsForTarget(SETTER_PARAMETER);
    if (setter != null) {
        // Combine @setter:-targeted annotations from the property with annotations
        // written directly on the accessor itself.
        Annotations annotations = new CompositeAnnotations(CollectionsKt.listOf(
                setterTargetedAnnotations,
                annotationResolver.resolveAnnotationsWithoutArguments(scopeWithTypeParameters, setter.getModifierList(), trace)
        ));
        KtParameter parameter = setter.getParameter();
        // The setter is inline if either the property or the accessor carries the modifier.
        setterDescriptor = new PropertySetterDescriptorImpl(
                propertyDescriptor, annotations,
                resolveMemberModalityFromModifiers(setter, propertyDescriptor.getModality(),
                                                  trace.getBindingContext(), propertyDescriptor.getContainingDeclaration()),
                resolveVisibilityFromModifiers(setter, propertyDescriptor.getVisibility()),
                /* isDefault = */ false, setter.hasModifier(EXTERNAL_KEYWORD),
                property.hasModifier(KtTokens.INLINE_KEYWORD) || setter.hasModifier(KtTokens.INLINE_KEYWORD),
                CallableMemberDescriptor.Kind.DECLARATION, null, KotlinSourceElementKt.toSourceElement(setter)
        );
        // An explicit setter return type must be Unit.
        KtTypeReference returnTypeReference = setter.getReturnTypeReference();
        if (returnTypeReference != null) {
            KotlinType returnType = typeResolver.resolveType(scopeWithTypeParameters, returnTypeReference, trace, true);
            if (!KotlinBuiltIns.isUnit(returnType)) {
                trace.report(WRONG_SETTER_RETURN_TYPE.on(returnTypeReference));
            }
        }
        if (parameter != null) {
            // This check is redundant: the parser does not allow a default value, but we'll keep it just in case
            if (parameter.hasDefaultValue()) {
                trace.report(SETTER_PARAMETER_WITH_DEFAULT_VALUE.on(parameter.getDefaultValue()));
            }
            KotlinType type;
            KtTypeReference typeReference = parameter.getTypeReference();
            if (typeReference == null) {
                type = propertyDescriptor.getType(); // TODO : this maybe unknown at this point
            }
            else {
                // An explicitly declared parameter type must match the property type exactly.
                type = typeResolver.resolveType(scopeWithTypeParameters, typeReference, trace, true);
                KotlinType inType = propertyDescriptor.getType();
                if (!TypeUtils.equalTypes(type, inType)) {
                    trace.report(WRONG_SETTER_PARAMETER_TYPE.on(typeReference, inType, type));
                }
            }
            ValueParameterDescriptorImpl valueParameterDescriptor = resolveValueParameterDescriptor(
                    scopeWithTypeParameters, setterDescriptor, parameter, 0, type, trace, parameterTargetedAnnotations
            );
            setterDescriptor.initialize(valueParameterDescriptor);
        }
        else {
            // "set" with no parameter list: initialize with a default parameter.
            setterDescriptor.initializeDefault();
        }
        trace.record(BindingContext.PROPERTY_ACCESSOR, setter, setterDescriptor);
    }
    else if (property.isVar()) {
        // No explicit accessor: synthesize a default setter for a var property.
        // It is only "default" (trivial) when there is no delegate and no targeted annotations.
        setterDescriptor = DescriptorFactory.createSetter(
                propertyDescriptor, setterTargetedAnnotations, parameterTargetedAnnotations,
                !hasDelegate && setterTargetedAnnotations.isEmpty() && parameterTargetedAnnotations.isEmpty(),
                false, property.hasModifier(KtTokens.INLINE_KEYWORD),
                propertyDescriptor.getSource()
        );
    }
    // A val must not declare a setter.
    if (!property.isVar()) {
        if (setter != null) {
            trace.report(VAL_WITH_SETTER.on(setter));
        }
    }
    return setterDescriptor;
}
/**
 * Resolves the getter descriptor for a property.
 *
 * For an explicit {@code get()} accessor, modifiers and annotations are resolved
 * and the return type is determined (possibly inferred from the accessor body);
 * otherwise a default getter is synthesized. The getter is always initialized with
 * some type: a stub type is used when the property type is not yet known.
 */
@NotNull
private PropertyGetterDescriptorImpl resolvePropertyGetterDescriptor(
        @NotNull LexicalScope scopeForDeclarationResolution,
        @NotNull KtVariableDeclaration property,
        @NotNull PropertyDescriptor propertyDescriptor,
        @NotNull AnnotationSplitter annotationSplitter,
        @NotNull BindingTrace trace,
        @Nullable KotlinType propertyTypeIfKnown,
        @Nullable KtPropertyAccessor getter,
        boolean hasDelegate
) {
    PropertyGetterDescriptorImpl getterDescriptor;
    KotlinType getterType;
    Annotations getterTargetedAnnotations = annotationSplitter.getAnnotationsForTarget(PROPERTY_GETTER);
    if (getter != null) {
        // Combine @getter:-targeted annotations with annotations on the accessor itself.
        Annotations getterAnnotations = new CompositeAnnotations(CollectionsKt.listOf(
                getterTargetedAnnotations,
                annotationResolver.resolveAnnotationsWithoutArguments(scopeForDeclarationResolution, getter.getModifierList(), trace)
        ));
        getterDescriptor = new PropertyGetterDescriptorImpl(
                propertyDescriptor, getterAnnotations,
                resolveMemberModalityFromModifiers(getter, propertyDescriptor.getModality(),
                                                  trace.getBindingContext(), propertyDescriptor.getContainingDeclaration()),
                resolveVisibilityFromModifiers(getter, propertyDescriptor.getVisibility()),
                /* isDefault = */ false, getter.hasModifier(EXTERNAL_KEYWORD),
                property.hasModifier(KtTokens.INLINE_KEYWORD) || getter.hasModifier(KtTokens.INLINE_KEYWORD),
                CallableMemberDescriptor.Kind.DECLARATION, null, KotlinSourceElementKt.toSourceElement(getter)
        );
        getterType = determineGetterReturnType(scopeForDeclarationResolution, trace, getterDescriptor, getter, propertyTypeIfKnown);
        trace.record(BindingContext.PROPERTY_ACCESSOR, getter, getterDescriptor);
    }
    else {
        // No explicit accessor: synthesize a default getter; its type is the property type.
        getterDescriptor = DescriptorFactory.createGetter(
                propertyDescriptor, getterTargetedAnnotations,
                !hasDelegate && getterTargetedAnnotations.isEmpty(),
                /* isExternal = */ false, property.hasModifier(KtTokens.INLINE_KEYWORD)
        );
        getterType = propertyTypeIfKnown;
    }
    // Fall back to a stub type when no type could be determined yet.
    getterDescriptor.initialize(getterType != null ? getterType : VariableTypeAndInitializerResolver.STUB_FOR_PROPERTY_WITHOUT_TYPE);
    return getterDescriptor;
}
/**
 * Determines the return type of an explicit property getter.
 *
 * Priority: an explicitly declared accessor return type (checked against the known
 * property type), then inference from an expression-body getter when the property
 * itself has no declared type, then the known property type (may be {@code null}).
 */
@Nullable
private KotlinType determineGetterReturnType(
        @NotNull LexicalScope scope,
        @NotNull BindingTrace trace,
        @NotNull PropertyGetterDescriptor getterDescriptor,
        @NotNull KtPropertyAccessor getter,
        @Nullable KotlinType propertyTypeIfKnown
) {
    KtTypeReference returnTypeReference = getter.getReturnTypeReference();
    if (returnTypeReference != null) {
        // Explicit getter return type must equal the property type when the latter is known.
        KotlinType explicitReturnType = typeResolver.resolveType(scope, returnTypeReference, trace, true);
        if (propertyTypeIfKnown != null && !TypeUtils.equalTypes(explicitReturnType, propertyTypeIfKnown)) {
            trace.report(WRONG_GETTER_RETURN_TYPE.on(returnTypeReference, propertyTypeIfKnown, explicitReturnType));
        }
        return explicitReturnType;
    }
    // If a property has no type specified in the PSI but the getter does (or has an initializer e.g. "val x get() = ..."),
    // infer the correct type for the getter but leave the error type for the property.
    // This is useful for an IDE quick fix which would add the type to the property
    KtProperty property = getter.getProperty();
    if (!property.hasDelegateExpressionOrInitializer() && property.getTypeReference() == null &&
        getter.hasBody() && !getter.hasBlockBody()) {
        return inferReturnTypeFromExpressionBody(trace, scope, DataFlowInfoFactory.EMPTY, getter, getterDescriptor);
    }
    return propertyTypeIfKnown;
}
/**
 * Infers a declaration's return type from its expression body.
 *
 * Returns a recursion-intolerant deferred type: the body is type-checked lazily, the
 * resulting type is made publishable (anonymous types replaced), approximated and
 * sanitized, and return statements are checked against the final type.
 */
@NotNull
/*package*/ KotlinType inferReturnTypeFromExpressionBody(
        @NotNull BindingTrace trace,
        @NotNull LexicalScope scope,
        @NotNull DataFlowInfo dataFlowInfo,
        @NotNull KtDeclarationWithBody function,
        @NotNull FunctionDescriptor functionDescriptor
) {
    return wrappedTypeFactory.createRecursionIntolerantDeferredType(trace, () -> {
        PreliminaryDeclarationVisitor.Companion.createForDeclaration(function, trace, languageVersionSettings);
        KotlinType type = expressionTypingServices.getBodyExpressionType(trace, scope, dataFlowInfo, function, functionDescriptor);
        // Anonymous object types must not escape through a non-private signature.
        KotlinType publicType = transformAnonymousTypeIfNeeded(functionDescriptor, function, type, trace, anonymousTypeTransformers);
        UnwrappedType approximatedType = typeApproximator.approximateDeclarationType(publicType, false, languageVersionSettings);
        KotlinType sanitizedType = declarationReturnTypeSanitizer.sanitizeReturnType(approximatedType, wrappedTypeFactory, trace, languageVersionSettings);
        functionsTypingVisitor.checkTypesForReturnStatements(function, trace, sanitizedType);
        return sanitizedType;
    });
}
/**
 * Resolves a {@code val}/{@code var} primary-constructor parameter into the property
 * descriptor it declares on the class.
 *
 * Annotations on the parameter are split across their applicable use-site targets
 * (property, getter, field, constructor parameter, and — for mutable properties —
 * setter and setter parameter). A default getter is always created; a default setter
 * only for {@code var} parameters.
 */
@NotNull
public PropertyDescriptor resolvePrimaryConstructorParameterToAProperty(
        @NotNull ClassDescriptor classDescriptor,
        @NotNull ValueParameterDescriptor valueParameter,
        @NotNull LexicalScope scope,
        @NotNull KtParameter parameter,
        BindingTrace trace
) {
    KotlinType type = resolveParameterType(scope, parameter, trace);
    Name name = parameter.getNameAsSafeName();
    boolean isMutable = parameter.isMutable();
    KtModifierList modifierList = parameter.getModifierList();
    if (modifierList != null) {
        // A constructor-parameter property can never be abstract.
        if (modifierList.hasModifier(KtTokens.ABSTRACT_KEYWORD)) {
            trace.report(ABSTRACT_PROPERTY_IN_PRIMARY_CONSTRUCTOR_PARAMETERS.on(parameter));
        }
    }
    Annotations allAnnotations = annotationResolver.resolveAnnotationsWithoutArguments(scope, parameter.getModifierList(), trace);
    // Setter-related use-site targets apply only to mutable properties. Including
    // PROPERTY_SETTER unconditionally would let the splitter claim @setter:-targeted
    // annotations of a val parameter even though no setter exists.
    Set<AnnotationUseSiteTarget> targetSet = EnumSet.of(PROPERTY, PROPERTY_GETTER, FIELD, CONSTRUCTOR_PARAMETER);
    if (isMutable) {
        targetSet.add(PROPERTY_SETTER);
        targetSet.add(SETTER_PARAMETER);
    }
    AnnotationSplitter annotationSplitter = new AnnotationSplitter(storageManager, allAnnotations, targetSet);
    // Untargeted annotations fall through to the property itself.
    Annotations propertyAnnotations = new CompositeAnnotations(
            annotationSplitter.getAnnotationsForTarget(PROPERTY),
            annotationSplitter.getOtherAnnotations()
    );
    PropertyDescriptorImpl propertyDescriptor = PropertyDescriptorImpl.create(
            classDescriptor,
            propertyAnnotations,
            resolveMemberModalityFromModifiers(parameter, Modality.FINAL, trace.getBindingContext(), classDescriptor),
            resolveVisibilityFromModifiers(parameter, getDefaultVisibility(parameter, classDescriptor)),
            isMutable,
            name,
            CallableMemberDescriptor.Kind.DECLARATION,
            KotlinSourceElementKt.toSourceElement(parameter),
            false,
            false,
            classDescriptor.isExpect(),
            modifierList != null && PsiUtilsKt.hasActualModifier(modifierList),
            false,
            false
    );
    // Constructor-parameter properties have no type parameters of their own.
    propertyDescriptor.setType(type, Collections.emptyList(), getDispatchReceiverParameterIfNeeded(classDescriptor), null);
    Annotations setterAnnotations = annotationSplitter.getAnnotationsForTarget(PROPERTY_SETTER);
    Annotations getterAnnotations = new CompositeAnnotations(CollectionsKt.listOf(
            annotationSplitter.getAnnotationsForTarget(PROPERTY_GETTER)));
    PropertyGetterDescriptorImpl getter = DescriptorFactory.createDefaultGetter(propertyDescriptor, getterAnnotations);
    PropertySetterDescriptor setter =
            propertyDescriptor.isVar()
            ? DescriptorFactory.createDefaultSetter(
                    propertyDescriptor, setterAnnotations, annotationSplitter.getAnnotationsForTarget(SETTER_PARAMETER)
            )
            : null;
    propertyDescriptor.initialize(
            getter, setter,
            new FieldDescriptorImpl(annotationSplitter.getAnnotationsForTarget(FIELD), propertyDescriptor),
            null
    );
    getter.initialize(propertyDescriptor.getType());
    trace.record(BindingContext.PRIMARY_CONSTRUCTOR_PARAMETER, parameter, propertyDescriptor);
    trace.record(BindingContext.VALUE_PARAMETER_AS_PROPERTY, valueParameter, propertyDescriptor);
    return propertyDescriptor;
}
/**
 * Recursively checks that every type argument of {@code type} satisfies the upper
 * bounds of the corresponding type parameter, reporting violations on the source
 * type reference. Error types are skipped entirely.
 */
public static void checkBounds(@NotNull KtTypeReference typeReference, @NotNull KotlinType type, @NotNull BindingTrace trace) {
    if (KotlinTypeKt.isError(type)) return;
    KtTypeElement typeElement = typeReference.getTypeElement();
    if (typeElement == null) return;
    List<TypeParameterDescriptor> parameters = type.getConstructor().getParameters();
    List<TypeProjection> arguments = type.getArguments();
    assert parameters.size() == arguments.size();
    List<KtTypeReference> ktTypeArguments = typeElement.getTypeArgumentsAsTypes();
    // A type reference from Kotlin code can yield a flexible type only if it's `ft<T1, T2>`, whose bounds should not be checked
    if (FlexibleTypesKt.isFlexible(type) && !DynamicTypesKt.isDynamic(type)) {
        assert ktTypeArguments.size() == 2
                : "Flexible type cannot be denoted in Kotlin otherwise than as ft<T1, T2>, but was: "
                  + PsiUtilsKt.getElementTextWithContext(typeReference);
        // it's really ft<Foo, Bar>
        // Check the two bounds independently instead of the flexible type itself.
        FlexibleType flexibleType = FlexibleTypesKt.asFlexibleType(type);
        checkBounds(ktTypeArguments.get(0), flexibleType.getLowerBound(), trace);
        checkBounds(ktTypeArguments.get(1), flexibleType.getUpperBound(), trace);
        return;
    }
    // If the numbers of type arguments do not match, the error has been already reported in TypeResolver
    if (ktTypeArguments.size() != arguments.size()) return;
    TypeSubstitutor substitutor = TypeSubstitutor.create(type);
    for (int i = 0; i < ktTypeArguments.size(); i++) {
        KtTypeReference ktTypeArgument = ktTypeArguments.get(i);
        if (ktTypeArgument == null) continue;
        KotlinType typeArgument = arguments.get(i).getType();
        // Recurse into the argument's own arguments, then check this argument
        // against the parameter's (substituted) upper bounds.
        checkBounds(ktTypeArgument, typeArgument, trace);
        TypeParameterDescriptor typeParameterDescriptor = parameters.get(i);
        checkBounds(ktTypeArgument, typeArgument, typeParameterDescriptor, substitutor, trace);
    }
}
/**
 * Reports {@code UPPER_BOUND_VIOLATED} on {@code jetTypeArgument} for every upper
 * bound of {@code typeParameterDescriptor} (after substitution) that
 * {@code typeArgument} is not a subtype of.
 */
public static void checkBounds(
        @NotNull KtTypeReference jetTypeArgument,
        @NotNull KotlinType typeArgument,
        @NotNull TypeParameterDescriptor typeParameterDescriptor,
        @NotNull TypeSubstitutor substitutor,
        @NotNull BindingTrace trace
) {
    for (KotlinType upperBound : typeParameterDescriptor.getUpperBounds()) {
        KotlinType substituted = substitutor.safeSubstitute(upperBound, Variance.INVARIANT);
        if (KotlinTypeChecker.DEFAULT.isSubtypeOf(typeArgument, substituted)) {
            continue;
        }
        trace.report(UPPER_BOUND_VIOLATED.on(jetTypeArgument, substituted, typeArgument));
    }
}
/**
 * Checks that an instance of the outer class {@code target} is reachable from the
 * current scope, i.e. no static nested class boundary is crossed on the way out.
 * Reports {@code INACCESSIBLE_OUTER_CLASS_EXPRESSION} and returns {@code false}
 * when it is not; returns {@code true} otherwise.
 */
public static boolean checkHasOuterClassInstance(
        @NotNull LexicalScope scope,
        @NotNull BindingTrace trace,
        @NotNull PsiElement reportErrorsOn,
        @NotNull ClassDescriptor target
) {
    ClassDescriptor classDescriptor = getContainingClass(scope);
    // Not nested inside target (or a subclass of it) at all: nothing to check.
    if (!isInsideOuterClassOrItsSubclass(classDescriptor, target)) {
        return true;
    }
    // Walk outwards through containing classes until target (or a subclass) is found.
    while (classDescriptor != null) {
        if (isSubclass(classDescriptor, target)) {
            return true;
        }
        // Crossing a static nested class severs access to the outer instance.
        if (isStaticNestedClass(classDescriptor)) {
            trace.report(INACCESSIBLE_OUTER_CLASS_EXPRESSION.on(reportErrorsOn, classDescriptor));
            return false;
        }
        classDescriptor = getParentOfType(classDescriptor, ClassDescriptor.class, true);
    }
    return true;
}
/**
 * Returns {@code true} when {@code nested} is declared (directly or transitively)
 * inside {@code outer} or inside a subclass of {@code outer}.
 */
private static boolean isInsideOuterClassOrItsSubclass(@Nullable DeclarationDescriptor nested, @NotNull ClassDescriptor outer) {
    // Iterative walk up the containing-declaration chain (equivalent to the recursive form).
    for (DeclarationDescriptor current = nested; current != null; current = current.getContainingDeclaration()) {
        if (current instanceof ClassDescriptor && isSubclass((ClassDescriptor) current, outer)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns the closest class containing the scope's owner descriptor (the owner
 * itself is not considered), or {@code null} when there is none.
 */
@Nullable
public static ClassDescriptor getContainingClass(@NotNull LexicalScope scope) {
    return getParentOfType(scope.getOwnerDescriptor(), ClassDescriptor.class, false);
}
}
|
/*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.connector.mongodb.core.engine.query;
import com.stratio.connector.commons.engine.query.ProjectParsed;
import com.stratio.connector.commons.engine.query.ProjectValidator;
import com.stratio.connector.mongodb.core.exceptions.MongoValidationException;
import com.stratio.crossdata.common.exceptions.ExecutionException;
import com.stratio.crossdata.common.exceptions.UnsupportedException;
/**
 * Validates the logical workflow and stores the needed steps.
 */
public class MongoLogicalWorkflowValidator implements ProjectValidator {

    /**
     * Validates the parsed projection: a project step and a select step must be
     * present, and full-text (match) queries are rejected as unsupported.
     *
     * @param projectParsed
     *            the projectParsed.
     * @throws UnsupportedException
     *             if the specified operation is not supported
     * @throws ExecutionException
     *             if the project is not validated.
     */
    @Override
    public void validate(ProjectParsed projectParsed) throws MongoValidationException, UnsupportedException {
        requireStep(projectParsed.getProject(), "Projection has not been found in the logical workflow");
        requireStep(projectParsed.getSelect(), "Select has not been found in the logical workflow");
        if (!projectParsed.getMatchList().isEmpty()) {
            throw new UnsupportedException("Full-text queries not yet supported");
        }
    }

    /**
     * Throws a validation error with the given message when a mandatory workflow
     * step is missing.
     */
    private static void requireStep(Object step, String message) throws MongoValidationException {
        if (step == null) {
            throw new MongoValidationException(message);
        }
    }
}
|
package com.budwk.app.cms.controllers.cms;
import cn.dev33.satoken.annotation.SaCheckLogin;
import cn.dev33.satoken.annotation.SaCheckPermission;
import com.budwk.app.cms.models.Cms_link;
import com.budwk.app.cms.services.CmsLinkClassService;
import com.budwk.app.cms.services.CmsLinkService;
import com.budwk.starter.common.openapi.annotation.*;
import com.budwk.starter.common.openapi.enums.ParamIn;
import com.budwk.starter.common.page.PageUtil;
import com.budwk.starter.common.result.Result;
import com.budwk.starter.common.result.ResultCode;
import com.budwk.starter.log.annotation.SLog;
import com.budwk.starter.security.utils.SecurityUtil;
import lombok.extern.slf4j.Slf4j;
import org.nutz.dao.Cnd;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.lang.Strings;
import org.nutz.mvc.annotation.*;
import javax.servlet.http.HttpServletRequest;
/**
 * REST controller for CMS link management: class listing, paged queries and CRUD.
 *
 * @author wizzer(wizzer.cn)
 * @date 2020/3/3
 */
@IocBean
@At("/cms/links/link")
@SLog(tag = "CMS链接管理")
@ApiDefinition(tag = "CMS链接管理")
@Slf4j
public class CmsLinkController {
    @Inject
    private CmsLinkClassService cmsLinkClassService;
    @Inject
    private CmsLinkService cmsLinkService;

    /** Returns all link classes (id/name only, per the @Ok field filter). */
    @At("/list_class")
    @GET
    @Ok("json:{actived:'code|msg|data|id|name',ignoreNull:true}")
    @SaCheckLogin
    @ApiOperation(name = "获取分类列表")
    @ApiResponses
    public Result<?> listClass(HttpServletRequest req) {
        return Result.data(cmsLinkClassService.query());
    }

    /** Paged link query, optionally filtered by class and ordered by the given field. */
    @At("/list")
    @POST
    @Ok("json:{locked:'password|salt',ignoreNull:false}")
    @SaCheckPermission("cms.links.link")
    @ApiOperation(name = "分页查询链接")
    @ApiFormParams(
            {
                    @ApiFormParam(name = "classId", example = "", description = "分类ID"),
                    @ApiFormParam(name = "pageNo", example = "1", description = "页码", type = "integer"),
                    @ApiFormParam(name = "pageSize", example = "10", description = "页大小", type = "integer"),
                    @ApiFormParam(name = "pageOrderName", example = "createdAt", description = "排序字段"),
                    @ApiFormParam(name = "pageOrderBy", example = "descending", description = "排序方式")
            }
    )
    @ApiResponses(implementation = Cms_link.class)
    public Result<?> list(@Param("classId") String classId, @Param("pageNo") int pageNo, @Param("pageSize") int pageSize, @Param("pageOrderName") String pageOrderName, @Param("pageOrderBy") String pageOrderBy) {
        Cnd cnd = Cnd.NEW();
        if (Strings.isNotBlank(classId)) {
            cnd.and("classId", "=", classId);
        }
        if (Strings.isNotBlank(pageOrderName) && Strings.isNotBlank(pageOrderBy)) {
            cnd.orderBy(pageOrderName, PageUtil.getOrder(pageOrderBy));
        }
        return Result.data(cmsLinkService.listPage(pageNo, pageSize, cnd));
    }

    /** Creates a link, stamping created/updated user from the current session. */
    @At("/create")
    @POST
    @Ok("json")
    @SaCheckPermission("cms.links.link.create")
    @SLog(value = "新增链接:${link.name}")
    @ApiOperation(name = "新增链接")
    @ApiFormParams(
            implementation = Cms_link.class
    )
    @ApiResponses
    public Result<?> create(@Param("..") Cms_link link, HttpServletRequest req) {
        link.setCreatedBy(SecurityUtil.getUserId());
        link.setUpdatedBy(SecurityUtil.getUserId());
        cmsLinkService.insert(link);
        return Result.success();
    }

    /** Updates a link, ignoring null fields so partial submissions are safe. */
    @At("/update")
    @POST
    @Ok("json")
    @SaCheckPermission("cms.links.link.update")
    @SLog(value = "修改链接:${link.name}")
    @ApiOperation(name = "修改链接")
    @ApiFormParams(
            implementation = Cms_link.class
    )
    @ApiResponses
    public Result<?> update(@Param("..") Cms_link link, HttpServletRequest req) {
        link.setUpdatedBy(SecurityUtil.getUserId());
        cmsLinkService.updateIgnoreNull(link);
        return Result.success();
    }

    /** Fetches a single link by id. */
    @At("/get/{id}")
    @GET
    @Ok("json")
    @SaCheckLogin
    @ApiOperation(name = "获取链接信息")
    @ApiImplicitParams(
            {
                    @ApiImplicitParam(name = "id", description = "主键ID", in = ParamIn.PATH, required = true, check = true)
            }
    )
    @ApiResponses(implementation = Cms_link.class)
    public Result<?> get(String id, HttpServletRequest req) {
        // Use Result.data(...) for payload-carrying responses, consistent with list/listClass.
        return Result.data(cmsLinkService.fetch(id));
    }

    /** Deletes several links at once; names are recorded in the operation log. */
    @At("/delete_more")
    @Ok("json")
    @POST
    @SaCheckPermission("cms.links.link.delete")
    @SLog(value = "批量删除链接")
    @ApiOperation(name = "批量删除链接")
    @ApiFormParams(
            {
                    @ApiFormParam(name = "ids", example = "a,b", description = "链接ID数组", required = true),
                    @ApiFormParam(name = "names", example = "a,b", description = "链接名称数组", required = true)
            }
    )
    @ApiResponses
    public Result<?> deleteMore(@Param("ids") String[] ids, @Param("names") String names, HttpServletRequest req) {
        if (ids == null) {
            return Result.error("system.error.invalid");
        }
        cmsLinkService.delete(ids);
        req.setAttribute("_slog_msg", names);
        return Result.success();
    }

    /** Deletes one link by id; 404-style error when it does not exist. */
    @At("/delete/{id}")
    @Ok("json")
    @DELETE
    @SaCheckPermission("cms.links.link.delete")
    @SLog(value = "删除链接")
    @ApiOperation(name = "删除链接")
    @ApiImplicitParams(
            {
                    @ApiImplicitParam(name = "id", description = "主键ID", in = ParamIn.PATH, required = true, check = true)
            }
    )
    @ApiResponses
    public Result<?> delete(String id, HttpServletRequest req) {
        Cms_link link = cmsLinkService.fetch(id);
        if (link == null) {
            return Result.error(ResultCode.NULL_DATA_ERROR);
        }
        cmsLinkService.delete(id);
        req.setAttribute("_slog_msg", String.format("%s", link.getName()));
        return Result.success();
    }
}
|
/*
* Copyright 2015, Yahoo Inc.
* Licensed under the Apache License, Version 2.0
* See LICENSE file in project root for terms.
*/
package com.yahoo.elide.datastores.hibernate5;
import com.yahoo.elide.core.datastore.DataStoreTransaction;
import com.yahoo.elide.core.datastore.JPQLDataStore;
import com.yahoo.elide.core.dictionary.EntityDictionary;
import com.yahoo.elide.core.type.ClassType;
import com.yahoo.elide.core.type.Type;
import org.hibernate.ScrollMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import javax.persistence.EntityManagerFactory;
import javax.persistence.metamodel.EntityType;
/**
 * Hibernate interface library.
 *
 * Base class for Hibernate-backed data stores; concrete subclasses decide how a
 * transaction is begun. A {@link Builder} chooses between a {@code SessionFactory}-
 * and an {@code EntityManagerFactory}-backed implementation.
 */
public abstract class AbstractHibernateStore implements JPQLDataStore {
    // Exactly one of sessionFactory / (subclass-held) EntityManagerFactory is used.
    protected final SessionFactory sessionFactory;
    protected final boolean isScrollEnabled;
    protected final ScrollMode scrollMode;
    protected final HibernateTransactionSupplier transactionSupplier;

    /**
     * Constructor.
     *
     * @param aSessionFactory Session factory
     * @param isScrollEnabled Whether or not scrolling is enabled on driver
     * @param scrollMode Scroll mode to use for scrolling driver
     */
    protected AbstractHibernateStore(SessionFactory aSessionFactory, boolean isScrollEnabled, ScrollMode scrollMode) {
        this(aSessionFactory, isScrollEnabled, scrollMode, HibernateTransaction::new);
    }

    /**
     * Constructor.
     *
     * Useful for extending the store and relying on existing code
     * to instantiate custom hibernate transaction.
     *
     * @param aSessionFactory Session factory
     * @param isScrollEnabled Whether or not scrolling is enabled on driver
     * @param scrollMode Scroll mode to use for scrolling driver
     * @param transactionSupplier Supplier for transaction
     */
    protected AbstractHibernateStore(SessionFactory aSessionFactory,
                                     boolean isScrollEnabled,
                                     ScrollMode scrollMode,
                                     HibernateTransactionSupplier transactionSupplier) {
        this.sessionFactory = aSessionFactory;
        this.isScrollEnabled = isScrollEnabled;
        this.scrollMode = scrollMode;
        this.transactionSupplier = transactionSupplier;
    }

    /**
     * Builder object to configuration hibernate store.
     */
    public static class Builder {
        private final SessionFactory sessionFactory;
        private boolean isScrollEnabled;
        private ScrollMode scrollMode;
        private EntityManagerFactory emf;

        /** Builds a SessionFactory-backed store; scrolling defaults to enabled, FORWARD_ONLY. */
        public Builder(final SessionFactory sessionFactory) {
            this.sessionFactory = sessionFactory;
            this.isScrollEnabled = true;
            this.scrollMode = ScrollMode.FORWARD_ONLY;
            this.emf = null;
        }

        /** Builds an EntityManagerFactory-backed store; scrolling defaults to enabled, FORWARD_ONLY. */
        public Builder(final EntityManagerFactory entityManagerFactory) {
            this.sessionFactory = null;
            this.isScrollEnabled = true;
            this.scrollMode = ScrollMode.FORWARD_ONLY;
            this.emf = entityManagerFactory;
        }

        /** Enables or disables driver-level scrolling. */
        public Builder withScrollEnabled(final boolean isScrollEnabled) {
            this.isScrollEnabled = isScrollEnabled;
            return this;
        }

        /** Sets the scroll mode used when scrolling is enabled. */
        public Builder withScrollMode(final ScrollMode scrollMode) {
            this.scrollMode = scrollMode;
            return this;
        }

        /**
         * Builds the concrete store; the SessionFactory variant wins when both
         * could apply (the constructors make them mutually exclusive).
         */
        public AbstractHibernateStore build() {
            if (sessionFactory != null) {
                return new HibernateSessionFactoryStore(sessionFactory, isScrollEnabled, scrollMode);
            }
            if (emf != null) {
                return new HibernateEntityManagerStore(emf, isScrollEnabled, scrollMode);
            }
            throw new IllegalStateException("Either an EntityManager or SessionFactory is required!");
        }
    }

    // NOTE(review): sessionFactory is null for stores built from an EntityManagerFactory,
    // so this would NPE there; presumably HibernateEntityManagerStore overrides it — confirm.
    @Override
    public void populateEntityDictionary(EntityDictionary dictionary) {
        /* bind all entities */
        for (EntityType<?> type : sessionFactory.getMetamodel().getEntities()) {
            bindEntity(dictionary, type);
        }
    }

    /** Binds a single JPA entity type into the Elide dictionary. */
    protected void bindEntity(EntityDictionary dictionary, EntityType<?> type) {
        Type<?> mappedClass = ClassType.of(type.getJavaType());
        bindEntityClass(mappedClass, dictionary);
    }

    /**
     * Start Hibernate transaction.
     *
     * @return transaction
     */
    @Override
    abstract public DataStoreTransaction beginTransaction();

    /**
     * Functional interface for describing a method to supply a custom Hibernate transaction.
     */
    @FunctionalInterface
    public interface HibernateTransactionSupplier {
        HibernateTransaction get(Session session, boolean isScrollEnabled, ScrollMode scrollMode);
    }
}
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zmlx.hg4idea.action.mq;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.util.ContentUtilEx;
import com.intellij.util.ObjectUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.zmlx.hg4idea.action.HgAbstractGlobalSingleRepoAction;
import org.zmlx.hg4idea.action.HgActionUtil;
import org.zmlx.hg4idea.repo.HgRepository;
import org.zmlx.hg4idea.ui.HgMqUnAppliedPatchesPanel;
import java.util.Collection;
/**
 * Action that opens the MQ unapplied-patches panel for the selected Mercurial repository
 * as a tab of the VCS tool window.
 */
public class HgShowUnAppliedPatchesAction extends HgAbstractGlobalSingleRepoAction {

  @Override
  protected void execute(@NotNull Project project, @NotNull Collection<HgRepository> repositories, @Nullable HgRepository selectedRepo) {
    // Nothing to show without a concrete repository selection.
    if (selectedRepo == null) {
      return;
    }
    showUnAppliedPatches(project, selectedRepo);
  }

  @Override
  public void update(AnActionEvent e) {
    boolean hasRepository = HgActionUtil.getSelectedRepositoryFromEvent(e) != null;
    e.getPresentation().setEnabledAndVisible(hasRepository);
  }

  /** Adds (or reuses) an "MQ" tab named after the repository root and activates the VCS tool window. */
  public static void showUnAppliedPatches(@NotNull Project project, @NotNull HgRepository selectedRepo) {
    ToolWindowManager windowManager = ToolWindowManager.getInstance(project);
    ToolWindow toolWindow = ObjectUtils.assertNotNull(windowManager.getToolWindow(ToolWindowId.VCS));
    HgMqUnAppliedPatchesPanel patchesPanel = new HgMqUnAppliedPatchesPanel(selectedRepo);
    String tabName = selectedRepo.getRoot().getName();
    ContentUtilEx.addTabbedContent(toolWindow.getContentManager(), patchesPanel, "MQ", tabName, true);
    toolWindow.activate(null);
  }
}
|
/*
* Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import com.sun.tools.classfile.*;
/**
 * Trivial {@code Attribute.Visitor} implementation, to make it easy to
 * write visitors for specific attributes.
 *
 * Every visit method returns {@code null}; subclasses override only the
 * attributes they care about. {@code @Override} is applied to each method so
 * any drift from the {@code Attribute.Visitor} interface fails at compile time.
 */
class AttributeVisitor<R, P> implements Attribute.Visitor<R, P> {
    @Override
    public R visitBootstrapMethods(BootstrapMethods_attribute attr, P p) { return null; }
    @Override
    public R visitDefault(DefaultAttribute attr, P p) { return null; }
    @Override
    public R visitAnnotationDefault(AnnotationDefault_attribute attr, P p) { return null; }
    @Override
    public R visitCharacterRangeTable(CharacterRangeTable_attribute attr, P p) { return null; }
    @Override
    public R visitCode(Code_attribute attr, P p) { return null; }
    @Override
    public R visitCompilationID(CompilationID_attribute attr, P p) { return null; }
    @Override
    public R visitConstantValue(ConstantValue_attribute attr, P p) { return null; }
    @Override
    public R visitDeprecated(Deprecated_attribute attr, P p) { return null; }
    @Override
    public R visitEnclosingMethod(EnclosingMethod_attribute attr, P p) { return null; }
    @Override
    public R visitExceptions(Exceptions_attribute attr, P p) { return null; }
    @Override
    public R visitInnerClasses(InnerClasses_attribute attr, P p) { return null; }
    @Override
    public R visitLineNumberTable(LineNumberTable_attribute attr, P p) { return null; }
    @Override
    public R visitLocalVariableTable(LocalVariableTable_attribute attr, P p) { return null; }
    @Override
    public R visitLocalVariableTypeTable(LocalVariableTypeTable_attribute attr, P p) { return null; }
    @Override
    public R visitMethodParameters(MethodParameters_attribute attr, P p) { return null; }
    @Override
    public R visitModule(Module_attribute attr, P p) { return null; }
    @Override
    public R visitModuleHashes(ModuleHashes_attribute attr, P p) { return null; }
    @Override
    public R visitModuleMainClass(ModuleMainClass_attribute attr, P p) { return null; }
    @Override
    public R visitModulePackages(ModulePackages_attribute attr, P p) { return null; }
    @Override
    public R visitModuleResolution(ModuleResolution_attribute attr, P p) { return null; }
    @Override
    public R visitModuleTarget(ModuleTarget_attribute attr, P p) { return null; }
    @Override
    public R visitRuntimeVisibleAnnotations(RuntimeVisibleAnnotations_attribute attr, P p) { return null; }
    @Override
    public R visitRuntimeInvisibleAnnotations(RuntimeInvisibleAnnotations_attribute attr, P p) { return null; }
    @Override
    public R visitRuntimeVisibleParameterAnnotations(RuntimeVisibleParameterAnnotations_attribute attr, P p) { return null; }
    @Override
    public R visitRuntimeInvisibleParameterAnnotations(RuntimeInvisibleParameterAnnotations_attribute attr, P p) { return null; }
    @Override
    public R visitRuntimeVisibleTypeAnnotations(RuntimeVisibleTypeAnnotations_attribute attr, P p) { return null; }
    @Override
    public R visitRuntimeInvisibleTypeAnnotations(RuntimeInvisibleTypeAnnotations_attribute attr, P p) { return null; }
    @Override
    public R visitSignature(Signature_attribute attr, P p) { return null; }
    @Override
    public R visitSourceDebugExtension(SourceDebugExtension_attribute attr, P p) { return null; }
    @Override
    public R visitSourceFile(SourceFile_attribute attr, P p) { return null; }
    @Override
    public R visitSourceID(SourceID_attribute attr, P p) { return null; }
    @Override
    public R visitStackMap(StackMap_attribute attr, P p) { return null; }
    @Override
    public R visitStackMapTable(StackMapTable_attribute attr, P p) { return null; }
    @Override
    public R visitSynthetic(Synthetic_attribute attr, P p) { return null; }
}
|
package chylex.hee.world.feature.stronghold;
import java.util.Random;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.World;
import chylex.hee.entity.mob.EntityMobSilverfish;
import chylex.hee.entity.technical.EntityTechnicalSpawner;
import chylex.hee.entity.technical.EntityTechnicalSpawner.IVirtualSpawner;
import chylex.hee.system.abstractions.Pos;
import chylex.hee.system.abstractions.Vec;
import chylex.hee.system.abstractions.entity.EntitySelector;
import chylex.hee.world.util.BoundingBox;
import com.google.common.collect.ImmutableList;
public class StrongholdSilverfishSpawner implements IVirtualSpawner<EntityMobSilverfish>{
    // 256 x 64 x 256 region centered on the owning spawner entity,
    // used for proximity checks and the region-wide population cap.
    private BoundingBox checkArea;

    @Override
    public void init(EntityTechnicalSpawner owner){
        checkArea = new BoundingBox(Pos.at(owner).offset(-128, -32, -128), Pos.at(owner).offset(128, 32, 128));
    }

    @Override
    public EntityMobSilverfish createEntity(World world){
        return new EntityMobSilverfish(world);
    }

    @Override
    public int getCheckTimer(Random rand){
        // 20..54 ticks between spawn checks.
        return 20 + rand.nextInt(35);
    }

    @Override
    public int getSpawnAttempts(Random rand){
        return 8;
    }

    @Override
    public int getSpawnLimit(Random rand){
        // 2..4 mobs per spawn cycle.
        return 2 + rand.nextInt(3);
    }

    @Override
    public double getSpawnRange(Random rand){
        // 7..20 blocks away from the target player.
        return 7D + rand.nextDouble()*13D;
    }

    @Override
    public BoundingBox getCheckBox(){
        return checkArea;
    }

    @Override
    public void findSpawnPosition(World world, Random rand, EntityPlayer target, EntityMobSilverfish entity, double range){
        // Random horizontal direction at the given range, random height within
        // +/- 5 blocks of the player, random yaw.
        Vec dir = Vec.xzRandom(rand);
        entity.setPositionAndRotation(target.posX + dir.x*range, target.posY + (rand.nextDouble() - 0.5D)*10D, target.posZ + dir.z*range, rand.nextFloat()*360F - 180F, 0F);
        // Drop the entity down one block at a time (at most 5 blocks) until it
        // stands on stone bricks.
        int remainingDrops = 5;
        while(remainingDrops-- > 0){
            if (Pos.at(entity).getDown().getBlock(world) == Blocks.stonebrick) break;
            entity.setPosition(entity.posX, entity.posY - 1D, entity.posZ);
        }
    }

    @Override
    public boolean checkSpawnConditions(World world, Random rand, ImmutableList<EntityPlayer> playersInRange, EntityPlayer target, EntityMobSilverfish entity){
        Pos spawnPos = Pos.at(entity);
        // All conditions must hold; evaluation order is preserved so the cheap
        // checks run before the entity-list scans.
        return world.difficultySetting != EnumDifficulty.PEACEFUL
            && world.checkNoEntityCollision(entity.boundingBox)
            && world.getCollidingBoundingBoxes(entity, entity.boundingBox).isEmpty()
            && spawnPos.getDown().getBlock(world) == Blocks.stonebrick
            && world.getBlockLightValue(spawnPos.getX(), spawnPos.getY(), spawnPos.getZ()) <= 7
            && playersInRange.stream().noneMatch(entity::canEntityBeSeen)
            && EntitySelector.type(world, EntityMobSilverfish.class, target.boundingBox.expand(16D, 8D, 16D)).size() < 10
            && EntitySelector.type(world, EntityMobSilverfish.class, checkArea.toAABB()).size() < 15*playersInRange.size();
    }
}
|
/*
* Copyright (c) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bicarb.core.forum;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.bicarb.core.BaseSetup;
import org.junit.jupiter.api.Test;
/**
* @author olOwOlo
*/
public class PreviewTest extends BaseSetup {

    private static final String PREVIEW_ENDPOINT = "/api/preview";

    @Test
    void testPreviewTopicBody() throws Exception {
        // Topic bodies are rendered with heading anchors and user-mention links.
        String expectedHtml =
            "<h1><a href=\"#heading\" id=\"heading\" class=\"anchor-link\"></a>heading</h1>\n"
            + "<p><a class=\"user-link\" href=\"/user/admin\">@admin </a>hello!</p>\n";
        mockRequest.postJson(mockMvc, PREVIEW_ENDPOINT, jsonBody.getJson("/preview/topicBody.json"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.body").value(expectedHtml));
    }

    @Test
    void testPreviewPost() throws Exception {
        // Post bodies are rendered without heading anchors.
        String expectedHtml =
            "<h1>heading</h1>\n"
            + "<p><a class=\"user-link\" href=\"/user/admin\">@admin </a>hello!</p>\n";
        mockRequest.postJson(mockMvc, PREVIEW_ENDPOINT, jsonBody.getJson("/preview/postBody.json"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.body").value(expectedHtml));
    }

    @Test
    void testInvalid() throws Exception {
        // An empty JSON object is missing the required body field.
        mockRequest.postJson(mockMvc, PREVIEW_ENDPOINT, "{}")
            .andExpect(status().isBadRequest());
    }
}
|
/*
* Copyright (c) 2004, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.tools.jconsole;
import java.awt.*;
import java.awt.event.*;
import java.util.List;
import java.util.TreeSet;
import java.util.Comparator;
import javax.swing.*;
import javax.swing.border.*;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InstanceNotFoundException;
import static sun.tools.jconsole.Utilities.*;
@SuppressWarnings("serial")
public class CreateMBeanDialog extends InternalDialog implements ActionListener {
    JConsole jConsole;                      // owning console window; dialog parent
    JComboBox<ProxyClient> connections;     // connections exposing platform MXBeans
    JButton createMBeanButton, unregisterMBeanButton, cancelButton;
    // Class name used to create the HotSpot internal management MBean.
    private static final String HOTSPOT_MBEAN = "sun.management.HotspotInternal";
    // ObjectName under which that MBean is (un)registered.
    private static final String HOTSPOT_MBEAN_OBJECTNAME = "sun.management:type=HotspotInternal";

    /**
     * Builds the modal "Hotspot MBeans" dialog: a connection chooser in the
     * center, Create/Unregister/Cancel buttons and a status bar at the bottom.
     */
    public CreateMBeanDialog(JConsole jConsole) {
        super(jConsole, "JConsole: Hotspot MBeans", true);
        this.jConsole = jConsole;
        setAccessibleDescription(this, Messages.HOTSPOT_MBEANS_DIALOG_ACCESSIBLE_DESCRIPTION);
        Container cp = getContentPane();
        ((JComponent) cp).setBorder(new EmptyBorder(10, 10, 4, 10));
        JPanel centerPanel = new JPanel(new VariableGridLayout(0, 1, 4, 4, false, true));
        cp.add(centerPanel, BorderLayout.CENTER);
        connections = new JComboBox<ProxyClient>();
        updateConnections();
        centerPanel.add(new LabeledComponent(Resources.format(Messages.MANAGE_HOTSPOT_MBEANS_IN_COLON_), connections));
        JPanel bottomPanel = new JPanel(new BorderLayout());
        cp.add(bottomPanel, BorderLayout.SOUTH);
        JPanel buttonPanel = new JPanel();
        bottomPanel.add(buttonPanel, BorderLayout.NORTH);
        buttonPanel.add(createMBeanButton = new JButton(Messages.CREATE));
        buttonPanel.add(unregisterMBeanButton = new JButton(Messages.UNREGISTER));
        buttonPanel.add(cancelButton = new JButton(Messages.CANCEL));
        // NOTE(review): statusBar is not declared in this class -- presumably
        // inherited from InternalDialog; confirm against that class.
        statusBar = new JLabel(" ", JLabel.CENTER);
        bottomPanel.add(statusBar, BorderLayout.SOUTH);
        createMBeanButton.addActionListener(this);
        unregisterMBeanButton.addActionListener(this);
        cancelButton.addActionListener(this);
        LabeledComponent.layout(centerPanel);
        pack();
        setLocationRelativeTo(jConsole);
    }

    /**
     * Rebuilds the combo-box model with every connection that has platform
     * MXBeans: either the single connection shown in the main content pane, or
     * the connection of each open internal frame.
     */
    private void updateConnections() {
        List<VMInternalFrame> frames = jConsole.getInternalFrames();
        // TreeSet sorts and de-duplicates entries by connection name.
        TreeSet<ProxyClient> data = new TreeSet<ProxyClient>(new Comparator<ProxyClient>() {
            public int compare(ProxyClient o1, ProxyClient o2) {
                // TODO: Need to understand how this method being used?
                return o1.connectionName().compareTo(o2.connectionName());
            }
        });
        if (frames.size() == 0) {
            // No internal frames: take the VMPanel from the main window, if any.
            JComponent cp = (JComponent) jConsole.getContentPane();
            Component comp = ((BorderLayout) cp.getLayout()).
                getLayoutComponent(BorderLayout.CENTER);
            if (comp instanceof VMPanel) {
                VMPanel vmpanel = (VMPanel) comp;
                ProxyClient client = vmpanel.getProxyClient(false);
                if (client != null && client.hasPlatformMXBeans()) {
                    data.add(client);
                }
            }
        } else {
            for (VMInternalFrame f : frames) {
                ProxyClient client = f.getVMPanel().getProxyClient(false);
                if (client != null && client.hasPlatformMXBeans()) {
                    data.add(client);
                }
            }
        }
        connections.invalidate();
        connections.setModel(new DefaultComboBoxModel<ProxyClient>(data.toArray(new ProxyClient[data.size()])));
        connections.validate();
    }

    /**
     * Hides the dialog and runs the chosen MBean operation on a background
     * thread. On success (or Cancel, or no selection) the dialog stays hidden;
     * on failure the status text is set and the dialog is re-shown.
     */
    public void actionPerformed(final ActionEvent ev) {
        setVisible(false);
        statusBar.setText("");
        if (ev.getSource() != cancelButton) {
            new Thread("CreateMBeanDialog.actionPerformed") {
                public void run() {
                    try {
                        Object c = connections.getSelectedItem();
                        if (c == null) { return; }
                        if (ev.getSource() == createMBeanButton) {
                            MBeanServerConnection connection = ((ProxyClient) c).
                                getMBeanServerConnection();
                            connection.createMBean(HOTSPOT_MBEAN, null);
                        } else {
                            if (ev.getSource() == unregisterMBeanButton) {
                                MBeanServerConnection connection = ((ProxyClient) c).
                                    getMBeanServerConnection();
                                connection.unregisterMBean(new ObjectName(HOTSPOT_MBEAN_OBJECTNAME));
                            }
                        }
                        return;
                    } catch (InstanceAlreadyExistsException e) {
                        statusBar.setText(Messages.ERROR_COLON_MBEANS_ALREADY_EXIST);
                    } catch (InstanceNotFoundException e) {
                        statusBar.setText(Messages.ERROR_COLON_MBEANS_DO_NOT_EXIST);
                    } catch (Exception e) {
                        statusBar.setText(e.toString());
                    }
                    // NOTE(review): Swing state (statusBar above, setVisible here)
                    // is touched from this background thread rather than the EDT --
                    // confirm whether this should go through SwingUtilities.invokeLater.
                    setVisible(true);
                }
            }.start();
        }
    }

    /**
     * Shows/hides the dialog; when showing, refreshes the connection list and
     * re-centers over the console, then moves focus to the combo box.
     */
    public void setVisible(boolean b) {
        boolean wasVisible = isVisible();
        if (b) {
            setLocationRelativeTo(jConsole);
            invalidate();
            updateConnections();
            validate();
            repaint();
        }
        super.setVisible(b);
        if (b && !wasVisible) {
            // Need to delay this to make focus stick
            SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    connections.requestFocus();
                }
            });
        }
    }
}
|
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skylarkbuildapi;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.StarlarkValue;
/** Interface for a type containing information about the attributes of a rule. */
@SkylarkModule(
    name = "rule_attributes",
    category = SkylarkModuleCategory.BUILTIN,
    doc = "Information about attributes of a rule an aspect is applied to.")
public interface SkylarkAttributesCollectionApi extends StarlarkValue {

  // Note: interface members are implicitly public; the redundant "public"
  // modifiers have been dropped per standard Java style (Checkstyle
  // RedundantModifier).

  /** Returns the struct of attribute values (see {@code ATTR_DOC}). */
  @SkylarkCallable(name = "attr", structField = true, doc = SkylarkRuleContextApi.ATTR_DOC)
  StructApi getAttr() throws EvalException;

  /** Returns the struct of executable files (see {@code EXECUTABLE_DOC}). */
  @SkylarkCallable(
      name = "executable",
      structField = true,
      doc = SkylarkRuleContextApi.EXECUTABLE_DOC)
  StructApi getExecutable() throws EvalException;

  /** Returns the struct of single files (see {@code FILE_DOC}). */
  @SkylarkCallable(name = "file", structField = true, doc = SkylarkRuleContextApi.FILE_DOC)
  StructApi getFile() throws EvalException;

  /** Returns the struct of file lists (see {@code FILES_DOC}). */
  @SkylarkCallable(name = "files", structField = true, doc = SkylarkRuleContextApi.FILES_DOC)
  StructApi getFiles() throws EvalException;

  /** Returns the kind of the rule, such as 'cc_library'. */
  @SkylarkCallable(
      name = "kind",
      structField = true,
      doc = "The kind of a rule, such as 'cc_library'"
  )
  String getRuleClassName() throws EvalException;
}
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.model.impl.net;
import org.eclipse.core.net.proxy.IProxyService;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.app.DBPPlatform;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.net.DBWForwarder;
import org.jkiss.dbeaver.model.net.DBWHandlerConfiguration;
import org.jkiss.dbeaver.model.net.DBWNetworkHandler;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.net.GlobalProxySelector;
import org.jkiss.dbeaver.utils.GeneralUtils;
import java.io.IOException;
import java.net.ProxySelector;
/**
* SOCKS proxy
*/
public class SocksProxyImpl implements DBWNetworkHandler, DBWForwarder {

    private static final Log log = Log.getLog(SocksProxyImpl.class);

    // Handler settings captured at init time; consulted by matchesParameters().
    private DBWHandlerConfiguration configuration;

    @Override
    public DBPConnectionConfiguration initializeHandler(DBRProgressMonitor monitor, DBPPlatform platform, DBWHandlerConfiguration configuration, DBPConnectionConfiguration connectionInfo) throws DBException, IOException {
        this.configuration = configuration;
        setupProxyHandler();
        // No connection-info rewriting is performed for SOCKS proxying.
        return null;
    }

    @Override
    public void invalidateHandler(DBRProgressMonitor monitor, DBPDataSource dataSource) throws DBException, IOException {
        // Nothing to invalidate.
    }

    @Override
    public boolean matchesParameters(String host, int port) {
        // Match only when both the configured proxy host and port agree.
        return host.equals(configuration.getStringProperty(SocksConstants.PROP_HOST))
            && configuration.getIntProperty(SocksConstants.PROP_PORT) == port;
    }

    private static void activateProxyService() {
        try {
            // Referencing IProxyService here presumably forces the optional
            // Eclipse proxy bundle to load; the catch handles its absence.
            log.debug("Proxy service '" + IProxyService.class.getName() + "' loaded");
        } catch (Throwable e) {
            log.debug("Proxy service not found");
        }
    }

    private static void setupProxyHandler() {
        if (ProxySelector.getDefault() instanceof GlobalProxySelector) {
            // Our selector is already installed; nothing to do.
            return;
        }
        activateProxyService();
        // Init default network settings
        ProxySelector selector = GeneralUtils.adapt(DBWorkbench.getPlatform(), ProxySelector.class);
        if (selector == null) {
            // Fall back to wrapping whatever selector is currently the default.
            selector = new GlobalProxySelector(ProxySelector.getDefault());
        }
        ProxySelector.setDefault(selector);
    }
}
|
package org.deeplearning4j.nn.layers.variational;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.MaskState;
import org.deeplearning4j.nn.api.layers.LayerConstraint;
import org.deeplearning4j.nn.conf.CacheMode;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.variational.CompositeReconstructionDistribution;
import org.deeplearning4j.nn.conf.layers.variational.LossFunctionWrapper;
import org.deeplearning4j.nn.conf.layers.variational.ReconstructionDistribution;
import org.deeplearning4j.nn.gradient.DefaultGradient;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.params.VariationalAutoencoderParamInitializer;
import org.deeplearning4j.optimize.Solver;
import org.deeplearning4j.optimize.api.ConvexOptimizer;
import org.deeplearning4j.optimize.api.IterationListener;
import org.deeplearning4j.optimize.api.TrainingListener;
import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.activations.impl.ActivationIdentity;
import org.nd4j.linalg.api.blas.Level1;
import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.nd4j.linalg.primitives.Pair;
import java.util.*;
import static org.deeplearning4j.nn.params.VariationalAutoencoderParamInitializer.BIAS_KEY_SUFFIX;
import static org.deeplearning4j.nn.params.VariationalAutoencoderParamInitializer.WEIGHT_KEY_SUFFIX;
/**
* Variational Autoencoder layer
* <p>
* See: Kingma & Welling, 2013: Auto-Encoding Variational Bayes - https://arxiv.org/abs/1312.6114
* <p>
* This implementation allows multiple encoder and decoder layers, the number and sizes of which can be set independently.
* <p>
* A note on scores during pretraining: This implementation minimizes the negative of the variational lower bound objective
* as described in Kingma & Welling; the mathematics in that paper is based on maximization of the variational lower bound instead.
* Thus, scores reported during pretraining in DL4J are the negative of the variational lower bound equation in the paper.
* The backpropagation and learning procedure is otherwise as described there.
*
* @author Alex Black
*/
public class VariationalAutoencoder implements Layer {
protected INDArray input;
protected INDArray paramsFlattened;
protected INDArray gradientsFlattened;
protected Map<String, INDArray> params;
@Getter
protected transient Map<String, INDArray> gradientViews;
protected NeuralNetConfiguration conf;
protected double score = 0.0;
protected ConvexOptimizer optimizer;
protected Gradient gradient;
protected Collection<IterationListener> iterationListeners = new ArrayList<>();
protected Collection<TrainingListener> trainingListeners = null;
protected int index = 0;
protected INDArray maskArray;
protected Solver solver;
protected int[] encoderLayerSizes;
protected int[] decoderLayerSizes;
protected ReconstructionDistribution reconstructionDistribution;
protected IActivation pzxActivationFn;
protected int numSamples;
protected CacheMode cacheMode = CacheMode.NONE;
protected boolean zeroedPretrainParamGradients = false;
protected Map<String,INDArray> weightNoiseParams = new HashMap<>();
@Getter @Setter
protected int iterationCount;
@Getter @Setter
protected int epochCount;
public VariationalAutoencoder(NeuralNetConfiguration conf) {
this.conf = conf;
this.encoderLayerSizes =
((org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) conf.getLayer())
.getEncoderLayerSizes();
this.decoderLayerSizes =
((org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) conf.getLayer())
.getDecoderLayerSizes();
this.reconstructionDistribution =
((org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) conf.getLayer())
.getOutputDistribution();
this.pzxActivationFn = ((org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) conf.getLayer())
.getPzxActivationFn();
this.numSamples = ((org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) conf.getLayer())
.getNumSamples();
}
// Convenience accessor: this layer's configuration, cast to the VAE layer-conf type.
protected org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder layerConf() {
    return (org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) conf().getLayer();
}
@Override
public void setCacheMode(CacheMode mode) {
    // A null mode is treated as "no caching".
    this.cacheMode = (mode == null ? CacheMode.NONE : mode);
}
// Human-readable identifier for this layer, used in exception messages.
protected String layerId() {
    String layerName = this.conf().getLayer().getLayerName();
    String shownName = (layerName == null ? "\"\"" : layerName);
    return "(layer name: " + shownName + ", layer index: " + index + ")";
}
/**
 * Init the model. No-op: this implementation performs no work here.
 */
@Override
public void init() {
}
// Externally-applied gradient updates are not supported by this layer.
@Override
public void update(Gradient gradient) {
    throw new UnsupportedOperationException("Not supported " + layerId());
}

@Override
public void update(INDArray gradient, String paramType) {
    throw new UnsupportedOperationException("Not supported " + layerId());
}
// Returns the score as set by the last call to computeGradientAndScore().
@Override
public double score() {
    return score;
}
/**
 * Returns the named parameter, applying weight noise if one is configured.
 * During training, noisy parameters generated for the forward pass are cached
 * in {@code weightNoiseParams} so the exact same values are reused in backprop
 * (the cache is cleared at the end of computeGradientAndScore()). Without a
 * configured weight noise, the raw parameter is returned unchanged.
 */
protected INDArray getParamWithNoise(String param, boolean training){
    INDArray p;
    if(layerConf().getWeightNoise() != null){
        if(training && weightNoiseParams.size() > 0 ){
            //Re-use these weights for both forward pass and backprop - don't want to use 2 different params here
            //These should be cleared during backprop
            return weightNoiseParams.get(param);
        } else {
            // Generate the (possibly noisy) parameter outside any workspace so it outlives the current scope.
            try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
                p = layerConf().getWeightNoise().getParameter(this, param, getIterationCount(), getEpochCount(), training);
            }
        }
        if(training){
            //Store for re-use in backprop
            weightNoiseParams.put(param, p);
        }
    } else {
        return getParam(param);
    }
    return p;
}
/**
 * Computes the pretraining score (the negative variational lower bound,
 * averaged over {@code numSamples} latent samples) and the gradients for all
 * encoder, p(z|x), decoder and p(x|z) parameters, storing the results in
 * {@code this.score} and {@code this.gradient}. Gradients across multiple
 * samples are accumulated in-place via gemm/axpy with a 1/numSamples scale.
 */
@Override
public void computeGradientAndScore() {
    //Forward pass through the encoder and mean for P(Z|X)
    VAEFwdHelper fwd = doForward(true, true);
    IActivation afn = layerConf().getActivationFn();
    //Forward pass through logStd^2 for P(Z|X)
    INDArray pzxLogStd2W = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W, true);
    INDArray pzxLogStd2b = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_B, true);
    INDArray pzxLogStd2Pre = fwd.encoderActivations[fwd.encoderActivations.length - 1].mmul(pzxLogStd2W)
            .addiRowVector(pzxLogStd2b);
    INDArray meanZ = fwd.pzxMeanPreOut.dup();
    INDArray logStdev2Z = pzxLogStd2Pre.dup();
    pzxActivationFn.getActivation(meanZ, true);
    pzxActivationFn.getActivation(logStdev2Z, true);
    INDArray pzxSigmaSquared = Transforms.exp(logStdev2Z, true);
    INDArray pzxSigma = Transforms.sqrt(pzxSigmaSquared, true);
    int minibatch = input.size(0);
    int size = fwd.pzxMeanPreOut.size(1);
    Map<String, INDArray> gradientMap = new HashMap<>();
    double scaleFactor = 1.0 / numSamples;
    Level1 blasL1 = Nd4j.getBlasWrapper().level1();
    // Cached activation derivatives for the encoder; only needed when >1 sample.
    INDArray[] encoderActivationDerivs = (numSamples > 1 ? new INDArray[encoderLayerSizes.length] : null);
    for (int l = 0; l < numSamples; l++) { //Default (and in most cases) numSamples == 1
        double gemmCConstant = (l == 0 ? 0.0 : 1.0); //0 for first one (to get rid of previous buffer data), otherwise 1 (for adding)
        INDArray e = Nd4j.randn(minibatch, size);
        INDArray z = pzxSigma.mul(e).addi(meanZ); //z = mu + sigma * e, with e ~ N(0,1)
        //Need to do forward pass through decoder layers
        int nDecoderLayers = decoderLayerSizes.length;
        INDArray current = z;
        INDArray[] decoderPreOut = new INDArray[nDecoderLayers]; //Need pre-out for backprop later
        INDArray[] decoderActivations = new INDArray[nDecoderLayers];
        for (int i = 0; i < nDecoderLayers; i++) {
            String wKey = "d" + i + WEIGHT_KEY_SUFFIX;
            String bKey = "d" + i + BIAS_KEY_SUFFIX;
            INDArray weights = getParamWithNoise(wKey, true);
            INDArray bias = getParamWithNoise(bKey, true);
            current = current.mmul(weights).addiRowVector(bias);
            decoderPreOut[i] = current.dup();
            afn.getActivation(current, true);
            decoderActivations[i] = current;
        }
        INDArray pxzw = getParamWithNoise(VariationalAutoencoderParamInitializer.PXZ_W, true);
        INDArray pxzb = getParamWithNoise(VariationalAutoencoderParamInitializer.PXZ_B, true);
        if (l == 0) {
            //Need to add other component of score, in addition to negative log probability
            //Note the negative here vs. the equation in Kingma & Welling: this is because we are minimizing the negative of
            // variational lower bound, rather than maximizing the variational lower bound
            //Unlike log probability (which is averaged over samples) this should be calculated just once
            INDArray temp = meanZ.mul(meanZ).addi(pzxSigmaSquared).negi();
            temp.addi(logStdev2Z).addi(1.0);
            double scorePt1 = -0.5 / minibatch * temp.sumNumber().doubleValue();
            this.score = scorePt1 + (calcL1(false) + calcL2(false)) / minibatch;
        }
        INDArray pxzDistributionPreOut = current.mmul(pxzw).addiRowVector(pxzb);
        double logPTheta = reconstructionDistribution.negLogProbability(input, pxzDistributionPreOut, true);
        this.score += logPTheta / numSamples;
        //If we have any training listeners (for example, for UI StatsListener - pass on activations)
        if (trainingListeners != null && !trainingListeners.isEmpty() && l == 0) { //Note: only doing this on the *first* sample
            Map<String, INDArray> activations = new LinkedHashMap<>();
            for (int i = 0; i < fwd.encoderActivations.length; i++) {
                activations.put("e" + i, fwd.encoderActivations[i]);
            }
            activations.put(VariationalAutoencoderParamInitializer.PZX_PREFIX, z);
            for (int i = 0; i < decoderActivations.length; i++) {
                activations.put("d" + i, decoderActivations[i]);
            }
            activations.put(VariationalAutoencoderParamInitializer.PXZ_PREFIX,
                    reconstructionDistribution.generateAtMean(pxzDistributionPreOut));
            if (!trainingListeners.isEmpty()) {
                try (MemoryWorkspace workspace = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
                    for (TrainingListener tl : trainingListeners) {
                        tl.onForwardPass(this, activations);
                    }
                }
            }
        }
        /////////////////////////////////////////////////////////
        //Backprop
        //First: calculate the gradients at the input to the reconstruction distribution
        INDArray dpdpxz = reconstructionDistribution.gradient(input, pxzDistributionPreOut);
        //Do backprop for output reconstruction distribution -> final decoder layer
        INDArray dLdxzw = gradientViews.get(VariationalAutoencoderParamInitializer.PXZ_W);
        INDArray dLdxzb = gradientViews.get(VariationalAutoencoderParamInitializer.PXZ_B);
        INDArray lastDecActivations = decoderActivations[decoderActivations.length - 1];
        Nd4j.gemm(lastDecActivations, dpdpxz, dLdxzw, true, false, scaleFactor, gemmCConstant);
        if (l == 0) {
            dpdpxz.sum(dLdxzb, 0); //dLdxzb array is initialized/zeroed first in sum op
            if (numSamples > 1) {
                dLdxzb.muli(scaleFactor);
            }
        } else {
            blasL1.axpy(dLdxzb.length(), scaleFactor, dpdpxz.sum(0), dLdxzb);
        }
        gradientMap.put(VariationalAutoencoderParamInitializer.PXZ_W, dLdxzw);
        gradientMap.put(VariationalAutoencoderParamInitializer.PXZ_B, dLdxzb);
        INDArray epsilon = pxzw.mmul(dpdpxz.transpose()).transpose();
        //Next: chain derivatives backwards through the decoder layers
        for (int i = nDecoderLayers - 1; i >= 0; i--) {
            String wKey = "d" + i + WEIGHT_KEY_SUFFIX;
            String bKey = "d" + i + BIAS_KEY_SUFFIX;
            INDArray currentDelta = afn.backprop(decoderPreOut[i], epsilon).getFirst(); //TODO activation functions with params
            INDArray weights = getParamWithNoise(wKey, true);
            INDArray dLdW = gradientViews.get(wKey);
            INDArray dLdB = gradientViews.get(bKey);
            INDArray actInput;
            if (i == 0) {
                actInput = z;
            } else {
                actInput = decoderActivations[i - 1];
            }
            Nd4j.gemm(actInput, currentDelta, dLdW, true, false, scaleFactor, gemmCConstant);
            if (l == 0) {
                currentDelta.sum(dLdB, 0);
                if (numSamples > 1) {
                    dLdB.muli(scaleFactor);
                }
            } else {
                blasL1.axpy(dLdB.length(), scaleFactor, currentDelta.sum(0), dLdB);
            }
            gradientMap.put(wKey, dLdW);
            gradientMap.put(bKey, dLdB);
            epsilon = weights.mmul(currentDelta.transpose()).transpose();
        }
        //Do backprop through p(z|x)
        INDArray eZXMeanW = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_MEAN_W, true);
        INDArray eZXLogStdev2W = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W, true);
        INDArray dLdz = epsilon;
        //If we were maximizing the equation in Kinga and Welling, this would be a .sub(meanZ). Here: we are minimizing the negative instead
        INDArray dLdmu = dLdz.add(meanZ);
        INDArray dLdLogSigma2 = dLdz.mul(e).muli(pzxSigma).addi(pzxSigmaSquared).subi(1).muli(0.5);
        INDArray dLdPreMu = pzxActivationFn.backprop(fwd.getPzxMeanPreOut().dup(), dLdmu).getFirst();
        INDArray dLdPreLogSigma2 = pzxActivationFn.backprop(pzxLogStd2Pre.dup(), dLdLogSigma2).getFirst();
        //Weight gradients for weights feeding into p(z|x)
        INDArray lastEncoderActivation = fwd.encoderActivations[fwd.encoderActivations.length - 1];
        INDArray dLdZXMeanW = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_MEAN_W);
        INDArray dLdZXLogStdev2W = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W);
        Nd4j.gemm(lastEncoderActivation, dLdPreMu, dLdZXMeanW, true, false, scaleFactor, gemmCConstant);
        Nd4j.gemm(lastEncoderActivation, dLdPreLogSigma2, dLdZXLogStdev2W, true, false, scaleFactor, gemmCConstant);
        //Bias gradients for p(z|x)
        INDArray dLdZXMeanb = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_MEAN_B);
        INDArray dLdZXLogStdev2b = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_B);
        //If we were maximizing the equation in Kinga and Welling, this would be a .sub(meanZ). Here: we are minimizing the negative instead
        if (l == 0) {
            dLdZXMeanb.assign(pzxActivationFn.backprop(fwd.getPzxMeanPreOut().dup(), dLdz.add(meanZ)).getFirst()
                    .sum(0));
            dLdPreLogSigma2.sum(dLdZXLogStdev2b, 0);
            if (numSamples > 1) {
                dLdZXMeanb.muli(scaleFactor);
                dLdZXLogStdev2b.muli(scaleFactor);
            }
        } else {
            blasL1.axpy(dLdZXMeanb.length(), scaleFactor, pzxActivationFn
                    .backprop(fwd.getPzxMeanPreOut().dup(), dLdz.add(meanZ)).getFirst().sum(0), dLdZXMeanb);
            blasL1.axpy(dLdZXLogStdev2b.length(), scaleFactor, dLdPreLogSigma2.sum(0), dLdZXLogStdev2b);
        }
        gradientMap.put(VariationalAutoencoderParamInitializer.PZX_MEAN_W, dLdZXMeanW);
        gradientMap.put(VariationalAutoencoderParamInitializer.PZX_MEAN_B, dLdZXMeanb);
        gradientMap.put(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W, dLdZXLogStdev2W);
        gradientMap.put(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_B, dLdZXLogStdev2b);
        //Epsilon (dL/dActivation) at output of the last encoder layer:
        epsilon = Nd4j.gemm(dLdPreMu, eZXMeanW, false, true); //Equivalent to: epsilon = eZXMeanW.mmul(dLdPreMu.transpose()).transpose(); using (AxB^T)^T = BxA^T
        //Next line: equivalent to epsilon.addi(eZXLogStdev2W.mmul(dLdPreLogSigma2.transpose()).transpose()); using: (AxB^T)^T = BxA^T
        Nd4j.gemm(dLdPreLogSigma2, eZXLogStdev2W, epsilon, false, true, 1.0, 1.0);
        //Backprop through encoder:
        int nEncoderLayers = encoderLayerSizes.length;
        for (int i = nEncoderLayers - 1; i >= 0; i--) {
            String wKey = "e" + i + WEIGHT_KEY_SUFFIX;
            String bKey = "e" + i + BIAS_KEY_SUFFIX;
            INDArray weights = getParamWithNoise(wKey, true);
            INDArray dLdW = gradientViews.get(wKey);
            INDArray dLdB = gradientViews.get(bKey);
            INDArray preOut = fwd.encoderPreOuts[i];
            INDArray currentDelta;
            if (numSamples > 1) {
                //Re-use sigma-prime values for the encoder - these don't change based on multiple samples,
                // only the errors do
                if (l == 0) {
                    //Not the most elegent implementation (with the ND4j.ones()), but it works...
                    encoderActivationDerivs[i] =
                            afn.backprop(fwd.encoderPreOuts[i], Nd4j.ones(fwd.encoderPreOuts[i].shape()))
                                    .getFirst();
                }
                currentDelta = epsilon.muli(encoderActivationDerivs[i]);
            } else {
                currentDelta = afn.backprop(preOut, epsilon).getFirst();
            }
            INDArray actInput;
            if (i == 0) {
                actInput = input;
            } else {
                actInput = fwd.encoderActivations[i - 1];
            }
            Nd4j.gemm(actInput, currentDelta, dLdW, true, false, scaleFactor, gemmCConstant);
            if (l == 0) {
                currentDelta.sum(dLdB, 0);
                if (numSamples > 1) {
                    dLdB.muli(scaleFactor);
                }
            } else {
                blasL1.axpy(dLdB.length(), scaleFactor, currentDelta.sum(0), dLdB);
            }
            gradientMap.put(wKey, dLdW);
            gradientMap.put(bKey, dLdB);
            epsilon = weights.mmul(currentDelta.transpose()).transpose();
        }
    }
    //Insert the gradients into the Gradient map in the correct order, in case we need to flatten the gradient later
    // to match the parameters iteration order
    Gradient gradient = new DefaultGradient(gradientsFlattened);
    Map<String, INDArray> g = gradient.gradientForVariable();
    for (int i = 0; i < encoderLayerSizes.length; i++) {
        String w = "e" + i + VariationalAutoencoderParamInitializer.WEIGHT_KEY_SUFFIX;
        g.put(w, gradientMap.get(w));
        String b = "e" + i + VariationalAutoencoderParamInitializer.BIAS_KEY_SUFFIX;
        g.put(b, gradientMap.get(b));
    }
    g.put(VariationalAutoencoderParamInitializer.PZX_MEAN_W,
            gradientMap.get(VariationalAutoencoderParamInitializer.PZX_MEAN_W));
    g.put(VariationalAutoencoderParamInitializer.PZX_MEAN_B,
            gradientMap.get(VariationalAutoencoderParamInitializer.PZX_MEAN_B));
    g.put(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W,
            gradientMap.get(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W));
    g.put(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_B,
            gradientMap.get(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_B));
    for (int i = 0; i < decoderLayerSizes.length; i++) {
        String w = "d" + i + VariationalAutoencoderParamInitializer.WEIGHT_KEY_SUFFIX;
        g.put(w, gradientMap.get(w));
        String b = "d" + i + VariationalAutoencoderParamInitializer.BIAS_KEY_SUFFIX;
        g.put(b, gradientMap.get(b));
    }
    g.put(VariationalAutoencoderParamInitializer.PXZ_W,
            gradientMap.get(VariationalAutoencoderParamInitializer.PXZ_W));
    g.put(VariationalAutoencoderParamInitializer.PXZ_B,
            gradientMap.get(VariationalAutoencoderParamInitializer.PXZ_B));
    // Cached noisy parameters were reused across forward + backprop above; clear them now.
    weightNoiseParams.clear();
    this.gradient = gradient;
}
@Override
public void accumulateScore(double accum) {
    //No-op: score accumulation is not used by this layer implementation
}
@Override
public INDArray params() {
    //All parameters are backed by a single flattened view array
    return this.paramsFlattened;
}
@Override
public int numParams() {
    //Count all parameters, including pretrain-only (decoder) parameters
    return numParams(false);
}
@Override
public int numParams(boolean backwards) {
    //Sum the lengths of all parameter arrays; when 'backwards' is set, pretrain-only
    // parameters (decoder + p(x|z) params) are excluded from the count
    int count = 0;
    for (Map.Entry<String, INDArray> entry : params.entrySet()) {
        if (!backwards || !isPretrainParam(entry.getKey())) {
            count += entry.getValue().length();
        }
    }
    return count;
}
@Override
public void setParams(INDArray params) {
    //Copy the provided values into the existing flattened view (keeps all per-param views valid)
    long expected = this.paramsFlattened.length();
    if (params.length() != expected) {
        throw new IllegalArgumentException("Cannot set parameters: expected parameters vector of length "
                + this.paramsFlattened.length() + " but got parameters array of length " + params.length()
                + " " + layerId());
    }
    this.paramsFlattened.assign(params);
}
@Override
public void setParamsViewArray(INDArray params) {
    //Replace the backing view array itself (no copy); length is validated against the param count
    boolean lengthMismatch = this.params != null && params.length() != numParams();
    if (lengthMismatch) {
        throw new IllegalArgumentException("Invalid input: expect params of length " + numParams()
                + ", got params of length " + params.length() + " " + layerId());
    }
    this.paramsFlattened = params;
}
@Override
public INDArray getGradientsViewArray() {
    //Single flattened array backing all per-parameter gradient views
    return this.gradientsFlattened;
}
@Override
public void setBackpropGradientsViewArray(INDArray gradients) {
    //Install a new flattened gradient array and rebuild the per-parameter views into it
    boolean lengthMismatch = this.params != null && gradients.length() != numParams();
    if (lengthMismatch) {
        throw new IllegalArgumentException("Invalid input: expect gradients array of length " + numParams()
                + ", got gradient array of length of length " + gradients.length() + " " + layerId());
    }
    this.gradientsFlattened = gradients;
    this.gradientViews = conf.getLayer().initializer().getGradientsFromFlattened(conf, gradients);
}
@Override
public void fit(INDArray data) {
    //Set the input, then delegate to the no-arg fit (unsupervised pretraining)
    setInput(data);
    fit();
}
@Override
public void iterate(INDArray input) {
    //Legacy entry point: a single iteration is implemented as a full fit call here
    fit(input);
}
@Override
public Gradient gradient() {
    //Gradient as computed by the most recent computeGradientAndScore/backprop call
    return this.gradient;
}
@Override
public Pair<Gradient, Double> gradientAndScore() {
    //Bundle the current gradient with the current score
    Gradient g = gradient();
    return new Pair<>(g, score());
}
@Override
public int batchSize() {
    //Minibatch size = number of rows (dimension 0) of the current input
    return this.input.size(0);
}
@Override
public NeuralNetConfiguration conf() {
    //Layer configuration accessor
    return this.conf;
}
@Override
public void setConf(NeuralNetConfiguration conf) {
    //Replace the layer configuration
    this.conf = conf;
}
@Override
public INDArray input() {
    //Current input (features) array, or null if cleared
    return this.input;
}
@Override
public void validateInput() {
    //Input validation is not implemented for this layer
    throw new UnsupportedOperationException("Not supported " + layerId());
}
@Override
public ConvexOptimizer getOptimizer() {
    //Optimizer is set by fit() from the solver
    return this.optimizer;
}
@Override
public INDArray getParam(String param) {
    //Look up the parameter view by key; returns null for unknown keys
    return this.params.get(param);
}
@Override
public void initParams() {
    //Deprecated API; parameters are initialized via the param initializer instead
    throw new UnsupportedOperationException("Deprecated " + layerId());
}
@Override
public Map<String, INDArray> paramTable() {
    //Defensive copy of the map structure; the INDArray views themselves are shared
    Map<String, INDArray> copy = new LinkedHashMap<>(params);
    return copy;
}
@Override
public Map<String, INDArray> paramTable(boolean backpropParamsOnly) {
    //Copy of the parameter table, optionally excluding pretrain-only (decoder) parameters
    Map<String, INDArray> out = new LinkedHashMap<>();
    for (Map.Entry<String, INDArray> entry : params.entrySet()) {
        if (backpropParamsOnly && isPretrainParam(entry.getKey())) {
            continue;
        }
        out.put(entry.getKey(), entry.getValue());
    }
    return out;
}
@Override
public void setParamTable(Map<String, INDArray> paramTable) {
    //Replace the parameter map directly (no copy)
    this.params = paramTable;
}
@Override
public void setParam(String key, INDArray val) {
    //Copy the given values into the existing parameter view for the key.
    //Note: the original implementation called paramTable() twice, each call building a fresh
    // LinkedHashMap copy of the entire parameter map just for a containsKey + get. Since
    // paramTable() shares the same INDArray views as the 'params' field, querying the field
    // directly is behaviorally identical and avoids both copies.
    if (params.containsKey(key)) {
        params.get(key).assign(val);
    } else {
        throw new IllegalArgumentException("Unknown parameter: " + key + " - " + layerId());
    }
}
@Override
public void clear() {
    //Drop references to the current input and mask so they can be garbage collected
    this.maskArray = null;
    this.input = null;
}
@Override
public void applyConstraints(int iteration, int epoch) {
    //Apply any configured parameter constraints (e.g., max-norm) after a parameter update
    if (layerConf().getConstraints() == null) {
        return;
    }
    for (LayerConstraint constraint : layerConf().getConstraints()) {
        constraint.applyConstraint(this, iteration, epoch);
    }
}
public boolean isPretrainParam(String param) {
    //Encoder params ("e...") and p(z|x) mean params are used in backprop too;
    // everything else (logStdev2, decoder, p(x|z)) is pretrain-only
    return !param.startsWith("e")
            && !param.startsWith(VariationalAutoencoderParamInitializer.PZX_MEAN_PREFIX);
}
@Override
public double calcL2(boolean backpropParamsOnly) {
    //L2 regularization term: sum over params of 0.5 * l2Coeff * ||w||_2^2
    double sum = 0.0;
    for (Map.Entry<String, INDArray> entry : paramTable().entrySet()) {
        double coeff = conf().getL2ByParam(entry.getKey());
        boolean skip = coeff <= 0.0 || (backpropParamsOnly && isPretrainParam(entry.getKey()));
        if (!skip) {
            double norm = entry.getValue().norm2Number().doubleValue();
            sum += 0.5 * coeff * norm * norm;
        }
    }
    return sum;
}
@Override
public double calcL1(boolean backpropParamsOnly) {
    //L1 regularization term: sum over params of l1Coeff * ||w||_1
    double sum = 0.0;
    for (Map.Entry<String, INDArray> entry : paramTable().entrySet()) {
        double coeff = conf().getL1ByParam(entry.getKey());
        boolean skip = coeff <= 0.0 || (backpropParamsOnly && isPretrainParam(entry.getKey()));
        if (!skip) {
            sum += coeff * entry.getValue().norm1Number().doubleValue();
        }
    }
    return sum;
}
@Override
public Type type() {
    //The VAE encoder behaves as a standard feed-forward layer during backprop
    return Type.FEED_FORWARD;
}
/**
 * Backprop gradient for supervised training: propagates the given epsilon back through the
 * p(z|x) mean path and the encoder layers only. Decoder/pretrain parameter gradients are
 * zeroed once (they are not touched by supervised backprop, but share the flattened
 * gradient view array with the backprop params).
 */
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) {
    //One-time zeroing of pretrain-only gradient views, so stale pretrain gradients in the
    // shared flattened array cannot leak into a supervised update
    if (!zeroedPretrainParamGradients) {
        for (Map.Entry<String, INDArray> entry : gradientViews.entrySet()) {
            if (isPretrainParam(entry.getKey())) {
                entry.getValue().assign(0);
            }
        }
        zeroedPretrainParamGradients = true;
    }
    Gradient gradient = new DefaultGradient();
    VAEFwdHelper fwd = doForward(true, true);
    //Delta at the p(z|x) mean pre-activation: backprop epsilon through the pzx activation fn
    INDArray currentDelta = pzxActivationFn.backprop(fwd.pzxMeanPreOut, epsilon).getFirst();
    //Finally, calculate mean value:
    INDArray meanW = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_MEAN_W, true);
    INDArray dLdMeanW = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_MEAN_W); //f order
    INDArray lastEncoderActivation = fwd.encoderActivations[fwd.encoderActivations.length - 1];
    //dL/dW = lastActivation^T * delta, written directly into the gradient view (beta=0 overwrites)
    Nd4j.gemm(lastEncoderActivation, currentDelta, dLdMeanW, true, false, 1.0, 0.0);
    INDArray dLdMeanB = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_MEAN_B);
    currentDelta.sum(dLdMeanB, 0); //dLdMeanB is initialized/zeroed first in sum op
    gradient.gradientForVariable().put(VariationalAutoencoderParamInitializer.PZX_MEAN_W, dLdMeanW);
    gradient.gradientForVariable().put(VariationalAutoencoderParamInitializer.PZX_MEAN_B, dLdMeanB);
    //Epsilon at the output of the last encoder layer
    epsilon = meanW.mmul(currentDelta.transpose()).transpose();
    int nEncoderLayers = encoderLayerSizes.length;
    IActivation afn = layerConf().getActivationFn();
    //Backprop through encoder layers, last to first
    for (int i = nEncoderLayers - 1; i >= 0; i--) {
        String wKey = "e" + i + WEIGHT_KEY_SUFFIX;
        String bKey = "e" + i + BIAS_KEY_SUFFIX;
        INDArray weights = getParamWithNoise(wKey, true);
        INDArray dLdW = gradientViews.get(wKey);
        INDArray dLdB = gradientViews.get(bKey);
        INDArray preOut = fwd.encoderPreOuts[i];
        currentDelta = afn.backprop(preOut, epsilon).getFirst();
        //Input to this layer's affine transform: the network input for layer 0,
        // otherwise the previous encoder layer's activations
        INDArray actInput;
        if (i == 0) {
            actInput = input;
        } else {
            actInput = fwd.encoderActivations[i - 1];
        }
        Nd4j.gemm(actInput, currentDelta, dLdW, true, false, 1.0, 0.0);
        currentDelta.sum(dLdB, 0); //dLdB is initialized/zeroed first in sum op
        gradient.gradientForVariable().put(wKey, dLdW);
        gradient.gradientForVariable().put(bKey, dLdB);
        epsilon = weights.mmul(currentDelta.transpose()).transpose();
    }
    return new Pair<>(gradient, epsilon);
}
@Override
public INDArray preOutput(INDArray x) {
    //Default to test/inference mode
    return preOutput(x, TrainingMode.TEST);
}
@Override
public INDArray preOutput(INDArray x, TrainingMode training) {
    //Translate the enum to the boolean overload
    boolean isTraining = training == TrainingMode.TRAIN;
    return preOutput(x, isTraining);
}
@Override
public INDArray preOutput(INDArray x, boolean training) {
    //Store the input, then compute the pre-output from the stored state
    setInput(x);
    return preOutput(training);
}
public INDArray preOutput(boolean training) {
    //Pre-output is the pre-activation of the p(z|x) mean (forward pass through the encoder)
    return doForward(training, false).pzxMeanPreOut;
}
/**
 * Holder for the results of the encoder forward pass.
 * Note: field order matters — Lombok's @AllArgsConstructor generates a constructor
 * in declaration order (encoderPreOuts, pzxMeanPreOut, encoderActivations).
 */
@AllArgsConstructor
@Data
private static class VAEFwdHelper {
    //Pre-activation values for each encoder layer; only populated when forBackprop == true
    private INDArray[] encoderPreOuts;
    //Pre-activation of the p(z|x) mean output
    private INDArray pzxMeanPreOut;
    //Post-activation values for each encoder layer
    private INDArray[] encoderActivations;
}
/**
 * Forward pass through the encoder layers and the p(z|x) mean transform.
 *
 * @param training    whether to apply training-time weight noise (via getParamWithNoise)
 * @param forBackprop if true, duplicate and keep each layer's pre-activation values
 *                    (needed for activation-function backprop)
 * @return helper holding pre-outs (if requested), the p(z|x) mean pre-activation,
 *         and each encoder layer's activations
 */
private VAEFwdHelper doForward(boolean training, boolean forBackprop) {
    if (input == null) {
        throw new IllegalStateException("Cannot do forward pass with null input " + layerId());
    }
    //TODO input validation
    int nEncoderLayers = encoderLayerSizes.length;
    INDArray[] encoderPreOuts = new INDArray[encoderLayerSizes.length];
    INDArray[] encoderActivations = new INDArray[encoderLayerSizes.length];
    INDArray current = input;
    for (int i = 0; i < nEncoderLayers; i++) {
        String wKey = "e" + i + WEIGHT_KEY_SUFFIX;
        String bKey = "e" + i + BIAS_KEY_SUFFIX;
        INDArray weights = getParamWithNoise(wKey, training);
        INDArray bias = getParamWithNoise(bKey, training);
        //Affine transform: current = current * W + b (bias broadcast over rows)
        current = current.mmul(weights).addiRowVector(bias);
        if (forBackprop) {
            //dup() because the activation function below modifies 'current' in place
            encoderPreOuts[i] = current.dup();
        }
        //Activation applied in place
        layerConf().getActivationFn().getActivation(current, training);
        encoderActivations[i] = current;
    }
    //Finally, calculate mean value:
    INDArray mW = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_MEAN_W, training);
    INDArray mB = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_MEAN_B, training);
    INDArray pzxMean = current.mmul(mW).addiRowVector(mB);
    return new VAEFwdHelper(encoderPreOuts, pzxMean, encoderActivations);
}
@Override
public INDArray activate(TrainingMode training) {
    //Translate the enum to the boolean overload
    boolean isTraining = training == TrainingMode.TRAIN;
    return activate(isTraining);
}
@Override
public INDArray activate(INDArray input, TrainingMode training) {
    //Was a stub returning null, inconsistent with every other activate(...) overload in this
    // class, which all delegate. Delegate to the boolean overload (sets the input, then
    // computes the p(z|x) mean activations) for a consistent, usable result.
    return activate(input, training == TrainingMode.TRAIN);
}
@Override
public INDArray activate(boolean training) {
    //Activation of this layer = activated mean values of the approximate posterior p(z|x)
    INDArray meanZ = preOutput(training);
    pzxActivationFn.getActivation(meanZ, training); //applied in place
    return meanZ;
}
@Override
public INDArray activate(INDArray input, boolean training) {
    //Store the input, then activate from the stored state
    setInput(input);
    return activate(training);
}
@Override
public INDArray activate() {
    //Default to inference mode
    return activate(false);
}
@Override
public INDArray activate(INDArray input) {
    //Store the input, then activate with the default (inference) mode
    setInput(input);
    return activate();
}
@Override
public Layer transpose() {
    //Transposition is not meaningful for a VAE layer
    throw new UnsupportedOperationException("Not supported " + layerId());
}
@Override
public Layer clone() {
    //Cloning is not implemented for this layer
    throw new UnsupportedOperationException("Not yet implemented " + layerId());
}
@Override
public Collection<IterationListener> getListeners() {
    //Return a defensive copy of the listener list, or null if none have ever been set
    return iterationListeners == null ? null : new ArrayList<>(iterationListeners);
}
@Override
public void setListeners(IterationListener... listeners) {
    //Delegate the varargs form to the collection form
    setListeners(Arrays.<IterationListener>asList(listeners));
}
@Override
public void setListeners(Collection<IterationListener> listeners) {
    //Reset (or lazily create) both listener lists, then register the new listeners.
    //TrainingListener instances are additionally tracked in their own list.
    if (iterationListeners == null) {
        iterationListeners = new ArrayList<>();
    } else {
        iterationListeners.clear();
    }
    if (trainingListeners == null) {
        trainingListeners = new ArrayList<>();
    } else {
        trainingListeners.clear();
    }
    if (listeners == null || listeners.isEmpty()) {
        return;
    }
    iterationListeners.addAll(listeners);
    for (IterationListener listener : listeners) {
        if (listener instanceof TrainingListener) {
            trainingListeners.add((TrainingListener) listener);
        }
    }
}
/**
 * Appends the given IterationListeners to any already registered on this layer.
 * If no listeners have been set yet, this behaves like {@link #setListeners(IterationListener...)}.
 *
 * @param listeners listeners to append
 */
@Override
public void addListeners(IterationListener... listeners) {
    if (this.iterationListeners == null) {
        //Nothing registered yet: initialize both listener lists via setListeners
        setListeners(listeners);
    } else {
        Collections.addAll(iterationListeners, listeners);
    }
}
@Override
public void setIndex(int index) {
    //Layer index within the enclosing network
    this.index = index;
}
@Override
public int getIndex() {
    //Layer index within the enclosing network
    return this.index;
}
@Override
public void setInput(INDArray input) {
    //Store the input reference (no copy)
    this.input = input;
}
@Override
public void migrateInput() {
    //Migrate input and mask arrays to the current memory workspace, if present
    if (input != null) {
        input = input.migrate(true);
    }
    if (maskArray != null) {
        maskArray = maskArray.migrate(true);
    }
}
@Override
public void setInputMiniBatchSize(int size) {
    //No-op: minibatch size is derived from the input array, not stored separately
}
@Override
public int getInputMiniBatchSize() {
    //Minibatch size = number of rows of the current input
    return this.input.size(0);
}
@Override
public void setMaskArray(INDArray maskArray) {
    //Store the mask reference (no copy)
    this.maskArray = maskArray;
}
@Override
public INDArray getMaskArray() {
    //Current mask array, or null if none set
    return this.maskArray;
}
@Override
public boolean isPretrainLayer() {
    //VAEs are trained via unsupervised layerwise pretraining
    return true;
}
@Override
public void clearNoiseWeightParams() {
    //Discard any cached noisy parameter copies
    weightNoiseParams.clear();
}
@Override
public Pair<INDArray, MaskState> feedForwardMaskArray(INDArray maskArray, MaskState currentMaskState,
                int minibatchSize) {
    //Mask propagation is not implemented for this layer
    throw new UnsupportedOperationException("Not yet implemented " + layerId());
}
@Override
public void fit() {
    //Unsupervised pretraining on the currently-set input
    if (input == null) {
        throw new IllegalStateException("Cannot fit layer: layer input is null (not set) " + layerId());
    }
    if (solver == null) {
        //Build the solver outside any active workspace so its state is not workspace-scoped
        try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
            solver = new Solver.Builder().model(this).configure(conf()).listeners(getListeners()).build();
        }
    }
    this.optimizer = solver.getOptimizer();
    solver.optimize();
}
/**
 * Calculate the reconstruction probability, as described in An &amp; Cho, 2015 - "Variational Autoencoder based
 * Anomaly Detection using Reconstruction Probability" (Algorithm 4): "This is essentially the probability of
 * the data being generated from a given latent variable drawn from the approximate posterior distribution."<br>
 * <br>
 * For each example x in the input, calculates p(x). Note that p(x) is a stochastic (Monte-Carlo) estimate of
 * the true p(x); more samples give a lower-variance estimate for the current model parameters.<br>
 * <br>
 * This simply exponentiates the result of {@link #reconstructionLogProbability(INDArray, int)}, which may be
 * more numerically stable to use directly in some cases.<br>
 * <br>
 * Probabilities are computed for the whole minibatch at once, so batching the input is the efficient way to
 * evaluate many examples.
 *
 * @param data       The data to calculate the reconstruction probability for
 * @param numSamples Number of Monte-Carlo samples on which to base the estimate
 * @return Column vector of reconstruction probabilities, one row per example (shape: [numExamples,1])
 */
public INDArray reconstructionProbability(INDArray data, int numSamples) {
    INDArray logProb = reconstructionLogProbability(data, numSamples);
    return Transforms.exp(logProb, false); //exp applied in place on the log-probability array
}
/**
 * Return the log reconstruction probability given the specified number of samples.<br>
 * See {@link #reconstructionLogProbability(INDArray, int)} for more details
 *
 * @param data       The data to calculate the log reconstruction probability
 * @param numSamples Number of samples with which to base the reconstruction probability on.
 * @return Column vector of reconstruction log probabilities for each example (shape: [numExamples,1])
 * @throws IllegalArgumentException      if numSamples is not positive
 * @throws UnsupportedOperationException if the reconstruction distribution is a plain loss
 *                                       function (not probabilistic)
 */
public INDArray reconstructionLogProbability(INDArray data, int numSamples) {
    if (numSamples <= 0) {
        throw new IllegalArgumentException(
                        "Invalid input: numSamples must be > 0. Got: " + numSamples + " " + layerId());
    }
    if (reconstructionDistribution instanceof LossFunctionWrapper) {
        throw new UnsupportedOperationException("Cannot calculate reconstruction log probability when using "
                        + "a LossFunction (via LossFunctionWrapper) instead of a ReconstructionDistribution: ILossFunction "
                        + "instances are not in general probabilistic, hence it is not possible to calculate reconstruction probability "
                        + layerId());
    }
    //Forward pass through the encoder and mean for P(Z|X)
    setInput(data);
    VAEFwdHelper fwd = doForward(true, true);
    IActivation afn = layerConf().getActivationFn();
    //Forward pass through logStd^2 for P(Z|X)
    INDArray pzxLogStd2W = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_W, false);
    INDArray pzxLogStd2b = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_LOGSTD2_B, false);
    INDArray meanZ = fwd.pzxMeanPreOut;
    INDArray logStdev2Z = fwd.encoderActivations[fwd.encoderActivations.length - 1].mmul(pzxLogStd2W)
                    .addiRowVector(pzxLogStd2b);
    //Activations applied in place
    pzxActivationFn.getActivation(meanZ, false);
    pzxActivationFn.getActivation(logStdev2Z, false);
    //sigma = sqrt(exp(log(sigma^2))), computed in place
    INDArray pzxSigma = Transforms.exp(logStdev2Z, false);
    Transforms.sqrt(pzxSigma, false);
    int minibatch = input.size(0);
    int size = fwd.pzxMeanPreOut.size(1);
    //Fetch decoder + p(x|z) parameters once, outside the sampling loop
    INDArray pxzw = getParamWithNoise(VariationalAutoencoderParamInitializer.PXZ_W, false);
    INDArray pxzb = getParamWithNoise(VariationalAutoencoderParamInitializer.PXZ_B, false);
    INDArray[] decoderWeights = new INDArray[decoderLayerSizes.length];
    INDArray[] decoderBiases = new INDArray[decoderLayerSizes.length];
    for (int i = 0; i < decoderLayerSizes.length; i++) {
        String wKey = "d" + i + WEIGHT_KEY_SUFFIX;
        String bKey = "d" + i + BIAS_KEY_SUFFIX;
        decoderWeights[i] = getParamWithNoise(wKey, false);
        decoderBiases[i] = getParamWithNoise(bKey, false);
    }
    //Monte-Carlo estimate: draw z ~ q(z|x) via the reparameterization trick, decode, and
    // accumulate the per-example negative log probability over all samples
    INDArray sumReconstructionNegLogProbability = null;
    for (int i = 0; i < numSamples; i++) {
        INDArray e = Nd4j.randn(minibatch, size);
        INDArray z = e.muli(pzxSigma).addi(meanZ); //z = mu + sigma * e, with e ~ N(0,1)
        //Do forward pass through decoder
        int nDecoderLayers = decoderLayerSizes.length;
        INDArray currentActivations = z;
        for (int j = 0; j < nDecoderLayers; j++) {
            currentActivations = currentActivations.mmul(decoderWeights[j]).addiRowVector(decoderBiases[j]);
            afn.getActivation(currentActivations, false);
        }
        //And calculate reconstruction distribution preOut
        INDArray pxzDistributionPreOut = currentActivations.mmul(pxzw).addiRowVector(pxzb);
        if (i == 0) {
            sumReconstructionNegLogProbability =
                            reconstructionDistribution.exampleNegLogProbability(data, pxzDistributionPreOut);
        } else {
            sumReconstructionNegLogProbability
                            .addi(reconstructionDistribution.exampleNegLogProbability(data, pxzDistributionPreOut));
        }
    }
    setInput(null);
    //Divide by -numSamples: averages the samples and flips negative log prob to log prob
    return sumReconstructionNegLogProbability.divi(-numSamples);
}
/**
 * Given specified values for the latent space as input (latent space being z in p(z|data)), generate output
 * from P(x|z), where x = E[P(x|z)]<br>
 * i.e., returns the mean value of the distribution P(x|z)
 *
 * @param latentSpaceValues Values for the latent space. size(1) must equal nOut configuration parameter
 * @return Sample of data: E[P(x|z)]
 */
public INDArray generateAtMeanGivenZ(INDArray latentSpaceValues) {
    //Decode z, then take the mean of the reconstruction distribution (deterministic)
    INDArray preOut = decodeGivenLatentSpaceValues(latentSpaceValues);
    return reconstructionDistribution.generateAtMean(preOut);
}
/**
 * Given specified values for the latent space as input (latent space being z in p(z|data)), randomly generate
 * output x, where x ~ P(x|z)
 *
 * @param latentSpaceValues Values for the latent space. size(1) must equal nOut configuration parameter
 * @return Sample of data: x ~ P(x|z)
 */
public INDArray generateRandomGivenZ(INDArray latentSpaceValues) {
    //Decode z, then sample from the reconstruction distribution (stochastic)
    INDArray preOut = decodeGivenLatentSpaceValues(latentSpaceValues);
    return reconstructionDistribution.generateRandom(preOut);
}
/**
 * Forward pass through the decoder only, from latent-space values to the pre-output of the
 * reconstruction distribution p(x|z).
 *
 * Fix: the original validation fetched PZX_MEAN_W with training=true (applying weight noise)
 * in the size check but training=false in the error message — inconsistent, and a redundant
 * second parameter fetch. This is an inference-only path, so fetch once with training=false,
 * consistent with the rest of the method.
 *
 * @param latentSpaceValues latent vectors z; size(1) must equal the layer's nOut
 * @return pre-output (pre-activation) of the reconstruction distribution
 * @throws IllegalArgumentException if the latent size does not match the configuration
 */
private INDArray decodeGivenLatentSpaceValues(INDArray latentSpaceValues) {
    int latentSize = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_MEAN_W, false).size(1);
    if (latentSpaceValues.size(1) != latentSize) {
        throw new IllegalArgumentException("Invalid latent space values: expected size " + latentSize
                        + ", got size (dimension 1) = " + latentSpaceValues.size(1) + " " + layerId());
    }
    //Do forward pass through decoder
    int nDecoderLayers = decoderLayerSizes.length;
    INDArray currentActivations = latentSpaceValues;
    IActivation afn = layerConf().getActivationFn();
    for (int i = 0; i < nDecoderLayers; i++) {
        String wKey = "d" + i + WEIGHT_KEY_SUFFIX;
        String bKey = "d" + i + BIAS_KEY_SUFFIX;
        INDArray w = getParamWithNoise(wKey, false);
        INDArray b = getParamWithNoise(bKey, false);
        currentActivations = currentActivations.mmul(w).addiRowVector(b);
        afn.getActivation(currentActivations, false); //applied in place
    }
    //Final affine transform to the reconstruction distribution's pre-output
    INDArray pxzw = getParamWithNoise(VariationalAutoencoderParamInitializer.PXZ_W, false);
    INDArray pxzb = getParamWithNoise(VariationalAutoencoderParamInitializer.PXZ_B, false);
    return currentActivations.mmul(pxzw).addiRowVector(pxzb);
}
/**
 * Does the reconstruction distribution have a loss function (such as mean squared error), rather than being a
 * standard probabilistic reconstruction distribution?
 *
 * @return true if the VAE is configured with a plain loss function (LossFunctionWrapper)
 */
public boolean hasLossFunction() {
    return this.reconstructionDistribution.hasLossFunction();
}
/**
 * Return the reconstruction error for this variational autoencoder.<br>
 * <b>NOTE (important):</b> This method applies ONLY to VAEs configured with a standard neural network loss
 * function (an {@link org.nd4j.linalg.lossfunctions.ILossFunction} such as mean squared error) rather than a
 * probabilistic reconstruction distribution P(x|z) (as in the Kingma and Welling VAE formulation).<br>
 * Check with {@link #hasLossFunction()} first.<br>
 * Unlike {@link #reconstructionProbability(INDArray, int)} and
 * {@link #reconstructionLogProbability(INDArray, int)}, the reconstruction error is deterministic — no
 * Monte-Carlo sampling is involved.
 *
 * @param data The data to calculate the reconstruction error on
 * @return Column vector of reconstruction errors for each example (shape: [numExamples,1])
 * @throws IllegalStateException if this VAE does not use a standard loss function
 */
public INDArray reconstructionError(INDArray data) {
    if (!hasLossFunction()) {
        throw new IllegalStateException(
                        "Cannot use reconstructionError method unless the variational autoencoder is "
                                        + "configured with a standard loss function (via LossFunctionWrapper). For VAEs utilizing a reconstruction "
                                        + "distribution, use the reconstructionProbability or reconstructionLogProbability methods "
                                        + layerId());
    }
    //Deterministic path: encode to the posterior mean, then decode to the distribution mean
    INDArray pZXMean = activate(data, false);
    INDArray reconstruction = generateAtMeanGivenZ(pZXMean); //Not probabilistic -> "mean" == output
    if (reconstructionDistribution instanceof CompositeReconstructionDistribution) {
        return ((CompositeReconstructionDistribution) reconstructionDistribution)
                        .computeLossFunctionScoreArray(data, reconstruction);
    }
    LossFunctionWrapper wrapper = (LossFunctionWrapper) reconstructionDistribution;
    ILossFunction lossFunction = wrapper.getLossFunction();
    //Identity activation: 'reconstruction' already has the activation function applied,
    // so we pass it as the output rather than the pre-output
    return lossFunction.computeScoreArray(data, reconstruction, new ActivationIdentity(), null);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.