code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2015 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.sasl.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
import javax.security.sasl.SaslServerFactory;
;
import org.wildfly.common.Assert;
import org.wildfly.security.auth.callback.ServerCredentialCallback;
import org.wildfly.security.credential.Credential;
/**
* A {@link SaslServerFactory} which sets the server's credential.
*
* @author <a href="mailto:fjuma@redhat.com">Farah Juma</a>
*/
public final class CredentialSaslServerFactory extends AbstractDelegatingSaslServerFactory {

    /** The credential handed to any {@link ServerCredentialCallback} that supports it. */
    private final Credential credential;

    /**
     * Construct a new instance.
     *
     * @param delegate the delegate SASL server factory
     * @param credential the server credential to use
     */
    public CredentialSaslServerFactory(final SaslServerFactory delegate, final Credential credential) {
        super(delegate);
        Assert.checkNotNullParam("credential", credential);
        this.credential = credential;
    }

    /**
     * Create a SASL server whose callback handler pre-answers server credential callbacks
     * with the configured credential, forwarding all remaining callbacks to {@code cbh}.
     */
    public SaslServer createSaslServer(final String mechanism, final String protocol, final String serverName, final Map<String, ?> props, final CallbackHandler cbh) throws SaslException {
        return delegate.createSaslServer(mechanism, protocol, serverName, props, callbacks -> {
            final ArrayList<Callback> remaining = new ArrayList<>(Arrays.asList(callbacks));
            for (final Iterator<Callback> it = remaining.iterator(); it.hasNext(); ) {
                final Callback current = it.next();
                if (current instanceof ServerCredentialCallback) {
                    final ServerCredentialCallback serverCredentialCallback = (ServerCredentialCallback) current;
                    if (serverCredentialCallback.isCredentialSupported(credential)) {
                        // Satisfy the callback here so the delegate handler never sees it.
                        serverCredentialCallback.setCredential(credential);
                        it.remove();
                    }
                }
            }
            if (! remaining.isEmpty()) {
                cbh.handle(remaining.toArray(new Callback[remaining.size()]));
            }
        });
    }
}
| sguilhen/wildfly-elytron | src/main/java/org/wildfly/security/sasl/util/CredentialSaslServerFactory.java | Java | apache-2.0 | 2,996 |
package com.board;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
public class BoardDAO {
private Connection conn;
public BoardDAO(Connection conn){
this.conn = conn;
}
//1. num์ ์ต๋๊ฐ
public int getMaxNum(){
int maxNum = 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select nvl(max(num),0) from board";
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if(rs.next()){
maxNum = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return maxNum;
}
// ์
๋ ฅ(created.jsp -> created_ok.jsp)
public int insertData(BoardForm dto){
int result = 0;
/*
PreparedStatement pstmt = null;
StringBuffer sql = new StringBuffer();
*/
PreparedStatement pstmt = null;
String sql;
try {
/*
sql.append("insert into board");
sql.append("(num, name, pwd, email, subject, content,");
*/
sql = "insert into board" +
"(num, name, pwd, email, subject, content," +
"ipAddr, hitCount, created) " +
"values(?, ?, ?, ?, ?, ?, ?, 0, sysdate)";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, dto.getNum());
pstmt.setString(2, dto.getName());
pstmt.setString(3, dto.getPwd());
pstmt.setString(4, dto.getEmail());
pstmt.setString(5, dto.getSubject());
pstmt.setString(6, dto.getContent());
pstmt.setString(7, dto.getIpAddr());
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println("# insertData");
System.out.println(e.toString());
}
return result;
}
// ์ ์ฒด๋ฐ์ดํฐ ๊ฐ์ง๊ณ ์ฌ๊ฑฐ์ผ
public List<BoardForm> getList(int start, int end){
List<BoardForm> lists = new ArrayList<BoardForm>();
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select * from (";
sql += "select rownum rnum,data.* " +
" from (select num,name,subject,hitCount," +
" to_char(created, 'YYYY-MM-DD') created" +
" from board order by num desc) data )" +
" where rnum >= ? and rnum <= ? ";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, start);
pstmt.setInt(2, end);
rs = pstmt.executeQuery();
while(rs.next()){
BoardForm dto = new BoardForm();
dto.setNum(rs.getInt(2));
dto.setName(rs.getString(3));
dto.setSubject(rs.getString(4));
dto.setHitCount(rs.getInt(5));
dto.setCreated(rs.getString(6));
lists.add(dto);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return lists;
}
//์ ์ฒด ๋ฐ์ดํฐ์ ๊ตฌํ๊ธฐ
public int getDataCount(){
int result= 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
sql = "select nvl(count(*),0) from board";
pstmt = conn.prepareStatement(sql);
rs = pstmt.executeQuery();
if(rs.next()){
result = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
//์กฐํ์์ฆ๊ฐ
public int updateHitCount(int num){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "update board set hitCount=hitCount+1 where num=?" ;
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
// ํ๋ช
์ ๋ฐ์ดํฐ ์ถ๋ ฅ
public BoardForm getReadData(int num){
BoardForm dto = null;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
// ์ ๋ชฉ, ์์ฑ์, ์ค์, ๋ฑ๋ก์ผ, ์กฐํ์, ๋ด์ฉ, ip์ฃผ์
sql = "select num, name, pwd, email, subject, content, ipaddr, created, hitCount " +
"from board where num=?";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
rs = pstmt.executeQuery();
if(rs.next()){
dto = new BoardForm();
dto.setNum(rs.getInt("num"));
dto.setName(rs.getString("name"));
dto.setPwd(rs.getString("pwd"));
dto.setEmail(rs.getString("email"));
dto.setSubject(rs.getString("subject"));
dto.setContent(rs.getString("content"));
dto.setIpAddr(rs.getString("ipAddr"));
dto.setHitCount(rs.getInt("hitCount"));
dto.setCreated(rs.getString("created"));
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return dto;
}
// ์ญ์
public int deleteData(int num){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "delete board where num=?";
pstmt = conn.prepareStatement(sql);
pstmt.setInt(1, num);
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
// ์์
public int updateData(BoardForm dto){
int result = 0;
PreparedStatement pstmt = null;
String sql;
try {
sql = "update board set name=?, pwd=?, subject=?, content=?, email=? where num=? ";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, dto.getName());
pstmt.setString(2, dto.getPwd());
pstmt.setString(3, dto.getSubject());
pstmt.setString(4, dto.getContent());
pstmt.setString(5, dto.getEmail());
pstmt.setInt(6, dto.getNum());
result = pstmt.executeUpdate();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
//๊ฒ์๋ ๋ฐ์ดํฐ์ ๊ตฌํ๊ธฐ
public int getDataCount(String searchKey, String searchValue){
int result= 0;
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
searchValue = "%" + searchValue + "%";
sql = "select nvl(count(*),0) from board where "+searchKey + " like ?";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, searchValue);
rs = pstmt.executeQuery();
if(rs.next()){
result = rs.getInt(1);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return result;
}
// ๊ฒ์๋ฐ์ดํฐ ๊ฐ์ง๊ณ ์ฌ๊ฑฐ์ผ
public List<BoardForm> getList(int start, int end, String searchKey, String searchValue){
List<BoardForm> lists = new ArrayList<BoardForm>();
PreparedStatement pstmt = null;
ResultSet rs = null;
String sql;
try {
searchValue = "%" + searchValue + "%";
sql = "select * from (";
sql += "select rownum rnum,data.* " +
" from (select num,name,subject,hitCount," +
" to_char(created, 'YYYY-MM-DD') created" +
" from board where "+searchKey + " like ? order by num desc) data )" +
" where rnum >= ? and rnum <= ? ";
pstmt = conn.prepareStatement(sql);
pstmt.setString(1, searchValue);
pstmt.setInt(2, start);
pstmt.setInt(3, end);
rs = pstmt.executeQuery();
while(rs.next()){
BoardForm dto = new BoardForm();
dto.setNum(rs.getInt(2));
dto.setName(rs.getString(3));
dto.setSubject(rs.getString(4));
dto.setHitCount(rs.getInt(5));
dto.setCreated(rs.getString(6));
lists.add(dto);
}
rs.close();
pstmt.close();
} catch (Exception e) {
System.out.println(e.toString());
}
return lists;
}
}
/////////////////
| xerato/vk-study | src/com/board/BoardDAO.java | Java | apache-2.0 | 7,784 |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.antennaesdk.common.messages;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
* <code>ServerRestMessage</code> carries a REST api call to the mobile-broker.
* Broker executes this call, and returns the result via <code>ClientMessage</code>.
*
* @see ClientMessage
*/
public class ServerRestMessage {

    /** Origin of the message: an end user or a server-side bot. */
    private ClientAddress from;

    /** REST resource path, e.g. {@code /api/books}, {@code /api/books?id=383763} or {@code /api/books/383763}. */
    private String path;

    /** Scheme, host and optional port, e.g. {@code https://toys.company.com:8443}. */
    private String host;

    /** HTTP verb such as GET, POST, PUT, DELETE. */
    // TODO: use an enum instead of string
    private String method;

    /** Optional request body, used by POST/PUT calls. */
    private String payLoad;

    /** HTTP headers to send with the REST request. */
    private Map<String, String> headers = new HashMap<>();

    /** Name/value pairs of multipart entities; a non-null value implies a multipart request. */
    private Map<String, String> multipartEntities;

    /** Unique identifier used to correlate the response on the client side. */
    private String requestId;

    // TODO: use TypeAdapterFactory instead of passing the type.
    private String classType = ServerRestMessage.class.getName();

    /** Creates a message with a freshly generated request id. */
    public ServerRestMessage() {
        this.requestId = UUID.randomUUID().toString();
    }

    /** Creates a message that reuses an existing request id. */
    public ServerRestMessage(String requestId) {
        this.requestId = requestId;
    }

    public ClientAddress getFrom() {
        return from;
    }

    public void setFrom(ClientAddress from) {
        this.from = from;
    }

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public String getMethod() {
        return method;
    }

    public void setMethod(String method) {
        this.method = method;
    }

    public String getPayLoad() {
        return payLoad;
    }

    public void setPayLoad(String payLoad) {
        this.payLoad = payLoad;
    }

    public Map<String, String> getHeaders() {
        return headers;
    }

    public void setHeaders(Map<String, String> headers) {
        this.headers = headers;
    }

    public void setMultipartEntities(Map<String, String> multipartEntities) {
        this.multipartEntities = multipartEntities;
    }

    public Map<String, String> getMultipartEntities() {
        return multipartEntities;
    }

    public String getRequestId() {
        return requestId;
    }

    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    /** Serializes this message to compact JSON. */
    public String toJson() {
        return new Gson().toJson(this);
    }

    /** Serializes this message to indented, human-readable JSON. */
    public String toJsonPretty() {
        Gson prettyPrinter = new GsonBuilder().setPrettyPrinting().create();
        return prettyPrinter.toJson(this);
    }

    /** Deserializes a message previously produced by {@link #toJson()}. */
    public static ServerRestMessage fromJson(String json) {
        return new Gson().fromJson(json, ServerRestMessage.class);
    }
}
| AntennaeSDK/MMS | client-api/src/main/java/com/github/antennaesdk/common/messages/ServerRestMessage.java | Java | apache-2.0 | 4,171 |
package io.quarkus.it.spring.data.jpa;
import java.io.Serializable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
@NoRepositoryBean
public interface IntermediateRepository<T, ID extends Serializable> extends JpaRepository<T, ID> {

    /** Intentional no-op; exists to verify default methods on intermediate repositories. */
    default void doNothing() {
    }

    /**
     * Looks up an entity that is expected to exist.
     *
     * @throws IllegalStateException if no entity with the given id is found
     */
    default T findMandatoryById(ID id) {
        return findById(id)
                .orElseThrow(() -> new IllegalStateException("not found: " + id));
    }
}
| quarkusio/quarkus | integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/IntermediateRepository.java | Java | apache-2.0 | 508 |
/*
* Copyright 2016 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns.threadsafety;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.errorprone.VisitorState;
import com.google.errorprone.annotations.CheckReturnValue;
import com.google.errorprone.annotations.Immutable;
import com.google.errorprone.annotations.ImmutableTypeParameter;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.errorprone.bugpatterns.BugChecker;
import com.google.errorprone.bugpatterns.threadsafety.ThreadSafety.Purpose;
import com.google.errorprone.bugpatterns.threadsafety.ThreadSafety.Violation;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Symbol.TypeVariableSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.Type.ClassType;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.type.TypeKind;
/** Analyzes types for deep immutability. */
public class ImmutableAnalysis {

  // Checker used for suppression lookups and for building diagnostics.
  private final BugChecker bugChecker;
  private final VisitorState state;
  // Registry of types whose (im)mutability is known a priori.
  private final WellKnownMutability wellKnownMutability;
  // Shared engine implementing the generic thread-safety/immutability rules.
  private final ThreadSafety threadSafety;

  /**
   * Creates an analysis recognizing the given set of immutability marker annotations.
   *
   * @param immutableAnnotations fully-qualified names of annotations treated as {@code @Immutable}
   */
  ImmutableAnalysis(
      BugChecker bugChecker,
      VisitorState state,
      WellKnownMutability wellKnownMutability,
      ImmutableSet<String> immutableAnnotations) {
    this.bugChecker = bugChecker;
    this.state = state;
    this.wellKnownMutability = wellKnownMutability;
    this.threadSafety =
        ThreadSafety.builder()
            .setPurpose(Purpose.FOR_IMMUTABLE_CHECKER)
            .knownTypes(wellKnownMutability)
            .markerAnnotations(immutableAnnotations)
            .typeParameterAnnotation(ImmutableTypeParameter.class)
            .build(state);
  }

  /** Creates an analysis recognizing only the standard {@code @Immutable} annotation. */
  public ImmutableAnalysis(
      BugChecker bugChecker, VisitorState state, WellKnownMutability wellKnownMutability) {
    this(bugChecker, state, wellKnownMutability, ImmutableSet.of(Immutable.class.getName()));
  }

  /** Delegates to {@link ThreadSafety#isThreadSafeType}. */
  Violation isThreadSafeType(
      boolean allowContainerTypeParameters, Set<String> containerTypeParameters, Type type) {
    return threadSafety.isThreadSafeType(
        allowContainerTypeParameters, containerTypeParameters, type);
  }

  /** Returns true if the given type parameter carries a thread-safe type parameter annotation. */
  boolean hasThreadSafeTypeParameterAnnotation(TypeVariableSymbol sym) {
    return threadSafety.hasThreadSafeTypeParameterAnnotation(sym);
  }

  /** Delegates to {@link ThreadSafety#checkInstantiation}. */
  Violation checkInstantiation(
      Collection<TypeVariableSymbol> classTypeParameters, Collection<Type> typeArguments) {
    return threadSafety.checkInstantiation(classTypeParameters, typeArguments);
  }

  /** Delegates to {@link ThreadSafety#checkInvocation}. */
  public Violation checkInvocation(Type methodType, Symbol symbol) {
    return threadSafety.checkInvocation(methodType, symbol);
  }

  /** Accepts {@link Violation violations} that are found during the analysis. */
  @FunctionalInterface
  public interface ViolationReporter {
    Description.Builder describe(Tree tree, Violation info);

    /** Builds a full {@link Description} from {@link #describe}, attaching a fix if present. */
    @CheckReturnValue
    default Description report(Tree tree, Violation info, Optional<SuggestedFix> suggestedFix) {
      Description.Builder description = describe(tree, info);
      suggestedFix.ifPresent(description::addFix);
      return description.build();
    }
  }

  /**
   * Check that an {@code @Immutable}-annotated class:
   *
   * <ul>
   *   <li>does not declare or inherit any mutable fields,
   *   <li>any immutable supertypes are instantiated with immutable type arguments as required by
   *       their containerOf spec, and
   *   <li>any enclosing instances are immutable.
   * </ul>
   *
   * <p>Supertypes are checked for effective immutability rather than for the annotation itself;
   * requiring supertypes to be annotated immutable would be too restrictive.
   */
  public Violation checkForImmutability(
      Optional<ClassTree> tree,
      ImmutableSet<String> immutableTyParams,
      ClassType type,
      ViolationReporter reporter) {
    Violation info = areFieldsImmutable(tree, immutableTyParams, type, reporter);
    if (info.isPresent()) {
      return info;
    }
    for (Type interfaceType : state.getTypes().interfaces(type)) {
      AnnotationInfo interfaceAnnotation = getImmutableAnnotation(interfaceType.tsym, state);
      if (interfaceAnnotation == null) {
        continue;
      }
      // Immutable interfaces only need their containerOf instantiation checked.
      info =
          threadSafety.checkSuperInstantiation(
              immutableTyParams, interfaceAnnotation, interfaceType);
      if (info.isPresent()) {
        return info.plus(
            String.format(
                "'%s' extends '%s'",
                threadSafety.getPrettyName(type.tsym),
                threadSafety.getPrettyName(interfaceType.tsym)));
      }
    }
    if (!type.asElement().isEnum()) {
      // don't check enum super types here to avoid double-reporting errors
      info = checkSuper(immutableTyParams, type);
      if (info.isPresent()) {
        return info;
      }
    }
    Type mutableEnclosing = threadSafety.mutableEnclosingInstance(tree, type);
    if (mutableEnclosing != null) {
      return info.plus(
          String.format(
              "'%s' has mutable enclosing instance '%s'",
              threadSafety.getPrettyName(type.tsym), mutableEnclosing));
    }
    return Violation.absent();
  }

  /**
   * Checks the superclass of {@code type}: if it is annotated immutable, verifies its
   * instantiation; otherwise recursively checks it for effective immutability.
   */
  private Violation checkSuper(ImmutableSet<String> immutableTyParams, ClassType type) {
    ClassType superType = (ClassType) state.getTypes().supertype(type);
    if (superType.getKind() == TypeKind.NONE
        || state.getTypes().isSameType(state.getSymtab().objectType, superType)) {
      return Violation.absent();
    }
    if (WellKnownMutability.isAnnotation(state, type)) {
      // TODO(b/25630189): add enforcement
      return Violation.absent();
    }
    AnnotationInfo superannotation = getImmutableAnnotation(superType.tsym, state);
    String message =
        String.format(
            "'%s' extends '%s'",
            threadSafety.getPrettyName(type.tsym), threadSafety.getPrettyName(superType.tsym));
    if (superannotation != null) {
      // If the superclass does happen to be immutable, we don't need to recursively
      // inspect it. We just have to check that it's instantiated correctly:
      Violation info =
          threadSafety.checkSuperInstantiation(immutableTyParams, superannotation, superType);
      if (!info.isPresent()) {
        return Violation.absent();
      }
      return info.plus(message);
    }
    // Recursive case: check if the supertype is 'effectively' immutable.
    Violation info =
        checkForImmutability(
            Optional.<ClassTree>empty(),
            immutableTyParams,
            superType,
            new ViolationReporter() {
              @Override
              public Description.Builder describe(Tree tree, Violation info) {
                return bugChecker
                    .buildDescription(tree)
                    .setMessage(info.plus(info.message()).message());
              }
            });
    if (!info.isPresent()) {
      return Violation.absent();
    }
    return info.plus(message);
  }

  /**
   * Check a single class' fields for immutability.
   *
   * @param immutableTyParams the in-scope immutable type parameters
   * @param classType the type to check the fields of
   */
  Violation areFieldsImmutable(
      Optional<ClassTree> tree,
      ImmutableSet<String> immutableTyParams,
      ClassType classType,
      ViolationReporter reporter) {
    ClassSymbol classSym = (ClassSymbol) classType.tsym;
    if (classSym.members() == null) {
      return Violation.absent();
    }
    // Only instance fields matter; static fields are checked elsewhere.
    Predicate<Symbol> instanceFieldFilter =
        symbol -> symbol.getKind() == ElementKind.FIELD && !symbol.isStatic();
    Map<Symbol, Tree> declarations = new HashMap<>();
    if (tree.isPresent()) {
      for (Tree member : tree.get().getMembers()) {
        Symbol sym = ASTHelpers.getSymbol(member);
        if (sym != null) {
          declarations.put(sym, member);
        }
      }
    }
    // javac gives us members in reverse declaration order
    // handling them in declaration order leads to marginally better diagnostics
    List<Symbol> members =
        ImmutableList.copyOf(ASTHelpers.scope(classSym.members()).getSymbols(instanceFieldFilter))
            .reverse();
    for (Symbol member : members) {
      Optional<Tree> memberTree = Optional.ofNullable(declarations.get(member));
      Violation info =
          isFieldImmutable(
              memberTree, immutableTyParams, classSym, classType, (VarSymbol) member, reporter);
      if (info.isPresent()) {
        return info;
      }
    }
    return Violation.absent();
  }

  /** Check a single field for immutability. */
  private Violation isFieldImmutable(
      Optional<Tree> tree,
      ImmutableSet<String> immutableTyParams,
      ClassSymbol classSym,
      ClassType classType,
      VarSymbol var,
      ViolationReporter reporter) {
    if (bugChecker.isSuppressed(var)) {
      return Violation.absent();
    }
    // A non-final field is mutable unless it is explicitly marked @LazyInit.
    if (!var.getModifiers().contains(Modifier.FINAL)
        && !ASTHelpers.hasAnnotation(var, LazyInit.class, state)) {
      Violation info =
          Violation.of(
              String.format(
                  "'%s' has non-final field '%s'",
                  threadSafety.getPrettyName(classSym), var.getSimpleName()));
      if (tree.isPresent()) {
        // If we have a tree to attach diagnostics to, report the error immediately instead of
        // accumulating the path to the error from the top-level class being checked
        state.reportMatch(
            reporter.report(
                tree.get(), info, SuggestedFixes.addModifiers(tree.get(), state, Modifier.FINAL)));
        return Violation.absent();
      }
      return info;
    }
    Type varType = state.getTypes().memberType(classType, var);
    Violation info =
        threadSafety.isThreadSafeType(
            /* allowContainerTypeParameters= */ true, immutableTyParams, varType);
    if (info.isPresent()) {
      info =
          info.plus(
              String.format(
                  "'%s' has field '%s' of type '%s'",
                  threadSafety.getPrettyName(classSym), var.getSimpleName(), varType));
      if (tree.isPresent()) {
        // If we have a tree to attach diagnostics to, report the error immediately instead of
        // accumulating the path to the error from the top-level class being checked
        state.reportMatch(reporter.report(tree.get(), info, Optional.empty()));
        return Violation.absent();
      }
      return info;
    }
    return Violation.absent();
  }

  /**
   * Gets the {@link Symbol}'s {@code @Immutable} annotation info, either from an annotation on the
   * symbol or from the list of well-known immutable types.
   */
  AnnotationInfo getImmutableAnnotation(Symbol sym, VisitorState state) {
    String nameStr = sym.flatName().toString();
    AnnotationInfo known = wellKnownMutability.getKnownImmutableClasses().get(nameStr);
    if (known != null) {
      return known;
    }
    return threadSafety.getInheritedAnnotation(sym, state);
  }

  /**
   * Gets the {@link Tree}'s {@code @Immutable} annotation info, either from an annotation on the
   * symbol or from the list of well-known immutable types.
   */
  AnnotationInfo getImmutableAnnotation(Tree tree, VisitorState state) {
    Symbol sym = ASTHelpers.getSymbol(tree);
    return sym == null ? null : threadSafety.getMarkerOrAcceptedAnnotation(sym, state);
  }
}
| cushon/error-prone | core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableAnalysis.java | Java | apache-2.0 | 12,491 |
package com.planet_ink.coffee_mud.Abilities.Songs;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Play_Organs extends Play_Instrument
{
    @Override
    public String ID()
    {
        return "Play_Organs";
    }

    /** Display name, translated once at class-load time. */
    private final static String localizedName = CMLib.lang()._("Organs");

    @Override
    public String name()
    {
        return localizedName;
    }

    @Override
    protected int requiredInstrumentType()
    {
        return MusicalInstrument.TYPE_ORGANS;
    }

    @Override
    public String mimicSpell()
    {
        return "Prayer_ProtectHealth";
    }

    @Override
    protected int canAffectCode()
    {
        return 0;
    }

    /** Lazily-created instance of the mimicked spell, shared across all players. */
    private static Ability theSpell = null;

    @Override
    protected Ability getSpell()
    {
        // Create the mimicked ability on first use; mimicSpell() is a constant
        // here, so the empty-name guard only matters for subclasses.
        if ((theSpell == null) && (mimicSpell().length() > 0))
        {
            theSpell = CMClass.getAbility(mimicSpell());
        }
        return theSpell;
    }
}
| vjanmey/EpicMudfia | com/planet_ink/coffee_mud/Abilities/Songs/Play_Organs.java | Java | apache-2.0 | 2,094 |
// Copyright 2016 Pikkpoiss
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gamejam
// SceneID uniquely identifies a Scene registered with a SceneManager.
type SceneID int

// Scene is one self-contained stage of the game. A scene owns a set of
// components, is loaded/unloaded around its active lifetime, and is
// rendered and updated once per frame while current.
type Scene interface {
	AddComponent(c Component)
	Load(r Resources) (err error)
	Unload(r Resources) (err error)
	Render()
	Update(mgr SceneManager)
	SetSceneID(id SceneID)
	SceneID() SceneID
}
// BaseScene is a reusable default Scene implementation that tracks
// registered components by their IDs. Embed it to get the bookkeeping
// for free and override only the hooks you need.
type BaseScene struct {
	components map[ComponentID]Component // registered components, keyed by component ID
	id         SceneID                   // identifier assigned by the SceneManager
}
// NewBaseScene returns a BaseScene with an empty component registry.
func NewBaseScene() *BaseScene {
	scene := &BaseScene{}
	scene.components = make(map[ComponentID]Component)
	return scene
}
// AddComponent binds c to this scene and registers it under its own ID.
func (s *BaseScene) AddComponent(c Component) {
	c.SetScene(s)
	id := c.GetID()
	s.components[id] = c
}
// Load prepares the scene for use. The base implementation has nothing
// to load and always succeeds.
func (s *BaseScene) Load(r Resources) (err error) {
	return nil
}
// Render draws the scene. The base implementation draws nothing;
// embedders override this to do their own drawing.
func (s *BaseScene) Render() {
}
// SetSceneID records the identifier assigned to this scene.
func (s *BaseScene) SetSceneID(id SceneID) {
	s.id = id
}
// SceneID returns the identifier previously set via SetSceneID
// (the zero SceneID if none was set).
func (s *BaseScene) SceneID() SceneID {
	return s.id
}
// Unload tears down every registered component and empties the registry.
// The base implementation always returns nil.
func (s *BaseScene) Unload(r Resources) (err error) {
	for id, c := range s.components {
		// Delete the map entry entirely; the original assigned nil instead,
		// leaving stale keys whose nil values would panic on a later
		// lookup-and-call. Deleting during range is safe in Go.
		delete(s.components, id)
		c.Delete()
	}
	return
}
// Update advances the scene by one tick. The base implementation is a
// no-op; embedders override this to run game logic.
func (s *BaseScene) Update(mgr SceneManager) {
}
| pikkpoiss/gamejam | v1/gamejam/scene.go | GO | apache-2.0 | 1,558 |
package com.mygame;
import java.util.Vector;
import loon.geom.RectBox;
public class StepSwitch extends Switch
{
    /**
     * A pressure switch: while the player or any Thing rests on it, the
     * associated blocks are removed from the level; when released, the
     * original blocks are restored.
     *
     * @param x      left edge of the switch
     * @param y      top edge of the switch
     * @param blocks live list of blocks controlled by this switch
     */
    StepSwitch(int x, int y, Vector<Thing> blocks)
    {
        this.x = x;
        this.y = y;
        this.blocks = blocks;
        // Snapshot the initial contents so they can be restored on release.
        // (Copy constructor replaces the original's cast-per-element loop.)
        orgblocks = new Vector<Thing>(blocks);
        height = 50;
        width = 50;
        active = false;
        col = new RectBox(x, y, width, height);
    }

    /**
     * Presses the switch if the player or any Thing overlaps it (clearing
     * the controlled blocks), otherwise releases it and restores the blocks.
     */
    public void update(Player player, Vector<Thing> things)
    {
        boolean pressed = false;
        for (Thing thing : things)
        {
            if (col.intersects((int) thing.x, (int) thing.y, thing.width, thing.height) && !pressed)
            {
                pressed = true;
                active = true;
                blocks.clear();
            }
        }
        if (col.intersects((int) player.x, (int) player.y, player.width, player.height))
        {
            pressed = true;
            active = true;
            blocks.clear();
        }
        if (!pressed)
        {
            active = false;
            // Restore the original blocks once nothing holds the switch down.
            if (blocks.isEmpty())
            {
                blocks.addAll(orgblocks);
            }
        }
    }
}
| cping/LGame | Java/Examples/arpggame(0.5)/src/com/mygame/StepSwitch.java | Java | apache-2.0 | 1,359 |
/*
* DateAxisBuilder.java
*
* Created on March 17, 2007, 10:17 PM
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package com.thecoderscorner.groovychart.axis;
import java.beans.IntrospectionException;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.thecoderscorner.groovychart.chart.Buildable;
import com.thecoderscorner.groovychart.chart.ChartBuilder;
import com.thecoderscorner.groovychart.chart.BeanBuilder;
import org.jfree.chart.axis.NumberAxis;
/**
*
* @author jclarke
*/
public class NumberAxisBuilder extends BeanBuilder implements Buildable {

    private static final Logger logger =
            Logger.getLogger(NumberAxisBuilder.class.getPackage().getName());

    /** The axis being configured by this builder. */
    private NumberAxis axis = new NumberAxis();
    private boolean domain;
    private Object parent;
    private String name;

    /** Creates a builder wrapping a fresh {@link NumberAxis}. */
    public NumberAxisBuilder() {
        try {
            setBeanClass(NumberAxis.class);
        } catch (IntrospectionException ex) {
            logger.log(Level.WARNING, ex.getMessage(), ex);
        }
    }

    public void setChartBuilder(ChartBuilder chartBuilder) {
        // This builder does not need chart-level state.
    }

    /**
     * Handles a builder node: either adopts a pre-built axis passed as the
     * node's value, or applies the node's attribute map to the current axis.
     */
    public void processNode(Object name, Map map, Object value) throws Exception {
        String method = name.toString();
        if (value != null) {
            this.axis = (NumberAxis) value;
            return;
        }
        if (logger.isLoggable(Level.FINEST)) {
            logger.finest("processNode: method = " + method);
        }
        if (method.equalsIgnoreCase("NumberAxis")) {
            this.setProperties(this.axis, map);
        }
    }

    public Object getParent() {
        return parent;
    }

    public void setParent(Object parent) {
        this.parent = parent;
    }

    /** Hands the finished axis to any parent that can accept one. */
    public void nodeCompleted(Object parent) {
        if (parent instanceof AxisSettable) {
            logger.finest("Setting axis on parent");
            ((AxisSettable) parent).setAxis(this.axis);
        }
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public NumberAxis getAxis() {
        return axis;
    }
}
| davetcc/groovychart | src/main/java/com/thecoderscorner/groovychart/axis/NumberAxisBuilder.java | Java | apache-2.0 | 2,310 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog.model.transform;
import java.util.Map;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.servicecatalog.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* UpdateServiceActionRequestMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UpdateServiceActionRequestMarshaller {

    // One MarshallingInfo per request member: each binds a member to its JSON
    // payload location name and marshalling type.
    private static final MarshallingInfo<String> ID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Id").build();
    private static final MarshallingInfo<String> NAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Name").build();
    private static final MarshallingInfo<Map> DEFINITION_BINDING = MarshallingInfo.builder(MarshallingType.MAP).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Definition").build();
    private static final MarshallingInfo<String> DESCRIPTION_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Description").build();
    private static final MarshallingInfo<String> ACCEPTLANGUAGE_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("AcceptLanguage").build();

    // Marshaller holds no per-request state, so a single shared instance suffices.
    private static final UpdateServiceActionRequestMarshaller instance = new UpdateServiceActionRequestMarshaller();

    public static UpdateServiceActionRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param updateServiceActionRequest the request to marshall; must not be null
     * @param protocolMarshaller receives each member together with its binding
     * @throws SdkClientException if the request is null or any member fails to marshall
     */
    public void marshall(UpdateServiceActionRequest updateServiceActionRequest, ProtocolMarshaller protocolMarshaller) {

        if (updateServiceActionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(updateServiceActionRequest.getId(), ID_BINDING);
            protocolMarshaller.marshall(updateServiceActionRequest.getName(), NAME_BINDING);
            protocolMarshaller.marshall(updateServiceActionRequest.getDefinition(), DEFINITION_BINDING);
            protocolMarshaller.marshall(updateServiceActionRequest.getDescription(), DESCRIPTION_BINDING);
            protocolMarshaller.marshall(updateServiceActionRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
| aws/aws-sdk-java | aws-java-sdk-servicecatalog/src/main/java/com/amazonaws/services/servicecatalog/model/transform/UpdateServiceActionRequestMarshaller.java | Java | apache-2.0 | 3,346 |
$(document).ready(function(){
    // Comparison page: two dates of the same granularity (day / month / year)
    // are picked, a summary header is loaded for each, and six doughnut charts
    // compare backend metrics between the two dates.
    //
    // The chosen granularity is persisted in localStorage under 'type':
    // 0 = day, 1 = month, 2 = year.

    var STAT_URL = '../phpBackend/doughnut.php';
    var SUMMARY_BASE = '../phpBackend/OrgStat/';

    // Shorthand for reading a named input's current value.
    function inputVal(name) {
        return $('input[name=' + name + ']').val();
    }

    // Maps a granularity index to its pair of datepicker widgets.
    var PICKER_PAIRS = {
        0: ['#datepickerDay1', '#datepickerDay2'],
        1: ['#datepickerMnd1', '#datepickerMnd2'],
        2: ['#datepickerYear1', '#datepickerYear2']
    };

    // Shows the picker pair for the given granularity and hides the others.
    // Passing an index outside 0..2 hides everything.
    function showPickerPair(typeIndex) {
        $.each(PICKER_PAIRS, function (key, pair) {
            var action = (key == typeIndex) ? 'show' : 'hide';
            $(pair[0])[action]();
            $(pair[1])[action]();
        });
    }

    // Returns [firstDate, lastDate] formatted for the backend, or null when the
    // stored granularity is unknown. Month values get a '-01' day suffix so the
    // backend receives a complete date.
    function selectedDates(type) {
        if (type == 0) {
            return [inputVal('date'), inputVal('date2')];
        }
        if (type == 1) {
            return [inputVal('date3') + '-01', inputVal('date4') + '-01'];
        }
        if (type == 2) {
            return [inputVal('date5'), inputVal('date6')];
        }
        return null;
    }

    // Loads one summary header (left or right column) from the OrgStat backend.
    function loadSummary(script, dateValue, targetId) {
        var xmlhttp = new XMLHttpRequest();
        xmlhttp.onreadystatechange = function () {
            if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
                document.getElementById(targetId).innerHTML = xmlhttp.responseText;
            }
        };
        xmlhttp.open("GET", SUMMARY_BASE + script + "?date=" + dateValue, true);
        xmlhttp.send();
    }

    function day1()  { loadSummary('day.php',   inputVal('date'),          'firstDate'); }
    function day2()  { loadSummary('day.php',   inputVal('date2'),         'lastDate');  }
    function mnd1()  { loadSummary('month.php', inputVal('date3') + '-01', 'firstDate'); }
    function mnd2()  { loadSummary('month.php', inputVal('date4') + '-01', 'lastDate');  }
    function year1() { loadSummary('year.php',  inputVal('date5'),         'firstDate'); }
    function year2() { loadSummary('year.php',  inputVal('date6'),         'lastDate');  }

    // Draws one comparison doughnut and fills the two text fields next to it.
    // Backend "num" codes come in groups of three per metric:
    // numBase for day, numBase + 1 for month, numBase + 2 for year.
    function renderComparison(canvasSel, numBase, firstSel, lastSel, suffix) {
        var seed = [
            { value: 1, color: "#1AA24C", highlight: "#1AD24C", label: "Valg 2" },
            { value: 1, color: "#000000", highlight: "#333333", label: "Valg 1" }
        ];
        var ctx = $(canvasSel).get(0).getContext("2d");
        var doughnut = new Chart(ctx).Doughnut(seed, {
            animation: true,
            showTooltips: true,
            percentageInnerCutout: 0,
            segmentShowStroke: true
        });
        var type = localStorage.getItem('type');
        var dates = selectedDates(type);
        if (dates === null) {
            return; // no granularity chosen yet -- leave the placeholder chart
        }
        var num = numBase + parseInt(type, 10);
        $.ajax({
            url: STAT_URL + '?date=' + dates[0] + '&num=' + num,
            success: function (firstData) {
                var res1 = parseInt(firstData);
                $.ajax({
                    url: STAT_URL + '?date=' + dates[1] + '&num=' + num,
                    success: function (lastData) {
                        var res2 = parseInt(lastData);
                        // Segment 0 shows the later date, segment 1 the earlier one.
                        doughnut.segments[0].value = res2;
                        doughnut.segments[1].value = res1;
                        doughnut.update();
                        $(firstSel).text(res1 + suffix);
                        $(lastSel).text(res2 + suffix);
                    }
                });
            }
        });
    }

    // The six metric charts only differ by canvas, backend num base, the two
    // text targets, and whether a currency suffix is appended.
    function showChart1() { renderComparison('#statistikk1',  1, '#followersFirstDate',       '#followersLastDate',       '');   }
    function showChart2() { renderComparison('#statistikk2',  4, '#donationsFirstDate',       '#donationsLastDate',       '');   }
    function showChart3() { renderComparison('#statistikk3',  7, '#moneyDonatedFirstDate',    '#moneyDonatedLastDate',    ',-'); }
    function showChart4() { renderComparison('#statistikk4', 10, '#averageDonationFirstDate', '#averageDonationLastDate', ',-'); }
    function showChart5() { renderComparison('#statistikk5', 13, '#newsFirstDate',            '#newsLastDate',            '');   }
    function showChart6() { renderComparison('#statistikk6', 16, '#prosjectsFirstDate',       '#prosjectsLastDate',       '');   }

    function refreshAllCharts() {
        showChart1();
        showChart2();
        showChart3();
        showChart4();
        showChart5();
        showChart6();
    }

    // Initializes one datepicker; when its date changes, loads the matching
    // summary and redraws all charts once the other input of the pair
    // (counterpartName) is also filled in.
    function wirePicker(sel, options, loadFn, counterpartName) {
        $(sel).datepicker(options);
        $(sel).on('changeDate', function (ev) {
            $(this).datepicker('hide');
            loadFn();
            if (inputVal(counterpartName) != "") {
                refreshAllCharts();
            }
        });
    }

    // Fresh option objects per picker so the plugin never shares state.
    function dayOptions() {
        return { format: "yyyy-mm-dd", weekStart: 1, language: "no", todayHighlight: true };
    }
    function monthOptions() {
        return { format: "yyyy-mm", weekStart: 1, startView: 0, minViewMode: 1, language: "no", todayHighlight: true };
    }
    function yearOptions() {
        return { format: "yyyy", weekStart: 1, startView: 1, minViewMode: 2, language: "no", todayHighlight: true };
    }

    // Nothing is visible until a granularity is chosen.
    showPickerPair(-1);

    // Granularity selector: reveal the matching picker pair and remember the
    // choice for subsequent chart requests.
    $('select').change(function(){
        var index = $('select').val();
        if (index == 0 || index == 1 || index == 2) {
            showPickerPair(index);
            localStorage.setItem('type', String(index));
        }
    });

    wirePicker('#datepickerDay1',  dayOptions(),   day1,  'date2');
    wirePicker('#datepickerDay2',  dayOptions(),   day2,  'date');
    wirePicker('#datepickerYear1', yearOptions(),  year1, 'date6');
    wirePicker('#datepickerYear2', yearOptions(),  year2, 'date5');
    wirePicker('#datepickerMnd1',  monthOptions(), mnd1,  'date4');
    wirePicker('#datepickerMnd2',  monthOptions(), mnd2,  'date3');

    $('#backBtn').on('click', function (ev) {
        window.location.replace("../pages/stats.php");
    });
});
/* Handlers for all pages that can be viewed without logging in. */
function UnAuthenticatedHandler() {
    "use strict";

    // Builds an Express-style request handler that simply renders the named view.
    function rendersView(viewName) {
        return function (req, res, next) {
            return res.render(viewName);
        };
    }

    this.displayAboutPage = rendersView("about");
    this.displayContactPage = rendersView("contact");
    this.displayHomePage = rendersView("home");
    this.displayChatPage = rendersView("chat");
}

module.exports = UnAuthenticatedHandler;
| bertonjulian/NodeGoat | app/routes/unauthenticated.js | JavaScript | apache-2.0 | 549 |
/*
* Copyright (C) 2016 the original author or authors.
*
* This file is part of jGrades Application Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.jgrades.lic.api.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
@AllArgsConstructor
public class LicenceValidationResult {

    /** True when the licence passed validation. */
    private boolean valid;

    /** Human-readable reason for a failed validation; null when valid. */
    private String errorMessage;

    /**
     * Creates a result representing a successful validation:
     * {@code valid} is true and there is no error message.
     */
    public LicenceValidationResult() {
        this.valid = true;
        this.errorMessage = null;
    }
}
| jgrades/jgrades | jg-backend/implementation/base/jg-lic/interface/src/main/java/org/jgrades/lic/api/model/LicenceValidationResult.java | Java | apache-2.0 | 636 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediaconvert.model;
import javax.annotation.Generated;
/**
* Use Deinterlacer (DeinterlaceMode) to choose how the service will do deinterlacing. Default is Deinterlace. -
* Deinterlace converts interlaced to progressive. - Inverse telecine converts Hard Telecine 29.97i to progressive
* 23.976p. - Adaptive auto-detects and converts to progressive.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum DeinterlacerMode {

    DEINTERLACE("DEINTERLACE"),
    INVERSE_TELECINE("INVERSE_TELECINE"),
    ADAPTIVE("ADAPTIVE");

    /** Wire value sent to / received from the service. */
    private final String value;

    DeinterlacerMode(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return DeinterlacerMode corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static DeinterlacerMode fromValue(String value) {
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        for (DeinterlacerMode candidate : values()) {
            if (candidate.value.equals(value)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-mediaconvert/src/main/java/com/amazonaws/services/mediaconvert/model/DeinterlacerMode.java | Java | apache-2.0 | 2,135 |
๏ปฟusing System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Cats.Models
{
    /// <summary>
    /// One commodity line of a ration: the amount of a commodity (in an optional
    /// unit) that the ration allocates. The virtual navigation properties suggest
    /// an Entity Framework entity -- confirm against the owning DbContext.
    /// </summary>
    public class RationDetail
    {
        /// <summary>Primary key.</summary>
        public int RationDetailID { get; set; }

        /// <summary>Id of the owning <see cref="Ration"/>.</summary>
        public int RationID { get; set; }

        /// <summary>Id of the <see cref="Commodity"/> this line refers to.</summary>
        public int CommodityID { get; set; }

        /// <summary>Quantity of the commodity.</summary>
        public decimal Amount { get; set; }

        /// <summary>Optional id of the <see cref="Unit"/> the amount is expressed in.</summary>
        public Nullable<int> UnitID { get; set; }

        /// <summary>Navigation property to the commodity.</summary>
        public virtual Commodity Commodity { get; set; }

        /// <summary>Navigation property to the owning ration.</summary>
        public virtual Ration Ration { get; set; }

        /// <summary>Navigation property to the unit; may be null.</summary>
        public virtual Unit Unit { get; set; }
    }
}
| ndrmc/cats | Models/Cats.Models/RationDetail.cs | C# | apache-2.0 | 552 |
<div id="print_area" style="width:200px; font-size:12px;">
<?php
include('dbcon_s.php');

// Report date: "now". $time_now is assumed to be defined by dbcon_s.php -- TODO confirm.
$date = date('Y-m-d', $time_now);

// Escape the POSTed terminal before embedding it in SQL; it was previously
// concatenated raw (SQL injection).
$terminal = mysql_real_escape_string($_POST['terminal']);

// NOTE(review): both branches of the original h_status == 'off' check ran an
// identical query, so the branch has been collapsed. If 'on' was meant to use
// a different status filter, restore the distinction here.
$query = mysql_query("select count(*) as num_order, sum(person) as total_person, sum(order_total) as total_order, sum(discount) as total_discount, sum(ser_charge) as total_s_charge, sum(vat_total) as total_vat, sum(cash) as total_cash, date from order_list where status='false' AND date='$date' AND terminal='$terminal'");
$row = mysql_fetch_array($query);
?>
<h2 style="text-align:center; font-family:Forte; margin:0px; padding:0px;">La Bamba</h2>
<p style="text-align:center; margin:0px; font-size:12px;">
House # 54, Road # 20,<br />
Sector # 03, Rabindra Sarani<br />
Uttara, Dhaka-1230<br />
Phone : 01759783896-7<br />
Vat Reg No: 5111110711
</p>
<p style="text-align:center; margin:0px; font-size:12px;">
Day Report (<?php echo htmlspecialchars($_POST['terminal']); ?>)
</p>
Report Date : <?php echo htmlspecialchars($row['date']); ?><br />
Print Date : <?php echo $date; ?><br />
<table cellspacing="0" style="width:100%; font-size:12px;">
<tr>
<th style="text-align:left">Number of order:</th>
<th style="text-align:left;"><?php echo $row['num_order']; ?></th>
</tr>
<tr>
<th style="text-align:left">Total Person:</th>
<th style="text-align:left;"><?php echo $row['total_person']; ?></th>
</tr>
<tr>
<th style="text-align:left">Total Order:</th>
<th style="text-align:left;"><?php echo $row['total_order']; ?></th>
</tr>
<tr>
<th style="text-align:left">Total Discount:</th>
<th style="text-align:left;"><?php echo $row['total_discount']; ?></th>
</tr>
<tr>
<th style="text-align:left">Total Service Charge:</th>
<th style="text-align:left;"><?php echo $row['total_s_charge']; ?></th>
</tr>
<tr>
<th style="text-align:left">Total Vat:</th>
<th style="text-align:left;"><?php echo $row['total_vat']; ?></th>
</tr>
<tr>
<th style="text-align:left">Total Cash:</th>
<th style="text-align:left;"><?php echo $row['total_cash']; ?></th>
</tr>
</table>
<br />
<div style="border-bottom:1px dotted #000;"></div>
<br />
Day report (<?php echo htmlspecialchars($_POST['terminal']); ?>) printed by : <?php echo htmlspecialchars($_POST['user']); ?>
</div>
| DR-PHP666/RMS | print_day_report.php | PHP | apache-2.0 | 3,222 |
/*
* Copyright (c) 2015 Ngewi Fet <ngewif@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gnucash.android.ui.common;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.LayoutRes;
import android.support.annotation.StringRes;
import android.support.design.widget.NavigationView;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.PopupMenu;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.uservoice.uservoicesdk.UserVoice;
import org.gnucash.android.R;
import org.gnucash.android.app.GnuCashApplication;
import org.gnucash.android.db.DatabaseSchema;
import org.gnucash.android.db.adapter.BooksDbAdapter;
import org.gnucash.android.ui.account.AccountsActivity;
import org.gnucash.android.ui.passcode.PasscodeLockActivity;
import org.gnucash.android.ui.report.ReportsActivity;
import org.gnucash.android.ui.settings.PreferenceActivity;
import org.gnucash.android.ui.transaction.ScheduledActionsActivity;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Base activity implementing the navigation drawer, to be extended by all activities requiring one.
* <p>
* Each activity inheriting from this class has an indeterminate progress bar at the top,
* (above the action bar) which can be used to display busy operations. See {@link #getProgressBar()}
* </p>
*
* <p>Sub-classes should simply provide their layout using {@link #getContentView()} and then annotate
* any variables they wish to use with {@link ButterKnife#bind(Activity)} annotations. The view
* binding will be done in this base abstract class.<br>
* The activity layout of the subclass is expected to contain {@code DrawerLayout} and
* a {@code NavigationView}.<br>
* Sub-class should also consider using the {@code toolbar.xml} or {@code toolbar_with_spinner.xml}
* for the action bar in their XML layout. Otherwise provide another which contains widgets for the
* toolbar and progress indicator with the IDs {@code R.id.toolbar} and {@code R.id.progress_indicator} respectively.
* </p>
* @author Ngewi Fet <ngewif@gmail.com>
*/
public abstract class BaseDrawerActivity extends PasscodeLockActivity implements
        PopupMenu.OnMenuItemClickListener {

    // Menu item id for the "manage books" entry shown via the popup menu.
    public static final int ID_MANAGE_BOOKS = 0xB00C;

    // Views bound by ButterKnife in onCreate().
    @BindView(R.id.drawer_layout) DrawerLayout mDrawerLayout;
    @BindView(R.id.nav_view) NavigationView mNavigationView;
    @BindView(R.id.toolbar) Toolbar mToolbar;
    @BindView(R.id.toolbar_progress) ProgressBar mToolbarProgress;

    // Shows the active book's name in the navigation drawer header.
    protected TextView mBookNameTextView;

    // Ties the drawer's open/close state to the toolbar home affordance.
    protected ActionBarDrawerToggle mDrawerToggle;

    // Request code for picking a document; presumably used with an
    // ACTION_OPEN_DOCUMENT intent -- confirm at the call site.
    public static final int REQUEST_OPEN_DOCUMENT = 0x20;
private class DrawerItemClickListener implements NavigationView.OnNavigationItemSelectedListener {
@Override
public boolean onNavigationItemSelected(MenuItem menuItem) {
onDrawerMenuItemClicked(menuItem.getItemId());
return true;
}
}
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(getContentView());

        //if a parameter was passed to open an account within a specific book, then switch
        String bookUID = getIntent().getStringExtra(UxArgument.BOOK_UID);
        if (bookUID != null && !bookUID.equals(BooksDbAdapter.getInstance().getActiveBookUID())){
            GnuCashApplication.activateBook(bookUID);
        }

        // Bind the @BindView-annotated fields declared on this class.
        ButterKnife.bind(this);
        setSupportActionBar(mToolbar);
        final ActionBar actionBar = getSupportActionBar();
        if (actionBar != null){
            actionBar.setHomeButtonEnabled(true);
            actionBar.setDisplayHomeAsUpEnabled(true);
            actionBar.setTitle(getTitleRes());
        }

        // Tint the indeterminate progress drawable white so it stands out
        // against the toolbar.
        mToolbarProgress.getIndeterminateDrawable().setColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN);

        // Drawer header: delegate taps on the app title and book name to
        // onClickAppTitle / onClickBook.
        View headerView = mNavigationView.getHeaderView(0);
        headerView.findViewById(R.id.drawer_title).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onClickAppTitle(v);
            }
        });

        mBookNameTextView = (TextView) headerView.findViewById(R.id.book_name);
        mBookNameTextView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onClickBook(v);
            }
        });
        updateActiveBookName();

        setUpNavigationDrawer();
    }
    /** Refreshes the book name in the drawer header whenever the activity returns to the foreground. */
    @Override
    protected void onResume() {
        super.onResume();
        updateActiveBookName();
    }
/**
* Return the layout to inflate for this activity
* @return Layout resource identifier
*/
public abstract @LayoutRes int getContentView();
/**
* Return the title for this activity.
* This will be displayed in the action bar
* @return String resource identifier
*/
public abstract @StringRes int getTitleRes();
/**
* Returns the progress bar for the activity.
* <p>This progress bar is displayed above the toolbar and should be used to show busy status
* for long operations.<br/>
* The progress bar visibility is set to {@link View#GONE} by default. Make visible to use </p>
* @return Indeterminate progress bar.
*/
public ProgressBar getProgressBar(){
return mToolbarProgress;
}
/**
* Sets up the navigation drawer for this activity.
*/
private void setUpNavigationDrawer() {
mNavigationView.setNavigationItemSelectedListener(new DrawerItemClickListener());
mDrawerToggle = new ActionBarDrawerToggle(
this, /* host Activity */
mDrawerLayout, /* DrawerLayout object */
R.string.drawer_open, /* "open drawer" description */
R.string.drawer_close /* "close drawer" description */
) {
/** Called when a drawer has settled in a completely closed state. */
public void onDrawerClosed(View view) {
super.onDrawerClosed(view);
}
/** Called when a drawer has settled in a completely open state. */
public void onDrawerOpened(View drawerView) {
super.onDrawerOpened(drawerView);
}
};
mDrawerLayout.setDrawerListener(mDrawerToggle);
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
mDrawerToggle.syncState();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mDrawerToggle.onConfigurationChanged(newConfig);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home){
if (!mDrawerLayout.isDrawerOpen(mNavigationView))
mDrawerLayout.openDrawer(mNavigationView);
else
mDrawerLayout.closeDrawer(mNavigationView);
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Update the display name of the currently active book
*/
protected void updateActiveBookName(){
mBookNameTextView.setText(BooksDbAdapter.getInstance().getActiveBookDisplayName());
}
/**
* Handler for the navigation drawer items
* */
protected void onDrawerMenuItemClicked(int itemId) {
switch (itemId){
case R.id.nav_item_open: { //Open... files
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT){
//use the storage access framework
Intent openDocument = new Intent(Intent.ACTION_OPEN_DOCUMENT);
openDocument.addCategory(Intent.CATEGORY_OPENABLE);
openDocument.setType("*/*");
startActivityForResult(openDocument, REQUEST_OPEN_DOCUMENT);
} else {
AccountsActivity.startXmlFileChooser(this);
}
}
break;
case R.id.nav_item_favorites: { //favorite accounts
Intent intent = new Intent(this, AccountsActivity.class);
intent.putExtra(AccountsActivity.EXTRA_TAB_INDEX,
AccountsActivity.INDEX_FAVORITE_ACCOUNTS_FRAGMENT);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP|Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
}
break;
case R.id.nav_item_reports: {
Intent intent = new Intent(this, ReportsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
}
break;
/*
//todo: Re-enable this when Budget UI is complete
case R.id.nav_item_budgets:
startActivity(new Intent(this, BudgetsActivity.class));
break;
*/
case R.id.nav_item_scheduled_actions: { //show scheduled transactions
Intent intent = new Intent(this, ScheduledActionsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
}
break;
case R.id.nav_item_export:
AccountsActivity.openExportFragment(this);
break;
case R.id.nav_item_settings: //Settings activity
startActivity(new Intent(this, PreferenceActivity.class));
break;
case R.id.nav_item_help:
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
prefs.edit().putBoolean(UxArgument.SKIP_PASSCODE_SCREEN, true).apply();
UserVoice.launchUserVoice(this);
break;
}
mDrawerLayout.closeDrawer(mNavigationView);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == Activity.RESULT_CANCELED) {
super.onActivityResult(requestCode, resultCode, data);
return;
}
switch (requestCode) {
case AccountsActivity.REQUEST_PICK_ACCOUNTS_FILE:
AccountsActivity.importXmlFileFromIntent(this, data, null);
break;
case BaseDrawerActivity.REQUEST_OPEN_DOCUMENT: //this uses the Storage Access Framework
final int takeFlags = data.getFlags()
& (Intent.FLAG_GRANT_READ_URI_PERMISSION | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
AccountsActivity.importXmlFileFromIntent(this, data, null);
getContentResolver().takePersistableUriPermission(data.getData(), takeFlags);
break;
default:
super.onActivityResult(requestCode, resultCode, data);
break;
}
}
@Override
public boolean onMenuItemClick(MenuItem item) {
long id = item.getItemId();
if (id == ID_MANAGE_BOOKS){
Intent intent = new Intent(this, PreferenceActivity.class);
intent.setAction(PreferenceActivity.ACTION_MANAGE_BOOKS);
startActivity(intent);
mDrawerLayout.closeDrawer(mNavigationView);
return true;
}
BooksDbAdapter booksDbAdapter = BooksDbAdapter.getInstance();
String bookUID = booksDbAdapter.getUID(id);
if (!bookUID.equals(booksDbAdapter.getActiveBookUID())){
GnuCashApplication.loadBook(bookUID);
finish();
}
AccountsActivity.start(GnuCashApplication.getAppContext());
return true;
}
public void onClickAppTitle(View view){
mDrawerLayout.closeDrawer(mNavigationView);
AccountsActivity.start(this);
}
public void onClickBook(View view){
PopupMenu popup = new PopupMenu(this, view);
popup.setOnMenuItemClickListener(this);
Menu menu = popup.getMenu();
int maxRecent = 0;
Cursor cursor = BooksDbAdapter.getInstance().fetchAllRecords(null, null,
DatabaseSchema.BookEntry.COLUMN_MODIFIED_AT + " DESC");
while (cursor.moveToNext() && maxRecent++ < 5) {
long id = cursor.getLong(cursor.getColumnIndexOrThrow(DatabaseSchema.BookEntry._ID));
String name = cursor.getString(cursor.getColumnIndexOrThrow(DatabaseSchema.BookEntry.COLUMN_DISPLAY_NAME));
menu.add(0, (int)id, maxRecent, name);
}
menu.add(0, ID_MANAGE_BOOKS, maxRecent, R.string.menu_manage_books);
popup.show();
}
}
| lxbzmy/gnucash-android | app/src/main/java/org/gnucash/android/ui/common/BaseDrawerActivity.java | Java | apache-2.0 | 13,912 |
// Returns the window object of the child iframe identified by name or index `o`.
function $childNode(o) {
    return window.frames[o]
}
// Applies an animate.css animation class `e` to element/selector `o` on hover;
// the classes are removed 2 seconds (2e3 ms) after the pointer leaves so the
// animation can replay on the next hover.
function animationHover(o, e) {
    o = $(o), o.hover(function () {
        o.addClass("animated " + e)
    }, function () {
        window.setTimeout(function () {
            o.removeClass("animated " + e)
        }, 2e3)
    })
}
// Makes grid-column panels drag-and-drop sortable by their ".ibox-title" bars
// (requires jQuery UI sortable). Panels can be moved between any column containers.
function WinMove() {
    var o = "[class*=col]", e = ".ibox-title", i = "[class*=col]";
    $(o).sortable({
        handle: e,
        connectWith: i,
        tolerance: "pointer",
        forcePlaceholderSize: !0,
        opacity: .8
    }).disableSelection()
}
// Document of the parent frame; this script is expected to run inside an iframe.
var $parentNode = window.parent.document;
// Page wiring (comma-chained inside the `if` condition, an artifact of minification):
// initialize tooltips and popovers, move modals to <body>, and hook up the
// ibox collapse (".collapse-link") and close (".close-link") buttons.
// The `top == this` branch body is empty here — presumably frame-busting logic
// was removed or lives elsewhere; TODO confirm against the unminified source.
if ($(".tooltip-demo").tooltip({
    selector: "[data-toggle=tooltip]",
    container: "body"
}), $(".modal").appendTo("body"), $("[data-toggle=popover]").popover(), $(".collapse-link").click(function () {
    var o = $(this).closest("div.ibox"), e = $(this).find("i"), i = o.find("div.ibox-content");
    i.slideToggle(200), e.toggleClass("fa-chevron-up").toggleClass("fa-chevron-down"), o.toggleClass("").toggleClass("border-bottom"), setTimeout(function () {
        o.resize(), o.find("[id^=map-]").resize()
    }, 50)
}), $(".close-link").click(function () {
    var o = $(this).closest("div.ibox");
    o.remove()
}), top == this) {
}
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import

import collections
import io
import json
import time

# fastavro is optional: only required to decode Avro-format streams.
try:
    import fastavro
except ImportError:  # pragma: NO COVER
    fastavro = None

import google.api_core.exceptions
import google.rpc.error_details_pb2

# pandas is optional: only required for the to_dataframe() helpers.
try:
    import pandas
except ImportError:  # pragma: NO COVER
    pandas = None

# pyarrow is optional: only required to decode Arrow-format streams.
# (This import was previously attempted twice in a row; once is sufficient.)
try:
    import pyarrow
except ImportError:  # pragma: NO COVER
    pyarrow = None

# Transport-level errors that are always safe to resume from.
_STREAM_RESUMPTION_EXCEPTIONS = (
    google.api_core.exceptions.ServiceUnavailable,
    # Caused by transport-level error. No status code was received.
    # https://github.com/googleapis/python-bigquery-storage/issues/262
    google.api_core.exceptions.Unknown,
)

# The Google API endpoint can unexpectedly close long-running HTTP/2 streams.
# Unfortunately, this condition is surfaced to the caller as an internal error
# by gRPC. We don't want to resume on all internal errors, so instead we look
# for error message that we know are caused by problems that are safe to
# reconnect.
_STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES = (
    # See: https://github.com/googleapis/google-cloud-python/pull/9994
    "RST_STREAM",
)

_FASTAVRO_REQUIRED = (
    "fastavro is required to parse ReadRowResponse messages with Avro bytes."
)
_PANDAS_REQUIRED = "pandas is required to create a DataFrame"
_PYARROW_REQUIRED = (
    "pyarrow is required to parse ReadRowResponse messages with Arrow bytes."
)
class ReadRowsStream(object):
    """A stream of results from a read rows request.
    This stream is an iterable of
    :class:`~google.cloud.bigquery_storage_v1.types.ReadRowsResponse`.
    Iterate over it to fetch all row messages.
    If the fastavro library is installed, use the
    :func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.rows()`
    method to parse all messages into a stream of row dictionaries.
    If the pandas and fastavro libraries are installed, use the
    :func:`~google.cloud.bigquery_storage_v1.reader.ReadRowsStream.to_dataframe()`
    method to parse all messages into a :class:`pandas.DataFrame`.
    This object should not be created directly, but is returned by
    other methods in this library.
    """
    def __init__(
        self, client, name, offset, read_rows_kwargs, retry_delay_callback=None
    ):
        """Construct a ReadRowsStream.
        Args:
            client ( \
                ~google.cloud.bigquery_storage_v1.services. \
                    big_query_read.BigQueryReadClient \
            ):
                A GAPIC client used to reconnect to a ReadRows stream. This
                must be the GAPIC client to avoid a circular dependency on
                this class.
            name (str):
                Required. Stream ID from which rows are being read.
            offset (int):
                Required. Position in the stream to start
                reading from. The offset requested must be less than the last
                row read from ReadRows. Requesting a larger offset is
                undefined.
            read_rows_kwargs (dict):
                Keyword arguments to use when reconnecting to a ReadRows
                stream.
            retry_delay_callback (Optional[Callable[[float], None]]):
                If the client receives a retryable error that asks the client to
                delay its next attempt and retry_delay_callback is not None,
                ReadRowsStream will call retry_delay_callback with the delay
                duration (in seconds) before it starts sleeping until the next
                attempt.
        Returns:
            Iterable[ \
                ~google.cloud.bigquery_storage.types.ReadRowsResponse \
            ]:
                A sequence of row messages.
        """
        # Make a copy of the read position so that we can update it without
        # mutating the original input.
        self._client = client
        self._name = name
        self._offset = offset
        self._read_rows_kwargs = read_rows_kwargs
        self._retry_delay_callback = retry_delay_callback
        # The underlying gRPC stream; created lazily on first iteration and
        # replaced on every reconnect.
        self._wrapped = None
    def __iter__(self):
        """An iterable of messages.
        Returns:
            Iterable[ \
                ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
            ]:
                A sequence of row messages.
        """
        # Infinite loop to reconnect on reconnectable errors while processing
        # the row stream.
        if self._wrapped is None:
            self._reconnect()
        while True:
            try:
                for message in self._wrapped:
                    rowcount = message.row_count
                    # Advance the resume offset so that a reconnect does not
                    # re-deliver rows already yielded to the caller.
                    self._offset += rowcount
                    yield message
                return  # Made it through the whole stream.
            except google.api_core.exceptions.InternalServerError as exc:
                # Only resume on internal errors whose message matches a
                # known-safe pattern (e.g. RST_STREAM); re-raise otherwise.
                resumable_error = any(
                    resumable_message in exc.message
                    for resumable_message in _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES
                )
                if not resumable_error:
                    raise
            except _STREAM_RESUMPTION_EXCEPTIONS:
                # Transient error, so reconnect to the stream.
                pass
            except Exception as exc:
                # ResourceExhausted with server-provided RetryInfo is also
                # retryable (after sleeping); anything else propagates.
                if not self._resource_exhausted_exception_is_retryable(exc):
                    raise
            self._reconnect()
    def _reconnect(self):
        """Reconnect to the ReadRows stream using the most recent offset."""
        while True:
            try:
                self._wrapped = self._client.read_rows(
                    read_stream=self._name,
                    offset=self._offset,
                    **self._read_rows_kwargs
                )
                break
            except Exception as exc:
                if not self._resource_exhausted_exception_is_retryable(exc):
                    raise
    def _resource_exhausted_exception_is_retryable(self, exc):
        # Returns True (after sleeping for the server-requested delay) only
        # for ResourceExhausted errors that carry a RetryInfo detail.
        if isinstance(exc, google.api_core.exceptions.ResourceExhausted):
            # ResourceExhausted errors are only retried if a valid
            # RetryInfo is provided with the error.
            #
            # TODO: Remove hasattr logic when we require google-api-core >= 2.2.0.
            # ResourceExhausted added details/_details in google-api-core 2.2.0.
            details = None
            if hasattr(exc, "details"):
                details = exc.details
            elif hasattr(exc, "_details"):
                details = exc._details
            if details is not None:
                for detail in details:
                    if isinstance(detail, google.rpc.error_details_pb2.RetryInfo):
                        retry_delay = detail.retry_delay
                        if retry_delay is not None:
                            # Convert the protobuf Duration (seconds + nanos)
                            # to float seconds, clamped at zero.
                            delay = max(
                                0,
                                float(retry_delay.seconds)
                                + (float(retry_delay.nanos) / 1e9),
                            )
                            if self._retry_delay_callback:
                                self._retry_delay_callback(delay)
                            time.sleep(delay)
                            return True
        return False
    def rows(self, read_session=None):
        """Iterate over all rows in the stream.
        This method requires the fastavro library in order to parse row
        messages in avro format.  For arrow format messages, the pyarrow
        library is required.
        .. warning::
            DATETIME columns are not supported. They are currently parsed as
            strings in the fastavro library.
        Args:
            read_session ( \
                Optional[~google.cloud.bigquery_storage_v1.types.ReadSession] \
            ):
                DEPRECATED.
                This argument was used to specify the schema of the rows in the
                stream, but now the first message in a read stream contains
                this information.
        Returns:
            Iterable[Mapping]:
                A sequence of rows, represented as dictionaries.
        """
        return ReadRowsIterable(self, read_session=read_session)
    def to_arrow(self, read_session=None):
        """Create a :class:`pyarrow.Table` of all rows in the stream.
        This method requires the pyarrow library and a stream using the Arrow
        format.
        Args:
            read_session ( \
                ~google.cloud.bigquery_storage_v1.types.ReadSession \
            ):
                DEPRECATED.
                This argument was used to specify the schema of the rows in the
                stream, but now the first message in a read stream contains
                this information.
        Returns:
            pyarrow.Table:
                A table of all rows in the stream.
        """
        return self.rows(read_session=read_session).to_arrow()
    def to_dataframe(self, read_session=None, dtypes=None):
        """Create a :class:`pandas.DataFrame` of all rows in the stream.
        This method requires the pandas libary to create a data frame and the
        fastavro library to parse row messages.
        .. warning::
            DATETIME columns are not supported. They are currently parsed as
            strings.
        Args:
            read_session ( \
                ~google.cloud.bigquery_storage_v1.types.ReadSession \
            ):
                DEPRECATED.
                This argument was used to specify the schema of the rows in the
                stream, but now the first message in a read stream contains
                this information.
            dtypes ( \
                Map[str, Union[str, pandas.Series.dtype]] \
            ):
                Optional. A dictionary of column names pandas ``dtype``s. The
                provided ``dtype`` is used when constructing the series for
                the column specified. Otherwise, the default pandas behavior
                is used.
        Returns:
            pandas.DataFrame:
                A data frame of all rows in the stream.
        """
        if pandas is None:
            raise ImportError(_PANDAS_REQUIRED)
        return self.rows(read_session=read_session).to_dataframe(dtypes=dtypes)
class ReadRowsIterable(object):
    """An iterable of rows from a read session.
    Args:
        reader (google.cloud.bigquery_storage_v1.reader.ReadRowsStream):
            A read rows stream.
        read_session ( \
            Optional[~google.cloud.bigquery_storage_v1.types.ReadSession] \
        ):
            DEPRECATED.
            This argument was used to specify the schema of the rows in the
            stream, but now the first message in a read stream contains
            this information.
    """
    # This class is modelled after the google.cloud.bigquery.table.RowIterator
    # and aims to be API compatible where possible.
    def __init__(self, reader, read_session=None):
        self._reader = reader
        # When no read_session is given, the parser is created lazily from
        # the first message in the stream (see the `pages` property).
        if read_session is not None:
            self._stream_parser = _StreamParser.from_read_session(read_session)
        else:
            self._stream_parser = None
    @property
    def pages(self):
        """A generator of all pages in the stream.
        Returns:
            types.GeneratorType[google.cloud.bigquery_storage_v1.ReadRowsPage]:
                A generator of pages.
        """
        # Each page is an iterator of rows. But also has num_items, remaining,
        # and to_dataframe.
        for message in self._reader:
            # Only the first message contains the schema, which is needed to
            # decode the messages.
            if not self._stream_parser:
                self._stream_parser = _StreamParser.from_read_rows_response(message)
            yield ReadRowsPage(self._stream_parser, message)
    def __iter__(self):
        """Iterator for each row in all pages."""
        for page in self.pages:
            for row in page:
                yield row
    def to_arrow(self):
        """Create a :class:`pyarrow.Table` of all rows in the stream.
        This method requires the pyarrow library and a stream using the Arrow
        format.
        Returns:
            pyarrow.Table:
                A table of all rows in the stream.
        """
        record_batches = []
        for page in self.pages:
            record_batches.append(page.to_arrow())
        if record_batches:
            return pyarrow.Table.from_batches(record_batches)
        # No data, return an empty Table.
        self._stream_parser._parse_arrow_schema()
        return pyarrow.Table.from_batches([], schema=self._stream_parser._schema)
    def to_dataframe(self, dtypes=None):
        """Create a :class:`pandas.DataFrame` of all rows in the stream.
        This method requires the pandas libary to create a data frame and the
        fastavro library to parse row messages.
        .. warning::
            DATETIME columns are not supported. They are currently parsed as
            strings in the fastavro library.
        Args:
            dtypes ( \
                Map[str, Union[str, pandas.Series.dtype]] \
            ):
                Optional. A dictionary of column names pandas ``dtype``s. The
                provided ``dtype`` is used when constructing the series for
                the column specified. Otherwise, the default pandas behavior
                is used.
        Returns:
            pandas.DataFrame:
                A data frame of all rows in the stream.
        """
        if pandas is None:
            raise ImportError(_PANDAS_REQUIRED)
        if dtypes is None:
            dtypes = {}
        # If it's an Arrow stream, calling to_arrow, then converting to a
        # pandas dataframe is about 2x faster. This is because pandas.concat is
        # rarely no-copy, whereas pyarrow.Table.from_batches + to_pandas is
        # usually no-copy.
        try:
            record_batch = self.to_arrow()
        except NotImplementedError:
            # Avro streams raise NotImplementedError from to_arrow; fall
            # through to the per-page concat path below.
            pass
        else:
            df = record_batch.to_pandas()
            for column in dtypes:
                df[column] = pandas.Series(df[column], dtype=dtypes[column])
            return df
        frames = [page.to_dataframe(dtypes=dtypes) for page in self.pages]
        if frames:
            return pandas.concat(frames)
        # No data, construct an empty dataframe with columns matching the schema.
        # The result should be consistent with what an empty ARROW stream would produce.
        self._stream_parser._parse_avro_schema()
        schema = self._stream_parser._avro_schema_json
        column_dtypes = self._dtypes_from_avro(schema["fields"])
        column_dtypes.update(dtypes)
        df = pandas.DataFrame(columns=column_dtypes.keys())
        for column in df:
            df[column] = pandas.Series([], dtype=column_dtypes[column])
        return df
    def _dtypes_from_avro(self, avro_fields):
        """Determine Pandas dtypes for columns in Avro schema.
        Args:
            avro_fields (Iterable[Mapping[str, Any]]):
                Avro fields' metadata.
        Returns:
            colelctions.OrderedDict[str, str]:
                Column names with their corresponding Pandas dtypes.
        """
        result = collections.OrderedDict()
        type_map = {"long": "int64", "double": "float64", "boolean": "bool"}
        for field_info in avro_fields:
            # If a type is an union of multiple types, pick the first type
            # that is not "null".
            if isinstance(field_info["type"], list):
                type_info = next(item for item in field_info["type"] if item != "null")
            if isinstance(type_info, str):
                field_dtype = type_map.get(type_info, "object")
            else:
                logical_type = type_info.get("logicalType")
                if logical_type == "timestamp-micros":
                    field_dtype = "datetime64[ns, UTC]"
                else:
                    field_dtype = "object"
            result[field_info["name"]] = field_dtype
        return result
class ReadRowsPage(object):
    """An iterator over the rows contained in a single read stream message.
    Args:
        stream_parser (google.cloud.bigquery_storage_v1.reader._StreamParser):
            Helper that decodes stream messages into rows.
        message (google.cloud.bigquery_storage_v1.types.ReadRowsResponse):
            One message of data from a read rows stream.
    """
    # Modeled after google.api_core.page_iterator.Page to stay API
    # compatible where possible.
    def __init__(self, stream_parser, message):
        self._stream_parser = stream_parser
        self._message = message
        self._iter_rows = None
        self._num_items = message.row_count
        self._remaining = message.row_count
    def _parse_rows(self):
        """Decode the message into a row iterator, at most once."""
        if self._iter_rows is None:
            self._iter_rows = iter(self._stream_parser.to_rows(self._message))
    @property
    def num_items(self):
        """int: Total items in the page."""
        return self._num_items
    @property
    def remaining(self):
        """int: Remaining items in the page."""
        return self._remaining
    def __iter__(self):
        """A ``ReadRowsPage`` is its own iterator."""
        return self
    def next(self):
        """Get the next row in the page."""
        self._parse_rows()
        if self._remaining > 0:
            self._remaining -= 1
        return next(self._iter_rows)
    # Alias needed for Python 2/3 support.
    __next__ = next
    def to_arrow(self):
        """Create a :class:`pyarrow.RecordBatch` of the rows in this page.
        Returns:
            pyarrow.RecordBatch:
                Rows from the message, as an Arrow record batch.
        """
        return self._stream_parser.to_arrow(self._message)
    def to_dataframe(self, dtypes=None):
        """Create a :class:`pandas.DataFrame` of the rows in this page.
        Requires the pandas library (and fastavro for Avro-format streams).
        .. warning::
            DATETIME columns are not supported. They are currently parsed as
            strings in the fastavro library.
        Args:
            dtypes ( \
                Map[str, Union[str, pandas.Series.dtype]] \
            ):
                Optional. A dictionary of column names pandas ``dtype``s. The
                provided ``dtype`` is used when constructing the series for
                the column specified. Otherwise, the default pandas behavior
                is used.
        Returns:
            pandas.DataFrame:
                A data frame of all rows in the stream.
        """
        if pandas is None:
            raise ImportError(_PANDAS_REQUIRED)
        return self._stream_parser.to_dataframe(self._message, dtypes=dtypes)
class _StreamParser(object):
def to_arrow(self, message):
raise NotImplementedError("Not implemented.")
def to_dataframe(self, message, dtypes=None):
raise NotImplementedError("Not implemented.")
def to_rows(self, message):
raise NotImplementedError("Not implemented.")
def _parse_avro_schema(self):
raise NotImplementedError("Not implemented.")
def _parse_arrow_schema(self):
raise NotImplementedError("Not implemented.")
@staticmethod
def from_read_session(read_session):
schema_type = read_session._pb.WhichOneof("schema")
if schema_type == "avro_schema":
return _AvroStreamParser(read_session)
elif schema_type == "arrow_schema":
return _ArrowStreamParser(read_session)
else:
raise TypeError(
"Unsupported schema type in read_session: {0}".format(schema_type)
)
@staticmethod
def from_read_rows_response(message):
schema_type = message._pb.WhichOneof("schema")
if schema_type == "avro_schema":
return _AvroStreamParser(message)
elif schema_type == "arrow_schema":
return _ArrowStreamParser(message)
else:
raise TypeError(
"Unsupported schema type in message: {0}".format(schema_type)
)
class _AvroStreamParser(_StreamParser):
    """Helper to parse Avro messages into useful representations."""
    def __init__(self, message):
        """Construct an _AvroStreamParser.
        Args:
            message (Union[
                google.cloud.bigquery_storage_v1.types.ReadSession, \
                google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \
            ]):
                Either the first message of data from a read rows stream or a
                read session. Both types contain a oneof "schema" field, which
                can be used to determine how to deserialize rows.
        """
        if fastavro is None:
            raise ImportError(_FASTAVRO_REQUIRED)
        # The schema source; released (set to None) once the schema has
        # been parsed in _parse_avro_schema().
        self._first_message = message
        self._avro_schema_json = None
        self._fastavro_schema = None
        self._column_names = None
    def to_arrow(self, message):
        """Create an :class:`pyarrow.RecordBatch` of rows in the page.
        Args:
            message (google.cloud.bigquery_storage_v1.types.ReadRowsResponse):
                Protocol buffer from the read rows stream, to convert into an
                Arrow record batch.
        Returns:
            pyarrow.RecordBatch:
                Rows from the message, as an Arrow record batch.
        """
        raise NotImplementedError("to_arrow not implemented for Avro streams.")
    def to_dataframe(self, message, dtypes=None):
        """Create a :class:`pandas.DataFrame` of rows in the page.
        This method requires the pandas libary to create a data frame and the
        fastavro library to parse row messages.
        .. warning::
            DATETIME columns are not supported. They are currently parsed as
            strings in the fastavro library.
        Args:
            message ( \
                ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
            ):
                A message containing Avro bytes to parse into a pandas DataFrame.
            dtypes ( \
                Map[str, Union[str, pandas.Series.dtype]] \
            ):
                Optional. A dictionary of column names pandas ``dtype``s. The
                provided ``dtype`` is used when constructing the series for
                the column specified. Otherwise, the default pandas behavior
                is used.
        Returns:
            pandas.DataFrame:
                A data frame of all rows in the stream.
        """
        self._parse_avro_schema()
        if dtypes is None:
            dtypes = {}
        # Accumulate values column-by-column, then build the frame in the
        # schema's column order.
        columns = collections.defaultdict(list)
        for row in self.to_rows(message):
            for column in row:
                columns[column].append(row[column])
        for column in dtypes:
            columns[column] = pandas.Series(columns[column], dtype=dtypes[column])
        return pandas.DataFrame(columns, columns=self._column_names)
    def _parse_avro_schema(self):
        """Extract and parse Avro schema from a read session."""
        # Idempotent: the schema is parsed at most once.
        if self._avro_schema_json:
            return
        self._avro_schema_json = json.loads(self._first_message.avro_schema.schema)
        self._column_names = tuple(
            (field["name"] for field in self._avro_schema_json["fields"])
        )
        # Drop the reference so the (potentially large) message can be freed.
        self._first_message = None
    def _parse_fastavro(self):
        """Convert parsed Avro schema to fastavro format."""
        self._parse_avro_schema()
        self._fastavro_schema = fastavro.parse_schema(self._avro_schema_json)
    def to_rows(self, message):
        """Parse all rows in a stream message.
        Args:
            message ( \
                ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
            ):
                A message containing Avro bytes to parse into rows.
        Returns:
            Iterable[Mapping]:
                A sequence of rows, represented as dictionaries.
        """
        self._parse_fastavro()
        messageio = io.BytesIO(message.avro_rows.serialized_binary_rows)
        while True:
            # Loop in a while loop because schemaless_reader can only read
            # a single record.
            try:
                # TODO: Parse DATETIME into datetime.datetime (no timezone),
                #       instead of as a string.
                yield fastavro.schemaless_reader(messageio, self._fastavro_schema)
            except StopIteration:
                break  # Finished with message
class _ArrowStreamParser(_StreamParser):
    """Helper to parse Arrow messages into useful representations."""
    def __init__(self, message):
        """Construct an _ArrowStreamParser.
        Args:
            message (Union[
                google.cloud.bigquery_storage_v1.types.ReadSession, \
                google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \
            ]):
                Either the first message of data from a read rows stream or a
                read session. Both types contain a oneof "schema" field, which
                can be used to determine how to deserialize rows.
        """
        if pyarrow is None:
            raise ImportError(_PYARROW_REQUIRED)
        # The schema source; released (set to None) once the schema has
        # been parsed in _parse_arrow_schema().
        self._first_message = message
        self._schema = None
    def to_arrow(self, message):
        # Decode the message into a pyarrow.RecordBatch.
        return self._parse_arrow_message(message)
    def to_rows(self, message):
        record_batch = self._parse_arrow_message(message)
        # Iterate through each column simultaneously, and make a dict from the
        # row values
        for row in zip(*record_batch.columns):
            yield dict(zip(self._column_names, row))
    def to_dataframe(self, message, dtypes=None):
        # Decode the batch, convert to pandas, then apply any caller-requested
        # column dtype overrides.
        record_batch = self._parse_arrow_message(message)
        if dtypes is None:
            dtypes = {}
        df = record_batch.to_pandas()
        for column in dtypes:
            df[column] = pandas.Series(df[column], dtype=dtypes[column])
        return df
    def _parse_arrow_message(self, message):
        # Ensure the schema is available before decoding the record batch.
        self._parse_arrow_schema()
        return pyarrow.ipc.read_record_batch(
            pyarrow.py_buffer(message.arrow_record_batch.serialized_record_batch),
            self._schema,
        )
    def _parse_arrow_schema(self):
        # Idempotent: the schema is parsed at most once.
        if self._schema:
            return
        self._schema = pyarrow.ipc.read_schema(
            pyarrow.py_buffer(self._first_message.arrow_schema.serialized_schema)
        )
        self._column_names = [field.name for field in self._schema]
        # Drop the reference so the (potentially large) message can be freed.
        self._first_message = None
| googleapis/python-bigquery-storage | google/cloud/bigquery_storage_v1/reader.py | Python | apache-2.0 | 27,503 |
/*
Copyright 2019 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { Map, Set } from 'immutable';
import React from 'react';
import { Link } from 'react-router-dom';
import { OpenInNew, PersonAdd } from '@material-ui/icons/';
import { IArticleModel, ICategoryModel, IUserModel, ModelId } from '../../../models';
import { Avatar, MagicTimestamp, PseudoAvatar } from '../../components';
import { COMMON_STYLES, IMAGE_BASE } from '../../stylesx';
import { css, stylesheet } from '../../utilx';
// Props for ModeratorsWidget: the user lookup map, the assigned and
// inherited ("super") moderator ids, and a callback to open the
// moderator-assignment dialog.
interface IModeratorsWidgetProps {
  users: Map<string, IUserModel>;
  moderatorIds: Array<ModelId>;
  superModeratorIds: Array<ModelId>;

  openSetModerators(): void;
}

// Layout for the avatar cluster: wrap and center the avatars.
export const MODERATOR_WIDGET_STYLES = stylesheet({
  widget: {
    display: 'flex',
    flexWrap: 'wrap',
    justifyContent: 'center',
  },
});
/**
 * Renders the moderators assigned to an item as a cluster of avatars.
 * Zero moderators shows an "add person" placeholder; one shows a full-size
 * avatar; several show up to four small avatars, collapsing to three plus a
 * "+n" badge when there are more than four. Clicking anywhere opens the
 * moderator-assignment dialog.
 */
export function ModeratorsWidget(props: IModeratorsWidgetProps) {
  const { users, moderatorIds, superModeratorIds } = props;

  // Merge regular and "super" moderators; the Set removes duplicates.
  let s = Set(moderatorIds);
  if (superModeratorIds) {
    s = s.merge(superModeratorIds);
  }
  const moderators = s.toArray().map((uid: string) => users.get(uid));

  if (moderators.length === 0) {
    return (
      <div onClick={props.openSetModerators} {...css(MODERATOR_WIDGET_STYLES.widget)}>
        <PseudoAvatar size={IMAGE_BASE}>
          <PersonAdd/>
        </PseudoAvatar>
      </div>
    );
  }

  if (moderators.length === 1) {
    const u = moderators[0];
    return (
      <div onClick={props.openSetModerators} {...css(MODERATOR_WIDGET_STYLES.widget)}>
        <Avatar target={u} size={IMAGE_BASE}/>
      </div>
    );
  }

  // Up to 4 moderators: show them all; more than 4: show 3 plus a "+n" badge.
  // (The previous `else if (limit === 4) limit = 4;` branch was a no-op and
  // has been removed.)
  const ret = [];
  let limit = moderators.length;
  let extra = false;
  if (limit > 4) {
    limit = 3;
    extra = true;
  }

  for (let i = 0; i < limit; i++) {
    // key props added: React requires stable keys on elements built in a loop.
    ret.push(<Avatar key={i} target={moderators[i]} size={IMAGE_BASE / 2}/>);
  }
  if (extra) {
    ret.push(<PseudoAvatar key="extra" size={IMAGE_BASE / 2}>+{moderators.length - 3}</PseudoAvatar>);
  }

  return (
    <div onClick={props.openSetModerators} {...css(MODERATOR_WIDGET_STYLES.widget)}>
      {ret}
    </div>
  );
}
// Styles for TitleCell: a small "supertext" line (category + timestamp)
// above the article title and its external link.
export const TITLE_CELL_STYLES = stylesheet({
  superText: {
    fontSize: '10px',
    fontWeight: '600',
    color: 'rgba(0,0,0,0.54)',
  },
  categoryLabel: {
    textTransform: 'uppercase',
    marginRight: '12px',
  },
  mainText: {
    display: 'flex',
  },
  mainTextText: {
    lineHeight: '20px',
  },
  mainTextLink: {
    padding: '0 10px',
    color: 'rgba(0,0,0,0.54)',
  },
});

// Props for TitleCell: the article to render, its (optional) category,
// and the in-app route the title links to.
interface ITitleCellProps {
  category?: ICategoryModel;
  article: IArticleModel;
  link: string;
}
/**
 * Renders a table cell for an article: an optional "supertext" line with the
 * category label and publication timestamp, the article title linking to the
 * in-app route, and (when the article has a URL) an external-link icon that
 * opens the original article in a new tab.
 */
export function TitleCell(props: ITitleCellProps) {
  const { category, article, link } = props;

  // Assemble the optional supertext line piece by piece.
  const supertext = [];
  if (category) {
    supertext.push(
      <span key="label" {...css(TITLE_CELL_STYLES.categoryLabel)}>{category.label}</span>,
    );
  }
  if (article.sourceCreatedAt) {
    supertext.push(
      <span key="timestamp">
        <MagicTimestamp timestamp={article.sourceCreatedAt} inFuture={false}/>
      </span>,
    );
  }

  const hasSupertext = supertext.length > 0;

  return (
    <>
      {hasSupertext && <div {...css(TITLE_CELL_STYLES.superText)}>{supertext}</div>}
      <div {...css(TITLE_CELL_STYLES.mainText)}>
        <div>
          <Link to={link} {...css(COMMON_STYLES.cellLink, TITLE_CELL_STYLES.mainTextText)}>
            {article.title}
          </Link>
        </div>
        {article.url && (
          <div {...css(TITLE_CELL_STYLES.mainTextLink)}>
            <a key="link" href={article.url} target="_blank" {...css(COMMON_STYLES.cellLink)}>
              <OpenInNew fontSize="small" />
            </a>
          </div>
        )}
      </div>
    </>
  );
}
| conversationai/conversationai-moderator | packages/frontend-web/src/app/scenes/Tables/components.tsx | TypeScript | apache-2.0 | 4,167 |
from capstone import *
from .architecture import Architecture
from avatar2.installer.config import GDB_X86, OPENOCD
class X86(Architecture):
    """Avatar2 architecture description for 32-bit x86.

    Provides the tool names (QEMU/GDB/OpenOCD), the general-purpose
    register numbering, capstone disassembler settings, and the special
    (SSE/AVX) register access expressions for GDB.
    """

    get_gdb_executable = Architecture.resolve(GDB_X86)
    get_oocd_executable = Architecture.resolve(OPENOCD)

    qemu_name = 'i386'
    gdb_name = 'i386'

    # General-purpose register name -> GDB register number.
    # 'pc' is an alias for 'eip'.
    registers = {'eax': 0,
                 'ecx': 1,
                 'edx': 2,
                 'ebx': 3,
                 'esp': 4,
                 'ebp': 5,
                 'esi': 6,
                 'edi': 7,
                 'eip': 8,
                 'pc': 8,
                 'eflags': 9,
                 'cs': 10,
                 'ss': 11,
                 'ds': 12,
                 'es': 13,
                 'fs': 14,
                 'gs': 15, }

    # SSE (xmm0-15) and AVX (ymm0-15) registers are wider than a machine
    # word, so each is read via an explicit GDB expression and rendered
    # with a dedicated format string.  The tables are generated here
    # instead of being written out 32 times by hand; the resulting dict
    # is identical (xmm0..xmm15 followed by ymm0..ymm15).
    special_registers = dict(
        [('xmm%d' % i,
          {'format': '{{{:d}, {:d}, {:d}, {:d}}}',
           'gdb_expression': '$xmm%d.v4_int32' % i})
         for i in range(16)] +
        [('ymm%d' % i,
          {'format': '{{{:d}, {:d}, {:d}, {:d}, {:d}, {:d}, {:d}, {:d}}}',
           'gdb_expression': '$ymm%d.v8_int32' % i})
         for i in range(16)]
    )

    # Status register name used by generic avatar2 code.
    sr_name = 'eflags'

    unemulated_instructions = []
    capstone_arch = CS_ARCH_X86
    capstone_mode = CS_MODE_32
    word_size = 32
class X86_64(X86):
    """Avatar2 architecture description for 64-bit x86 (x86_64/AMD64).

    Inherits the SSE/AVX special-register handling and capstone arch from
    :class:`X86`; overrides the register map, word size, and tool names.
    """

    qemu_name = 'x86_64'
    gdb_name = 'i386:x86-64'

    # General-purpose register name -> GDB register number.
    # 'pc' is an alias for 'rip'.
    registers = {'rax': 0,
                 'rbx': 1,
                 'rcx': 2,
                 'rdx': 3,
                 'rsi': 4,
                 'rdi': 5,
                 'rbp': 6,
                 'rsp': 7,
                 'r8': 8,
                 'r9': 9,
                 'r10': 10,
                 'r11': 11,
                 'r12': 12,
                 'r13': 13,
                 'r14': 14,
                 'r15': 15,
                 'rip': 16,
                 'pc': 16,
                 'eflags': 17,
                 'cs': 18,
                 'ss': 19,
                 'ds': 20,
                 'es': 21,
                 'fs': 22,
                 'gs': 23,
                 }

    unemulated_instructions = []
    # Note: the original assigned capstone_mode twice; once is enough.
    capstone_mode = CS_MODE_64
    word_size = 64
| avatartwo/avatar2 | avatar2/archs/x86.py | Python | apache-2.0 | 6,401 |
# Copyright (c) 2015 Intel Research and Development Ireland Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import instantiation_validation_benchmark as base
from experimental_framework import common
NUM_OF_NEIGHBORS = 'num_of_neighbours'
AMOUNT_OF_RAM = 'amount_of_ram'
NUMBER_OF_CORES = 'number_of_cores'
NETWORK_NAME = 'network'
SUBNET_NAME = 'subnet'
class InstantiationValidationNoisyNeighborsBenchmark(
        base.InstantiationValidationBenchmark):
    """Instantiation validation benchmark that runs while "noisy neighbor"
    stress-workload stacks are deployed alongside the VNF under test.
    """

    def __init__(self, name, params):
        base.InstantiationValidationBenchmark.__init__(self, name, params)

        # Liberty requires a slightly different Heat template for the
        # stress workload.
        if common.RELEASE == 'liberty':
            temp_name = 'stress_workload_liberty.yaml'
        else:
            temp_name = 'stress_workload.yaml'

        self.template_file = common.get_template_dir() + \
            temp_name
        self.stack_name = 'neighbour'
        self.neighbor_stack_names = list()

    def get_features(self):
        """Return the benchmark description, parameter names, allowed values
        and defaults, extending the base benchmark's feature set with the
        noisy-neighbor knobs.
        """
        features = super(InstantiationValidationNoisyNeighborsBenchmark,
                         self).get_features()
        features['description'] = 'Instantiation Validation Benchmark ' \
                                  'with noisy neighbors'
        features['parameters'].append(NUM_OF_NEIGHBORS)
        features['parameters'].append(AMOUNT_OF_RAM)
        features['parameters'].append(NUMBER_OF_CORES)
        features['parameters'].append(NETWORK_NAME)
        features['parameters'].append(SUBNET_NAME)
        features['allowed_values'][NUM_OF_NEIGHBORS] = \
            ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
        features['allowed_values'][NUMBER_OF_CORES] = \
            ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
        features['allowed_values'][AMOUNT_OF_RAM] = \
            ['256M', '1G', '2G', '3G', '4G', '5G', '6G', '7G', '8G', '9G',
             '10G']
        features['default_values'][NUM_OF_NEIGHBORS] = '1'
        features['default_values'][NUMBER_OF_CORES] = '1'
        features['default_values'][AMOUNT_OF_RAM] = '256M'
        features['default_values'][NETWORK_NAME] = ''
        features['default_values'][SUBNET_NAME] = ''
        return features

    def init(self):
        """Deploy the noisy-neighbor stacks before the benchmark runs."""
        super(InstantiationValidationNoisyNeighborsBenchmark, self).init()

        # Point the traffic generator's Lua script at our results file.
        common.replace_in_file(self.lua_file, 'local out_file = ""',
                               'local out_file = "' +
                               self.results_file + '"')

        # Use the module-level parameter-name constants consistently
        # (the original mixed constants and raw string literals here).
        heat_param = dict()
        heat_param['network'] = self.params[NETWORK_NAME]
        heat_param['subnet'] = self.params[SUBNET_NAME]
        heat_param['cores'] = self.params[NUMBER_OF_CORES]
        heat_param['memory'] = self.params[AMOUNT_OF_RAM]

        for i in range(0, int(self.params[NUM_OF_NEIGHBORS])):
            stack_name = self.stack_name + str(i)
            common.DEPLOYMENT_UNIT.deploy_heat_template(self.template_file,
                                                        stack_name,
                                                        heat_param)
            self.neighbor_stack_names.append(stack_name)

    def finalize(self):
        """Undo init(): restore the Lua script and destroy neighbor stacks."""
        common.replace_in_file(self.lua_file, 'local out_file = "' +
                               self.results_file + '"',
                               'local out_file = ""')

        # destroy neighbor stacks
        for stack_name in self.neighbor_stack_names:
            common.DEPLOYMENT_UNIT.destroy_heat_template(stack_name)
        self.neighbor_stack_names = list()
| dtudares/hello-world | yardstick/yardstick/vTC/apexlake/experimental_framework/benchmarks/instantiation_validation_noisy_neighbors_benchmark.py | Python | apache-2.0 | 3,974 |
package de.nl.moo.data.loader.systems;
import de.nl.moo.data.beans.systems.SystemsApplyerBean;
import de.nl.moo.data.beans.systems.SystemsBean;
import de.nl.moo.data.beans.systems.SystemsSystemBean;
import de.nl.moo.data.dao.GameBeanDAO;
import de.nl.moo.data.loader.AbstractBeanLoader;
import org.springframework.beans.factory.annotation.Autowired;
import javax.inject.Provider;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
/**
 * Loads a {@link SystemsBean} from a DAO, resolving and loading the
 * referenced applyer and system bean files relative to the DAO's file.
 */
public class SystemsBeanLoader extends AbstractBeanLoader<SystemsBean> {

    @Autowired
    private SystemsBean systemsBean = null;

    @Autowired
    private Provider<SystemsSystemBeanLoader> systemLoaderProvider = null;

    @Autowired
    private Provider<SystemsApplyerBeanLoader> applyerLoaderProvider = null;

    public SystemsBeanLoader() {
        super();
    }

    @Override
    protected SystemsBean load(GameBeanDAO dao) {
        // Populate the injected bean with both child collections.
        this.systemsBean.setApplyers(this.loadApplyers(dao));
        this.systemsBean.setSystems(this.loadSystems(dao));
        return this.systemsBean;
    }

    // ##############################################

    /** Loads the applyer beans listed under the "applyers" key. */
    private List<SystemsApplyerBean> loadApplyers(GameBeanDAO dao) {
        // Referenced paths are relative to the directory of the DAO's file.
        Path baseDir = dao.getFile().getParent();

        List<SystemsApplyerBean> result = new ArrayList<>();
        for (String relativePath : dao.getList("applyers")) {
            result.add(this.loadApplyer(baseDir.resolve(relativePath)));
        }
        return result;
    }

    /** Loads a single applyer bean using a fresh loader instance. */
    private SystemsApplyerBean loadApplyer(Path path) {
        return this.applyerLoaderProvider.get().load(path);
    }

    // ##############################################

    /** Loads the system beans listed under the "systems" key. */
    private List<SystemsSystemBean> loadSystems(GameBeanDAO dao) {
        // Referenced paths are relative to the directory of the DAO's file.
        Path baseDir = dao.getFile().getParent();

        List<SystemsSystemBean> result = new ArrayList<>();
        for (String relativePath : dao.getList("systems")) {
            result.add(this.loadSystem(baseDir.resolve(relativePath)));
        }
        return result;
    }

    /** Loads a single system bean using a fresh loader instance. */
    private SystemsSystemBean loadSystem(Path path) {
        return this.systemLoaderProvider.get().load(path);
    }
}
| dayaftereh/master-of-orion | src/main/java/de/nl/moo/data/loader/systems/SystemsBeanLoader.java | Java | apache-2.0 | 2,608 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins.newui;
import com.intellij.ide.IdeBundle;
import com.intellij.ui.JBColor;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import java.awt.*;
/**
 * Colored tag label shown on plugin cards/details pages (e.g. "EAP",
 * "Paid", "Trial").  The background color is chosen from the tag name in
 * {@link #setText}; clicking anywhere on the tag counts as a click.
 *
 * @author Alexander Lobas
 */
public class TagComponent extends LinkComponent {
  // Background colors per tag kind; each is themable via a named UI default.
  private static final Color BACKGROUND = JBColor.namedColor("Plugins.tagBackground", new JBColor(0xEAEAEC, 0x4D4D4D));
  private static final Color EAP_BACKGROUND = JBColor.namedColor("Plugins.eapTagBackground", new JBColor(0xF2D2CF, 0xF2D2CF));
  private static final Color PAID_BACKGROUND = JBColor.namedColor("Plugins.paidTagBackground", new JBColor(0xD8EDF8, 0x3E505C));
  // NOTE(review): 0x345574E has seven hex digits; java.awt.Color keeps only the
  // low 24 bits, so the effective dark color is 0x45574E.  This looks like a
  // typo for a six-digit value -- confirm the intended dark variant.
  private static final Color TRIAL_BACKGROUND = JBColor.namedColor("Plugins.trialTagBackground", new JBColor(0xDBE8DD, 0x345574E));
  private static final Color FOREGROUND = JBColor.namedColor("Plugins.tagForeground", new JBColor(0x787878, 0x999999));

  // Background chosen for the current tag text; set in setText().
  private Color myColor;

  public TagComponent() {
    setForeground(FOREGROUND);
    setPaintUnderline(false);
    setOpaque(false);
    setBorder(JBUI.Borders.empty(1, 8));
  }

  public TagComponent(@NotNull @Nls String name) {
    this();
    setText(name);
  }

  /**
   * Sets the tag text and derives the background color and tooltip from the
   * well-known tag names in {@code Tags}; unknown names get the default
   * background and no tooltip.
   */
  @Override
  public void setText(@NotNull @Nls String name) {
    String tooltip = null;
    myColor = BACKGROUND;

    if (Tags.EAP.name().equals(name)) {
      myColor = EAP_BACKGROUND;
      tooltip = IdeBundle.message("tooltip.eap.plugin.version");
    }
    else if (Tags.Trial.name().equals(name) || Tags.Purchased.name().equals(name)) {
      myColor = TRIAL_BACKGROUND;
    }
    else if (Tags.Paid.name().equals(name) || Tags.Freemium.name().equals(name)) {
      myColor = PAID_BACKGROUND;
    }

    super.setText(name);
    setToolTipText(tooltip);
  }

  @Override
  protected void paintComponent(Graphics g) {
    // When hovered (myUnderline), paint the same color at ~70% alpha.
    //noinspection UseJBColor
    g.setColor(myUnderline ? new Color(myColor.getRed(), myColor.getGreen(), myColor.getBlue(), 178) : myColor);
    g.fillRect(0, 0, getWidth(), getHeight());
    super.paintComponent(g);
  }

  @Override
  protected boolean isInClickableArea(Point pt) {
    // The whole tag surface is clickable, not just the text.
    return true;
  }
}
package esilegacy
// GatewayTimeout is the error model returned when an upstream request
// exceeded the time it was given (HTTP 504 gateway timeout).
type GatewayTimeout struct {
	// Error_ holds the gateway timeout message.
	Error_ string `json:"error,omitempty"`
	// Timeout is the number of seconds the request was given.
	Timeout int32 `json:"timeout,omitempty"`
}
| antihax/mock-esi | legacy/go/model_gateway_timeout.go | GO | apache-2.0 | 242 |
/*
* Copyright 2013-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.consul.discovery;
import org.junit.jupiter.api.Test;
import org.springframework.cloud.commons.util.InetUtils;
import org.springframework.cloud.commons.util.InetUtilsProperties;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit test for {@link ConsulCatalogWatch} lifecycle handling.
 *
 * @author Spencer Gibb
 */
public class ConsulCatalogWatchTests {

	@Test
	public void isRunningReportsCorrectly() {
		ConsulDiscoveryProperties properties = new ConsulDiscoveryProperties(new InetUtils(new InetUtilsProperties()));
		// Override catalogServicesWatch() with a no-op so start()/stop() can be
		// exercised without a live Consul agent.
		ConsulCatalogWatch watch = new ConsulCatalogWatch(properties, null) {
			@Override
			public void catalogServicesWatch() {
				// do nothing
			}
		};
		// isRunning() must track the start()/stop() lifecycle transitions.
		assertThat(watch.isRunning()).isFalse();
		watch.start();
		assertThat(watch.isRunning()).isTrue();
		watch.stop();
		assertThat(watch.isRunning()).isFalse();
	}

}
| spring-cloud/spring-cloud-consul | spring-cloud-consul-discovery/src/test/java/org/springframework/cloud/consul/discovery/ConsulCatalogWatchTests.java | Java | apache-2.0 | 1,452 |
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.collide.client.editor;
import com.google.collide.client.util.logging.Log;
import com.google.collide.json.shared.JsonArray;
import com.google.collide.shared.document.Document;
import com.google.collide.shared.document.Line;
import com.google.collide.shared.document.LineInfo;
import com.google.collide.shared.document.anchor.Anchor;
import com.google.collide.shared.document.anchor.Anchor.RemovalStrategy;
import com.google.collide.shared.document.anchor.AnchorManager;
import com.google.collide.shared.document.anchor.AnchorType;
import com.google.collide.shared.util.ListenerRegistrar.Remover;
import com.google.collide.shared.util.SortedList;
import com.google.collide.shared.util.SortedList.OneWayIntComparator;
/**
* This class takes care of mapping between the different coordinates used by
* the editor. The two supported systems are:
* <ul>
* <li>Offset (x,y) - in pixels, relative to the top left of line 0 in the
* current document.
* <li>Line (line, column) - the real line number and column, taking into
* account spacer objects in between lines. Lines and columns are 0-indexed.
* </ul>
*/
class CoordinateMap implements Document.LineListener {

  /** Supplies editor font metrics and receives spacer-height change callbacks. */
  interface DocumentSizeProvider {
    float getEditorCharacterWidth();

    int getEditorLineHeight();

    void handleSpacerHeightChanged(Spacer spacer, int oldHeight);
  }

  /**
   * Cache entry recording the y-offset and height of a spacer at a given
   * line number.  Entries are computed lazily and discarded when lines or
   * spacers change.
   */
  private static class OffsetCache {
    private static final SortedList.Comparator<OffsetCache> COMPARATOR =
        new SortedList.Comparator<OffsetCache>() {
          @Override
          public int compare(OffsetCache a, OffsetCache b) {
            return a.offset - b.offset;
          }
        };

    /** One-way comparator for binary searching the cache by y-offset. */
    private static final SortedList.OneWayIntComparator<OffsetCache> Y_OFFSET_ONE_WAY_COMPARATOR =
        new SortedList.OneWayIntComparator<OffsetCache>() {
          @Override
          public int compareTo(OffsetCache s) {
            return value - s.offset;
          }
        };

    /** One-way comparator for binary searching the cache by line number. */
    private static final SortedList.OneWayIntComparator<OffsetCache> LINE_NUMBER_ONE_WAY_COMPARATOR
        = new SortedList.OneWayIntComparator<OffsetCache>() {
          @Override
          public int compareTo(OffsetCache s) {
            return value - s.lineNumber;
          }
        };

    // y-offset in pixels of the top of this cache entry's spacer
    private final int offset;
    // height in pixels of the spacer at this entry
    private final int height;
    // line number the spacer precedes
    private final int lineNumber;

    private OffsetCache(int offset, int lineNumber, int height) {
      this.offset = offset;
      this.height = height;
      this.lineNumber = lineNumber;
    }
  }

  // Sentinel entry for the top of the document when no spacer sits at line 0.
  private static final OffsetCache BEGINNING_EMPTY_OFFSET_CACHE = new OffsetCache(0, 0, 0);

  private static final AnchorType SPACER_ANCHOR_TYPE = AnchorType.create(CoordinateMap.class,
      "spacerAnchorType");

  private static final Spacer.Comparator SPACER_COMPARATOR = new Spacer.Comparator();
  private static final Spacer.OneWaySpacerComparator SPACER_ONE_WAY_COMPARATOR =
      new Spacer.OneWaySpacerComparator();

  /** Used by {@link #getPrecedingOffsetCache(int, int)} */
  private static final int IGNORE = Integer.MIN_VALUE;

  private Document document;
  private DocumentSizeProvider documentSizeProvider;

  /** List of offset cache items, sorted by the offset */
  private SortedList<OffsetCache> offsetCache;

  /**
   * True if there is at least one spacer in the editor, false otherwise (false
   * means a simple height / line height calculation can be used)
   */
  private boolean requiresMapping;

  /** Sorted by line number */
  private SortedList<Spacer> spacers;

  /** Summation of all spacers' heights */
  private int totalSpacerHeight;

  /** Remover for listener */
  private Remover documentLineListenerRemover;

  CoordinateMap(DocumentSizeProvider documentSizeProvider) {
    this.documentSizeProvider = documentSizeProvider;
    requiresMapping = false;
  }

  /**
   * Converts a y-offset (px, relative to the top of the document) to the line
   * number at that offset.  Clamps negative y to line 0; a y inside a spacer
   * maps to the line the spacer precedes.
   */
  int convertYToLineNumber(int y) {
    if (y < 0) {
      return 0;
    }

    int lineHeight = documentSizeProvider.getEditorLineHeight();
    if (!requiresMapping) {
      // No spacers anywhere: plain division suffices.
      return y / lineHeight;
    }

    OffsetCache precedingOffsetCache = getPrecedingOffsetCache(y, IGNORE);
    int precedingOffsetCacheBottom = precedingOffsetCache.offset + precedingOffsetCache.height;
    int lineNumberRelativeToOffsetCacheLine = (y - precedingOffsetCacheBottom) / lineHeight;

    if (y < precedingOffsetCacheBottom) {
      // y is inside the spacer
      return precedingOffsetCache.lineNumber;
    } else {
      return precedingOffsetCache.lineNumber + lineNumberRelativeToOffsetCacheLine;
    }
  }

  /**
   * Returns the top of the given line.
   */
  int convertLineNumberToY(int lineNumber) {
    int lineHeight = documentSizeProvider.getEditorLineHeight();

    if (!requiresMapping) {
      // No spacers anywhere: plain multiplication suffices.
      return lineNumber * lineHeight;
    }

    OffsetCache precedingOffsetCache = getPrecedingOffsetCache(IGNORE, lineNumber);
    int precedingOffsetCacheBottom = precedingOffsetCache.offset + precedingOffsetCache.height;
    int offsetRelativeToOffsetCacheBottom =
        (lineNumber - precedingOffsetCache.lineNumber) * lineHeight;
    return precedingOffsetCacheBottom + offsetRelativeToOffsetCacheBottom;
  }

  /**
   * Returns the first {@link OffsetCache} that is positioned less than or equal
   * to {@code y} or {@code lineNumber}. This methods fills the
   * {@link #offsetCache} if necessary ensuring the returned {@link OffsetCache}
   * is up-to-date.
   *
   * @param y the y, or {@link #IGNORE} if looking up by {@code lineNumber}
   * @param lineNumber the line number, or {@link #IGNORE} if looking up by
   *        {@code y}
   */
  private OffsetCache getPrecedingOffsetCache(int y, int lineNumber) {
    // Exactly one of y / lineNumber may be given.
    assert (y != IGNORE && lineNumber == IGNORE) || (lineNumber != IGNORE && y == IGNORE);

    final int lineHeight = documentSizeProvider.getEditorLineHeight();
    OffsetCache previousOffsetCache;
    if (y != IGNORE) {
      previousOffsetCache =
          getCachedPrecedingOffsetCacheImpl(OffsetCache.Y_OFFSET_ONE_WAY_COMPARATOR, y);
    } else {
      previousOffsetCache =
          getCachedPrecedingOffsetCacheImpl(OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR, lineNumber);
    }

    if (previousOffsetCache == null) {
      // Nothing cached before the target: seed from a spacer on line 0 if one
      // exists, otherwise from the zero sentinel.
      if (spacers.size() > 0 && spacers.get(0).getLineNumber() == 0) {
        previousOffsetCache = createOffsetCache(0, 0, spacers.get(0).getHeight());
      } else {
        previousOffsetCache = BEGINNING_EMPTY_OFFSET_CACHE;
      }
    }

    /*
     * Optimization so the common case that the target has previously been
     * computed requires no more computation
     */
    int offsetCacheSize = offsetCache.size();
    if (offsetCacheSize > 0
        && isTargetEarlierThanOffsetCache(y, lineNumber, offsetCache.get(offsetCacheSize - 1))) {
      return previousOffsetCache;
    }

    // This will return this offset cache's matching spacer
    int spacerPos = getPrecedingSpacerIndex(previousOffsetCache.lineNumber);
    /*
     * We want the spacer following this offset cache's spacer, or the first
     * spacer if none were found
     */
    spacerPos++;

    // Walk forward spacer by spacer, caching each one's offset, until the next
    // entry would pass the target.
    for (int n = spacers.size(); spacerPos < n; spacerPos++) {
      Spacer curSpacer = spacers.get(spacerPos);
      int previousOffsetCacheBottom = previousOffsetCache.offset + previousOffsetCache.height;
      int simpleLinesHeight =
          (curSpacer.getLineNumber() - previousOffsetCache.lineNumber) * lineHeight;
      if (simpleLinesHeight == 0) {
        Log.warn(Spacer.class, "More than one spacer on line " + previousOffsetCache.lineNumber);
      }

      // Create an offset cache for this spacer
      OffsetCache curOffsetCache =
          createOffsetCache(previousOffsetCacheBottom + simpleLinesHeight,
              curSpacer.getLineNumber(), curSpacer.getHeight());
      if (isTargetEarlierThanOffsetCache(y, lineNumber, curOffsetCache)) {
        return previousOffsetCache;
      }

      previousOffsetCache = curOffsetCache;
    }

    return previousOffsetCache;
  }

  /**
   * Returns the {@link OffsetCache} instance in list that has the greatest
   * value less than or equal to the given {@code value}. Returns null if there
   * isn't one.
   *
   * This should only be used by {@link #getPrecedingOffsetCache(int, int)}.
   */
  private OffsetCache getCachedPrecedingOffsetCacheImpl(
      OneWayIntComparator<OffsetCache> comparator, int value) {
    comparator.setValue(value);
    int index = offsetCache.findInsertionIndex(comparator, false);
    return index >= 0 ? offsetCache.get(index) : null;
  }

  /** True if the target (y or line number, the other being IGNORE) lies before the given cache entry. */
  private boolean isTargetEarlierThanOffsetCache(int y, int lineNumber, OffsetCache offsetCache) {
    return ((y != IGNORE && y < offsetCache.offset) ||
        (lineNumber != IGNORE && lineNumber < offsetCache.lineNumber));
  }

  /** Creates an entry, adds it to the cache, and returns it. */
  private OffsetCache createOffsetCache(int offset, int lineNumber, int height) {
    OffsetCache createdOffsetCache = new OffsetCache(offset, lineNumber, height);
    offsetCache.add(createdOffsetCache);
    return createdOffsetCache;
  }

  /** Returns the index of the last spacer at or before the given line number. */
  private int getPrecedingSpacerIndex(int lineNumber) {
    SPACER_ONE_WAY_COMPARATOR.setValue(lineNumber);
    return spacers.findInsertionIndex(SPACER_ONE_WAY_COMPARATOR, false);
  }

  /**
   * Adds a spacer above the given lineInfo line with height heightPx and
   * returns the created Spacer object.
   *
   * @param lineInfo the line before which the spacer will be inserted
   * @param height the height in pixels of the spacer
   */
  Spacer createSpacer(LineInfo lineInfo, int height, Buffer buffer, String cssClass) {
    int lineNumber = lineInfo.number();
    // create an anchor on the current line
    Anchor anchor =
        document.getAnchorManager().createAnchor(SPACER_ANCHOR_TYPE, lineInfo.line(), lineNumber,
            AnchorManager.IGNORE_COLUMN);
    anchor.setRemovalStrategy(RemovalStrategy.SHIFT);

    // account for the height of the line the spacer is on
    Spacer spacer = new Spacer(anchor, height, this, buffer, cssClass);
    spacers.add(spacer);
    totalSpacerHeight += height;

    invalidateLineNumberAndFollowing(lineNumber);
    requiresMapping = true;

    return spacer;
  }

  /**
   * Removes the given spacer; returns false if it was not registered.
   * Invalidates cached offsets from the line before the spacer onward.
   */
  boolean removeSpacer(Spacer spacer) {
    int lineNumber = spacer.getLineNumber();
    if (spacers.remove(spacer)) {
      document.getAnchorManager().removeAnchor(spacer.getAnchor());
      totalSpacerHeight -= spacer.getHeight();
      invalidateLineNumberAndFollowing(lineNumber - 1);
      updateRequiresMapping();
      return true;
    }
    return false;
  }

  /**
   * Resets all state for a newly attached document and re-registers the
   * line listener on it.
   */
  void handleDocumentChange(Document document) {
    if (documentLineListenerRemover != null) {
      documentLineListenerRemover.remove();
    }

    this.document = document;

    spacers = new SortedList<Spacer>(SPACER_COMPARATOR);
    offsetCache =
        new SortedList<OffsetCache>(OffsetCache.COMPARATOR);

    documentLineListenerRemover = document.getLineListenerRegistrar().add(this);

    requiresMapping = false; // starts with no items in list
    totalSpacerHeight = 0;
  }

  @Override
  public void onLineAdded(Document document, int lineNumber, JsonArray<Line> addedLines) {
    invalidateLineNumberAndFollowing(lineNumber);
  }

  @Override
  public void onLineRemoved(Document document, int lineNumber, JsonArray<Line> removedLines) {
    invalidateLineNumberAndFollowing(lineNumber);
  }

  /**
   * Call this after any line changes (adding/deleting lines, changing line
   * heights). Only invalidate (delete) cache items >= lineNumber, don't
   * recalculate.
   */
  void invalidateLineNumberAndFollowing(int lineNumber) {
    OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR.setValue(lineNumber);
    int insertionIndex = offsetCache.findInsertionIndex(OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR);
    offsetCache.removeThisAndFollowing(insertionIndex);
  }

  private void updateRequiresMapping() {
    // check to change active status
    requiresMapping = spacers.size() > 0;
  }

  /** Returns the sum of all spacer heights in pixels. */
  int getTotalSpacerHeight() {
    return totalSpacerHeight;
  }

  /** Updates the running height total, invalidates the cache, and notifies the size provider. */
  void handleSpacerHeightChanged(Spacer spacer, int oldHeight) {
    totalSpacerHeight -= oldHeight;
    totalSpacerHeight += spacer.getHeight();
    invalidateLineNumberAndFollowing(spacer.getLineNumber());
    documentSizeProvider.handleSpacerHeightChanged(spacer, oldHeight);
  }
}
| WeTheInternet/collide | client/src/main/java/com/google/collide/client/editor/CoordinateMap.java | Java | apache-2.0 | 12,795 |
package org.targettest.org.apache.lucene.store;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.Closeable;
import java.util.Map;
import java.util.HashMap;
/** Abstract base class for input from a file in a {@link Directory}. A
* random-access input stream. Used for all Lucene index input operations.
* @see Directory
*/
public abstract class IndexInput implements Cloneable,Closeable {
private boolean preUTF8Strings; // true if we are reading old (modified UTF8) string format
/** Reads and returns a single byte.
* @see IndexOutput#writeByte(byte)
*/
public abstract byte readByte() throws IOException;
/** Reads a specified number of bytes into an array at the specified offset.
* @param b the array to read bytes into
* @param offset the offset in the array to start storing bytes
* @param len the number of bytes to read
* @see IndexOutput#writeBytes(byte[],int)
*/
public abstract void readBytes(byte[] b, int offset, int len)
throws IOException;
/** Reads a specified number of bytes into an array at the
* specified offset with control over whether the read
* should be buffered (callers who have their own buffer
* should pass in "false" for useBuffer). Currently only
* {@link BufferedIndexInput} respects this parameter.
* @param b the array to read bytes into
* @param offset the offset in the array to start storing bytes
* @param len the number of bytes to read
* @param useBuffer set to false if the caller will handle
* buffering.
* @see IndexOutput#writeBytes(byte[],int)
*/
public void readBytes(byte[] b, int offset, int len, boolean useBuffer)
throws IOException
{
// Default to ignoring useBuffer entirely
readBytes(b, offset, len);
}
/** Reads four bytes and returns an int.
* @see IndexOutput#writeInt(int)
*/
public int readInt() throws IOException {
return ((readByte() & 0xFF) << 24) | ((readByte() & 0xFF) << 16)
| ((readByte() & 0xFF) << 8) | (readByte() & 0xFF);
}
/** Reads an int stored in variable-length format. Reads between one and
* five bytes. Smaller values take fewer bytes. Negative numbers are not
* supported.
* @see IndexOutput#writeVInt(int)
*/
public int readVInt() throws IOException {
byte b = readByte();
int i = b & 0x7F;
for (int shift = 7; (b & 0x80) != 0; shift += 7) {
b = readByte();
i |= (b & 0x7F) << shift;
}
return i;
}
/** Reads eight bytes and returns a long.
* @see IndexOutput#writeLong(long)
*/
public long readLong() throws IOException {
return (((long)readInt()) << 32) | (readInt() & 0xFFFFFFFFL);
}
/** Reads a long stored in variable-length format. Reads between one and
* nine bytes. Smaller values take fewer bytes. Negative numbers are not
* supported. */
public long readVLong() throws IOException {
byte b = readByte();
long i = b & 0x7F;
for (int shift = 7; (b & 0x80) != 0; shift += 7) {
b = readByte();
i |= (b & 0x7FL) << shift;
}
return i;
}
/** Call this if readString should read characters stored
* in the old modified UTF8 format (length in java chars
* and java's modified UTF8 encoding). This is used for
* indices written pre-2.4 See LUCENE-510 for details. */
public void setModifiedUTF8StringsMode() {
preUTF8Strings = true;
}
/** Reads a string.
* @see IndexOutput#writeString(String)
*/
public String readString() throws IOException {
if (preUTF8Strings)
return readModifiedUTF8String();
int length = readVInt();
final byte[] bytes = new byte[length];
readBytes(bytes, 0, length);
return new String(bytes, 0, length, "UTF-8");
}
private String readModifiedUTF8String() throws IOException {
int length = readVInt();
final char[] chars = new char[length];
readChars(chars, 0, length);
return new String(chars, 0, length);
}
/** Reads Lucene's old "modified UTF-8" encoded
 * characters into an array.
 * @param buffer the array to read characters into
 * @param start the offset in the array to start storing characters
 * @param length the number of characters to read
 * @see IndexOutput#writeChars(String,int,int)
 * @deprecated -- please use readString or readBytes
 * instead, and construct the string
 * from those utf8 bytes
 */
public void readChars(char[] buffer, int start, int length)
     throws IOException {
  final int end = start + length;
  for (int i = start; i < end; i++) {
    byte b = readByte();
    // 1-byte form (0xxxxxxx): the byte is the char.
    if ((b & 0x80) == 0)
      buffer[i] = (char)(b & 0x7F);
    // 2-byte form (110xxxxx 10xxxxxx): 5 + 6 payload bits.
    else if ((b & 0xE0) != 0xE0) {
      buffer[i] = (char)(((b & 0x1F) << 6)
       | (readByte() & 0x3F));
    } else
      // 3-byte form (1110xxxx 10xxxxxx 10xxxxxx): 4 + 6 + 6 payload bits.
      buffer[i] = (char)(((b & 0x0F) << 12)
        | ((readByte() & 0x3F) << 6)
          |  (readByte() & 0x3F));
  }
}
/**
* Expert
*
* Similar to {@link #readChars(char[], int, int)} but does not do any conversion operations on the bytes it is reading in. It still
* has to invoke {@link #readByte()} just as {@link #readChars(char[], int, int)} does, but it does not need a buffer to store anything
* and it does not have to do any of the bitwise operations, since we don't actually care what is in the byte except to determine
* how many more bytes to read
* @param length The number of chars to read
* @deprecated this method operates on old "modified utf8" encoded
* strings
*/
public void skipChars(int length) throws IOException {
  // Advance past `length` modified-UTF8 chars without decoding them:
  // the lead byte alone tells us how many continuation bytes to discard.
  for (int remaining = length; remaining > 0; remaining--) {
    final byte lead = readByte();
    if ((lead & 0x80) == 0) {
      continue; // single-byte character, nothing more to skip
    }
    if ((lead & 0xE0) != 0xE0) {
      readByte(); // two-byte character: discard one continuation byte
    } else {
      readByte(); // three-byte character: discard two continuation bytes
      readByte();
    }
  }
}
/** Closes the stream to further operations.
 * Implementations release any underlying file handles or buffers. */
public abstract void close() throws IOException;
/** Returns the current position in this file, where the next read will
 * occur.
 * @see #seek(long)
 */
public abstract long getFilePointer();
/** Sets current position in this file, where the next read will occur.
 * @param pos absolute byte offset from the start of the file.
 * @see #getFilePointer()
 */
public abstract void seek(long pos) throws IOException;
/** The number of bytes in the file. */
public abstract long length();
/** Returns a clone of this stream.
 *
 * <p>Clones of a stream access the same data, and are positioned at the same
 * point as the stream they were cloned from.
 *
 * <p>Expert: Subclasses must ensure that clones may be positioned at
 * different points in the input from each other and from the stream they
 * were cloned from.
 */
@Override
public Object clone() {
  IndexInput clone = null;
  try {
    clone = (IndexInput)super.clone();
  } catch (CloneNotSupportedException e) {}
  // CloneNotSupportedException is deliberately swallowed: this class is
  // expected to be Cloneable, so the catch is unreachable in practice,
  // and callers rely on clone() never throwing. If it ever did fire,
  // null would be returned.
  return clone;
}
/** Reads an int-prefixed sequence of string key/value pairs into a map. */
public Map<String,String> readStringStringMap() throws IOException {
  final int count = readInt();
  final Map<String,String> result = new HashMap<String,String>();
  for (int pair = 0; pair < count; pair++) {
    // Arguments are evaluated left to right, so the key is read before the value.
    result.put(readString(), readString());
  }
  return result;
}
}
| chrishumphreys/provocateur | provocateur-thirdparty/src/main/java/org/targettest/org/apache/lucene/store/IndexInput.java | Java | apache-2.0 | 8,089 |
## Copyright 2022 Google LLC
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## https://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""Sends a text message to the user with a suggestion action to dial a phone number.
Read more: https://developers.google.com/business-communications/business-messages/guides/how-to/message/send?hl=en#dial_action
This code is based on the https://github.com/google-business-communications/python-businessmessages
Python Business Messages client library.
"""
import uuid

from businessmessages import businessmessages_v1_client as bm_client
from businessmessages.businessmessages_v1_messages import BusinessmessagesConversationsMessagesCreateRequest
from businessmessages.businessmessages_v1_messages import BusinessMessagesDialAction
from businessmessages.businessmessages_v1_messages import BusinessMessagesMessage
from businessmessages.businessmessages_v1_messages import BusinessMessagesRepresentative
from businessmessages.businessmessages_v1_messages import BusinessMessagesSuggestedAction
from businessmessages.businessmessages_v1_messages import BusinessMessagesSuggestion
from oauth2client.service_account import ServiceAccountCredentials

# Edit the values below:
path_to_service_account_key = './service_account_key.json'
conversation_id = 'EDIT_HERE'

# Authenticate with the Business Messages API using the service account key.
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    path_to_service_account_key,
    scopes=['https://www.googleapis.com/auth/businessmessages'])
client = bm_client.BusinessmessagesV1(credentials=credentials)

# Resolve the representative type enum from its string name.
representative_type_as_string = 'BOT'
if representative_type_as_string == 'BOT':
    representative_type = BusinessMessagesRepresentative.RepresentativeTypeValueValuesEnum.BOT
else:
    representative_type = BusinessMessagesRepresentative.RepresentativeTypeValueValuesEnum.HUMAN

# Build the dial suggestion shown alongside the text message.
dial_suggestion = BusinessMessagesSuggestion(
    action=BusinessMessagesSuggestedAction(
        text='Call support',
        postbackData='call-support',
        dialAction=BusinessMessagesDialAction(phoneNumber='+12223334444')))

# Create a text message with a dial action and fallback text.
message = BusinessMessagesMessage(
    messageId=str(uuid.uuid4().int),
    representative=BusinessMessagesRepresentative(
        representativeType=representative_type),
    text='Contact support for help with this issue.',
    fallback='Give us a call at +12223334444.',
    suggestions=[dial_suggestion])

# Wrap the message in a create request bound to the target conversation.
create_request = BusinessmessagesConversationsMessagesCreateRequest(
    businessMessagesMessage=message,
    parent='conversations/' + conversation_id)

# Send the message.
bm_client.BusinessmessagesV1.ConversationsMessagesService(
    client=client).Create(request=create_request)
| google-business-communications/bm-snippets-python | send-message-suggested-action-dial.py | Python | apache-2.0 | 3,227 |
<?php
/**
 * Skeleton subclass for representing a row from the 'data2010' table.
 *
 * You should add additional methods to this class to meet the
 * application requirements. This class will only be generated as
 * long as it does not already exist in the output directory.
 *
 * @package propel.generator.fbapp
 */
class Data2010 extends BaseData2010
{
    // No custom behavior yet; all generated accessors live in BaseData2010.
}
| royrusso/fishbase | classes/fbapp/Data2010.php | PHP | apache-2.0 | 370 |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.11
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.08.19 at 01:05:06 PM PDT
//
package com.google.api.ads.adwords.lib.jaxb.v201509;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for SortOrder.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="SortOrder">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="ASCENDING"/>
* <enumeration value="DESCENDING"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "SortOrder")
@XmlEnum
public enum SortOrder {

    ASCENDING,
    DESCENDING;

    /** Returns the XML value for this constant (identical to the enum name). */
    public String value() {
        return name();
    }

    /**
     * Parses the given XML value into the matching constant.
     * @throws IllegalArgumentException if {@code v} is not a valid constant name.
     */
    public static SortOrder fromValue(String v) {
        return valueOf(v);
    }

}
| gawkermedia/googleads-java-lib | modules/ads_lib/src/main/java/com/google/api/ads/adwords/lib/jaxb/v201509/SortOrder.java | Java | apache-2.0 | 1,139 |
package ru.job4j;
import org.junit.Test;
import java.util.*;
/**
* ะะปะฐัั ะดะปั ัะตััะธัะพะฒะฐะฝะธั.
* @author agavrikov
* @since 13.07.2017
* @version 1
*/
public class TestTimeCollectionTest {
/**
* ะขะตััะธัะพะฒะฐะฝะธะต ะผะตัะพะดะฐ ะดะพะฑะฐะฒะปะตะฝะธั.
*/
@Test
public void add() {
TestTimeCollection methods = new TestTimeCollection();
List<String> linkedList = new LinkedList<String>();
long timeStart = new Date().getTime();
long timeEnd = methods.add(linkedList, 1000000);
System.out.println(timeEnd - timeStart);
List<String> arrayList = new ArrayList<String>();
timeStart = new Date().getTime();
timeEnd = methods.add(arrayList, 1000000);
System.out.println(timeEnd - timeStart);
Set<String> treeSet = new TreeSet<String>();
timeStart = new Date().getTime();
timeEnd = methods.add(treeSet, 1000000);
System.out.println(timeEnd - timeStart);
}
/**
* ะขะตััะธัะพะฒะฐะฝะธะต ะผะตัะพะดะฐ ัะดะฐะปะตะฝะธั.
*/
@Test
public void delete() {
TestTimeCollection methods = new TestTimeCollection();
List<String> linkedList = new LinkedList<String>();
methods.add(linkedList, 100000);
long timeStart = new Date().getTime();
long timeEnd = methods.delete(linkedList, 10000);
System.out.println(timeEnd - timeStart);
List<String> arrayList = new ArrayList<String>();
methods.add(arrayList, 100000);
timeStart = new Date().getTime();
timeEnd = methods.delete(arrayList, 10000);
System.out.println(timeEnd - timeStart);
Set<String> treeSet = new TreeSet<String>();
methods.add(treeSet, 100000);
timeStart = new Date().getTime();
timeEnd = methods.delete(treeSet, 10000);
System.out.println(timeEnd - timeStart);
}
} | AntonGavr92/agavrikov | chapter_003/src/test/java/ru/job4j/TestTimeCollectionTest.java | Java | apache-2.0 | 1,926 |
package tinymonkeys.vue;
import java.awt.Color;
import java.awt.Graphics;
import javax.swing.JPanel;
/**
 * Panel that draws the island map.
 *
 * @version 1.0
 * @author Camille Constant
 *
 */
public class VueCarte extends JPanel {

    /**
     * Auto-generated serial version UID.
     */
    private static final long serialVersionUID = 4884966649331011259L;

    /**
     * Ratio between the map size and the screen size.
     */
    private static final double RAPPORT_ECRAN = 0.75;

    /**
     * Divisor used to place an object at the middle of the screen.
     */
    private static final int DIVISEUR_MILIEU = 2;

    /**
     * Divisor used to place an object at a quarter of the screen.
     */
    private static final int DIVISEUR_QUART = 4;

    /**
     * Color of the cells representing the sea.
     */
    private static final Color OCEAN = new Color(0, 120, 220);

    /**
     * Size of one cell, in pixels.
     */
    private int tailleCase;

    /**
     * X coordinate of the top-left corner of the grid, in pixels.
     */
    private int xGrille;

    /**
     * Y coordinate of the top-left corner of the grid, in pixels.
     */
    private int yGrille;

    /**
     * Screen width in pixels.
     */
    private final int largeurEcran;

    /**
     * Screen height in pixels.
     */
    private final int hauteurEcran;

    /**
     * Grid width, in number of cells.
     */
    private int largeurGrille;

    /**
     * Grid height, in number of cells.
     */
    private int hauteurGrille;

    /**
     * The map; a cell equal to 0 is sea, anything else is land
     * (see {@link #dessineIle(Graphics)}).
     */
    private int[][] carte;

    /**
     * Builds the map view.
     *
     * @param largeurEcran screen width in pixels.
     * @param hauteurEcran screen height in pixels.
     * @param carte the map to draw.
     */
    public VueCarte(int largeurEcran, int hauteurEcran, int[][] carte) {
        super();
        this.largeurEcran = largeurEcran;
        this.hauteurEcran = hauteurEcran;
        this.largeurGrille = carte.length;
        this.hauteurGrille = carte[0].length;
        this.copieCarte(carte);
        this.placementGrille();
        this.setBounds(this.xGrille, this.yGrille, this.largeurGrille
            * this.tailleCase + 1, this.hauteurGrille * this.tailleCase + 1);
        this.setOpaque(false);
    }

    /**
     * Paints the island map together with the grid.
     *
     * @param g the graphics context to draw into.
     */
    public final void paintComponent(Graphics g) {
        super.paintComponent(g);
        this.dessineIle(g);
        this.dessineGrille(g);
    }

    /**
     * Centers the map on the screen, choosing the cell size so the grid
     * fits within RAPPORT_ECRAN of the screen in both dimensions.
     */
    private void placementGrille() {
        final int diviseurLargeur;
        final int diviseurHauteur;
        final int largeurCase = (int) ((this.largeurEcran * RAPPORT_ECRAN) / this.largeurGrille);
        final int hauteurCase = (int) ((this.hauteurEcran * RAPPORT_ECRAN) / this.hauteurGrille);
        // The limiting dimension fixes the cell size; the other dimension
        // gets the larger (quarter-screen) margin.
        if (largeurCase < hauteurCase) {
            this.tailleCase = largeurCase;
            diviseurLargeur = DIVISEUR_QUART;
            diviseurHauteur = DIVISEUR_MILIEU;
        } else {
            this.tailleCase = hauteurCase;
            diviseurLargeur = DIVISEUR_MILIEU;
            diviseurHauteur = DIVISEUR_QUART;
        }
        this.xGrille = (int) ((this.largeurEcran - (this.tailleCase * this.largeurGrille)) / diviseurLargeur);
        this.yGrille = (int) ((this.hauteurEcran - (this.tailleCase * this.hauteurGrille)) / diviseurHauteur);
    }

    /**
     * Draws the grid lines.
     *
     * @param g the graphics context to draw into.
     */
    public void dessineGrille(Graphics g) {
        // The grid is drawn in black.
        g.setColor(Color.BLACK);
        // Columns.
        for (int i = 0; i <= (this.tailleCase * this.largeurGrille); i += this.tailleCase) {
            g.drawLine(i, 0, i, this.tailleCase * this.hauteurGrille);
        }
        // Rows.
        for (int j = 0; j <= this.tailleCase * this.hauteurGrille; j += this.tailleCase) {
            g.drawLine(0, j, this.tailleCase * this.largeurGrille, j);
        }
    }

    /**
     * Draws the island: fills sea cells (value 0) with the OCEAN color.
     *
     * @param g the graphics context to draw into.
     */
    public final void dessineIle(Graphics g) {
        int i = -1;
        while (++i < this.largeurGrille) {
            int j = -1;
            while (++j < this.hauteurGrille) {
                // If the cell is a sea cell.
                if (this.carte[i][j] == 0) {
                    g.setColor(OCEAN);
                    g.fillRect(i * this.tailleCase, j * this.tailleCase,
                        this.tailleCase, this.tailleCase);
                }
                // Land cells need no coloring.
            }
        }
    }

    /**
     * Replaces the island map and recomputes the layout.
     *
     * @param carte the new map.
     */
    public final void setVueCarte(int[][] carte) {
        this.largeurGrille = carte.length;
        this.hauteurGrille = carte[0].length;
        this.copieCarte(carte);
        this.placementGrille();
        this.setBounds(this.xGrille, this.yGrille, this.largeurGrille
            * this.tailleCase + 1, this.hauteurGrille * this.tailleCase + 1);
        this.setOpaque(false);
    }

    /**
     * Read accessor for the cell size.
     *
     * @return the cell size in pixels.
     */
    public final int getTailleCase() {
        return this.tailleCase;
    }

    /**
     * Read accessor for the grid's x coordinate.
     *
     * @return the x coordinate of the grid.
     */
    public final int getXGrille() {
        return this.xGrille;
    }

    /**
     * Read accessor for the grid's y coordinate.
     *
     * @return the y coordinate of the grid.
     */
    public final int getYGrille() {
        return this.yGrille;
    }

    /**
     * Deep-copies the given map into the carte attribute, so later
     * mutations of the caller's array cannot affect this view.
     *
     * @param carte the map to copy.
     */
    private void copieCarte(int[][] carte) {
        this.carte = new int[carte.length][carte[0].length];
        int i = -1;
        while (++i < carte.length) {
            int j = -1;
            while (++j < carte[0].length) {
                this.carte[i][j] = carte[i][j];
            }
        }
    }
}
| afraisse/TinyMonkey | src/tinymonkeys/vue/VueCarte.java | Java | apache-2.0 | 5,524 |
package org.sagebionetworks.auth.services;
import org.sagebionetworks.repo.manager.AuthenticationManager;
import org.sagebionetworks.repo.manager.MessageManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.authentication.PersonalAccessTokenManager;
import org.sagebionetworks.repo.manager.oauth.AliasAndType;
import org.sagebionetworks.repo.manager.oauth.OAuthManager;
import org.sagebionetworks.repo.manager.oauth.OpenIDConnectManager;
import org.sagebionetworks.repo.model.AuthorizationUtils;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.auth.AccessToken;
import org.sagebionetworks.repo.model.auth.AccessTokenGenerationRequest;
import org.sagebionetworks.repo.model.auth.AccessTokenGenerationResponse;
import org.sagebionetworks.repo.model.auth.AccessTokenRecord;
import org.sagebionetworks.repo.model.auth.AccessTokenRecordList;
import org.sagebionetworks.repo.model.auth.AuthenticatedOn;
import org.sagebionetworks.repo.model.auth.ChangePasswordInterface;
import org.sagebionetworks.repo.model.auth.LoginRequest;
import org.sagebionetworks.repo.model.auth.LoginResponse;
import org.sagebionetworks.repo.model.auth.NewUser;
import org.sagebionetworks.repo.model.auth.PasswordResetSignedToken;
import org.sagebionetworks.repo.model.oauth.OAuthAccountCreationRequest;
import org.sagebionetworks.repo.model.oauth.OAuthProvider;
import org.sagebionetworks.repo.model.oauth.OAuthUrlRequest;
import org.sagebionetworks.repo.model.oauth.OAuthUrlResponse;
import org.sagebionetworks.repo.model.oauth.OAuthValidationRequest;
import org.sagebionetworks.repo.model.oauth.ProvidedUserInfo;
import org.sagebionetworks.repo.model.principal.AliasType;
import org.sagebionetworks.repo.model.principal.PrincipalAlias;
import org.sagebionetworks.repo.transactions.WriteTransaction;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.util.ValidateArgument;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Default {@link AuthenticationService} implementation. Each operation
 * delegates to the injected user / authentication / OAuth / OIDC /
 * message / personal-access-token managers.
 */
@Service
public class AuthenticationServiceImpl implements AuthenticationService {

    @Autowired
    private UserManager userManager;

    @Autowired
    private AuthenticationManager authManager;

    @Autowired
    private OAuthManager oauthManager;

    @Autowired
    private OpenIDConnectManager oidcManager;

    @Autowired
    private MessageManager messageManager;

    @Autowired
    private PersonalAccessTokenManager personalAccessTokenManager;

    /** Changes the user's password, then emails a change confirmation. */
    @WriteTransaction
    @Override
    public void changePassword(ChangePasswordInterface request) throws NotFoundException {
        final long userId = authManager.changePassword(request);
        messageManager.sendPasswordChangeConfirmationEmail(userId);
    }

    /** Records terms-of-use acceptance for the principal identified by the access token. */
    @Override
    @WriteTransaction
    public void signTermsOfUse(AccessToken accessToken) throws NotFoundException {
        ValidateArgument.required(accessToken, "Access token");
        ValidateArgument.required(accessToken.getAccessToken(), "Access token contents");
        Long principalId = Long.parseLong(oidcManager.validateAccessToken(accessToken.getAccessToken()));
        // Save the state of acceptance
        authManager.setTermsOfUseAcceptance(principalId, true);
    }

    /** Returns the principal's API secret key. */
    @Override
    public String getSecretKey(Long principalId) throws NotFoundException {
        return authManager.getSecretKey(principalId);
    }

    /** Invalidates the principal's secret key by rotating it. */
    @Override
    @WriteTransaction
    public void deleteSecretKey(Long principalId) throws NotFoundException {
        authManager.changeSecretKey(principalId);
    }

    /** Returns whether the user has accepted the terms of use. */
    @Override
    public boolean hasUserAcceptedTermsOfUse(Long userId) throws NotFoundException {
        return authManager.hasUserAcceptedTermsOfUse(userId);
    }

    /**
     * Sends a password-reset email if the alias resolves to a user.
     * Unknown aliases are silently ignored so the endpoint does not
     * reveal whether an email/username exists.
     */
    @Override
    public void sendPasswordResetEmail(String passwordResetUrlPrefix, String usernameOrEmail) {
        try {
            PrincipalAlias principalAlias = userManager.lookupUserByUsernameOrEmail(usernameOrEmail);
            PasswordResetSignedToken passwordRestToken = authManager.createPasswordResetToken(principalAlias.getPrincipalId());
            messageManager.sendNewPasswordResetEmail(passwordResetUrlPrefix, passwordRestToken, principalAlias);
        } catch (NotFoundException e) {
            // should not indicate that a email/user could not be found
        }
    }

    /** Builds the provider's OAuth authorization URL. */
    @Override
    public OAuthUrlResponse getOAuthAuthenticationUrl(OAuthUrlRequest request) {
        String url = oauthManager.getAuthorizationUrl(request.getProvider(), request.getRedirectUrl(), request.getState());
        OAuthUrlResponse response = new OAuthUrlResponse();
        response.setAuthorizationUrl(url);
        return response;
    }

    /**
     * Validates the provider's authorization code, matches the verified email
     * to an existing account, and logs that account in.
     */
    @Override
    public LoginResponse validateOAuthAuthenticationCodeAndLogin(
            OAuthValidationRequest request, String tokenIssuer) throws NotFoundException {
        // Use the authentication code to lookup the user's information.
        ProvidedUserInfo providedInfo = oauthManager.validateUserWithProvider(
                request.getProvider(), request.getAuthenticationCode(), request.getRedirectUrl());
        if(providedInfo.getUsersVerifiedEmail() == null){
            throw new IllegalArgumentException("OAuthProvider: "+request.getProvider().name()+" did not provide a user email");
        }
        // This is the ID of the user within the provider's system.
        PrincipalAlias emailAlias = userManager.lookupUserByUsernameOrEmail(providedInfo.getUsersVerifiedEmail());
        // Return the user's access token
        return authManager.loginWithNoPasswordCheck(emailAlias.getPrincipalId(), tokenIssuer);
    }

    /**
     * Creates a new account from the provider-supplied profile, then logs
     * the new user in.
     */
    @WriteTransaction
    public LoginResponse createAccountViaOauth(OAuthAccountCreationRequest request, String tokenIssuer) {
        // Use the authentication code to lookup the user's information.
        ProvidedUserInfo providedInfo = oauthManager.validateUserWithProvider(
                request.getProvider(), request.getAuthenticationCode(), request.getRedirectUrl());
        if(providedInfo.getUsersVerifiedEmail() == null){
            throw new IllegalArgumentException("OAuthProvider: "+request.getProvider().name()+" did not provide a user email");
        }
        // create account with the returned user info.
        NewUser newUser = new NewUser();
        newUser.setEmail(providedInfo.getUsersVerifiedEmail());
        newUser.setFirstName(providedInfo.getFirstName());
        newUser.setLastName(providedInfo.getLastName());
        newUser.setUserName(request.getUserName());
        long newPrincipalId = userManager.createUser(newUser);
        return authManager.loginWithNoPasswordCheck(newPrincipalId, tokenIssuer);
    }

    /** Binds the provider-side user ID as an alias on the caller's account. */
    @Override
    public PrincipalAlias bindExternalID(Long userId, OAuthValidationRequest validationRequest) {
        if (AuthorizationUtils.isUserAnonymous(userId)) throw new UnauthorizedException("User ID is required.");
        AliasAndType providersUserId = oauthManager.retrieveProvidersId(
                validationRequest.getProvider(),
                validationRequest.getAuthenticationCode(),
                validationRequest.getRedirectUrl());
        // now bind the ID to the user account
        return userManager.bindAlias(providersUserId.getAlias(), providersUserId.getType(), userId);
    }

    /** Removes a previously bound provider alias from the caller's account. */
    @Override
    public void unbindExternalID(Long userId, OAuthProvider provider, String aliasName) {
        if (AuthorizationUtils.isUserAnonymous(userId)) throw new UnauthorizedException("User ID is required.");
        AliasType aliasType = oauthManager.getAliasTypeForProvider(provider);
        userManager.unbindAlias(aliasName, aliasType, userId);
    }

    /** Performs a username/password login. */
    @Override
    public LoginResponse login(LoginRequest request, String tokenIssuer) {
        return authManager.login(request, tokenIssuer);
    }

    /** Returns the time the user last authenticated. */
    @Override
    public AuthenticatedOn getAuthenticatedOn(long userId) {
        UserInfo userInfo = userManager.getUserInfo(userId);
        return authManager.getAuthenticatedOn(userInfo);
    }

    /** Resolves a username or email alias to its principal. */
    @Override
    public PrincipalAlias lookupUserForAuthentication(String alias) {
        return userManager.lookupUserByUsernameOrEmail(alias);
    }

    /** Issues a new personal access token for the caller. */
    @Override
    public AccessTokenGenerationResponse createPersonalAccessToken(Long userId, String accessToken, AccessTokenGenerationRequest request, String oauthEndpoint) {
        UserInfo userInfo = userManager.getUserInfo(userId);
        return personalAccessTokenManager.issueToken(userInfo, accessToken, request, oauthEndpoint);
    }

    /** Lists the caller's personal access token records (paginated). */
    @Override
    public AccessTokenRecordList getPersonalAccessTokenRecords(Long userId, String nextPageToken) {
        UserInfo userInfo = userManager.getUserInfo(userId);
        return personalAccessTokenManager.getTokenRecords(userInfo, nextPageToken);
    }

    /** Fetches one of the caller's personal access token records. */
    @Override
    public AccessTokenRecord getPersonalAccessTokenRecord(Long userId, Long tokenId) {
        UserInfo userInfo = userManager.getUserInfo(userId);
        return personalAccessTokenManager.getTokenRecord(userInfo, tokenId.toString());
    }

    /** Revokes one of the caller's personal access tokens. */
    @Override
    public void revokePersonalAccessToken(Long userId, Long tokenId) {
        UserInfo userInfo = userManager.getUserInfo(userId);
        personalAccessTokenManager.revokeToken(userInfo, tokenId.toString());
    }
}
| Sage-Bionetworks/Synapse-Repository-Services | services/repository/src/main/java/org/sagebionetworks/auth/services/AuthenticationServiceImpl.java | Java | apache-2.0 | 8,736 |
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
class CreateTypeBuysTable extends Migration {

    /**
     * Run the migrations: create the "type_buys" table with an
     * auto-incrementing id, a description, timestamps and soft deletes.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('type_buys', function (Blueprint $table) {
            $table->increments('id');
            $table->string('description');
            $table->timestamps();
            $table->softDeletes();
        });
    }

    /**
     * Reverse the migrations: drop the "type_buys" table.
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('type_buys');
    }

}
| sonico999/deliveryguy | app/database/migrations/2014_11_05_213148_create_type_buys_table.php | PHP | apache-2.0 | 548 |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright 2014 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from world import world
from bigml.api import HTTP_OK
def i_get_the_project(step, resource):
    """Fetch the project resource and stash it on the shared world object.

    Asserts that the API answered with HTTP 200 before storing the project.
    """
    response = world.api.get_project(resource)
    world.status = response['code']
    assert world.status == HTTP_OK
    world.project = response['object']
| ShaguptaS/python | bigml/tests/read_project_steps.py | Python | apache-2.0 | 869 |
# automate/server/user/views.py
#################
#### imports ####
#################
#from flask import render_template, Blueprint, url_for, \
# redirect, flash, request
#from flask_login import login_user, logout_user, login_required
#from automate.server import bcrypt, db
#from automate.server import db
#from automate.server.models import User
#from automate.server.user.forms import LoginForm, RegisterForm
################
#### config ####
################
#user_blueprint = Blueprint('user', __name__,)
################
#### routes ####
################
#@user_blueprint.route('/register', methods=['GET', 'POST'])
#def register():
# form = RegisterForm(request.form)
# if form.validate_on_submit():
# user = User(
# email=form.email.data,
# password=form.password.data
# )
# db.session.add(user)
# db.session.commit()
#
# login_user(user)
#
# flash('Thank you for registering.', 'success')
# return redirect(url_for("user.members"))
#
# return render_template('user/register.html', form=form)
#
#
#@user_blueprint.route('/login', methods=['GET', 'POST'])
#def login():
# form = LoginForm(request.form)
# if form.validate_on_submit():
# user = User.query.filter_by(email=form.email.data).first()
# if user:
# #if user and bcrypt.check_password_hash(
# # user.password, request.form['password']):
# # login_user(user)
# flash('You are logged in. Welcome!', 'success')
# return redirect(url_for('user.members'))
# else:
# flash('Invalid email and/or password.', 'danger')
# return render_template('user/login.html', form=form)
# return render_template('user/login.html', title='Please Login', form=form)
#
#
#@user_blueprint.route('/logout')
#@login_required
#def logout():
# logout_user()
# flash('You were logged out. Bye!', 'success')
# return redirect(url_for('main.home'))
#
#
#@user_blueprint.route('/members')
#@login_required
#def members():
# return render_template('user/members.html')
# | JeromeErasmus/browserstack_automate | automate/server/user/views.py | Python | apache-2.0 | 2,112 |
<?php
/**
* PHP Version 5.
*
* @category Amazon
*
* @copyright Copyright 2009 Amazon Technologies, Inc.
*
* @see http://aws.amazon.com
*
* @license http://aws.amazon.com/apache2.0 Apache License, Version 2.0
*
* @version 2009-01-01
*/
/*******************************************************************************
* Marketplace Web Service PHP5 Library
* Generated: Thu May 07 13:07:36 PDT 2009
*
*/
/**
* @see MarketplaceWebService_Model
*/
require_once 'MarketplaceWebService/Model.php';
/**
* MarketplaceWebService_Model_ErrorResponse.
*
* Properties:
* <ul>
*
* <li>Error: MarketplaceWebService_Model_Error</li>
* <li>RequestId: string</li>
*
* </ul>
*/
class MarketplaceWebService_Model_ErrorResponse extends MarketplaceWebService_Model
{
    /**
     * Construct new MarketplaceWebService_Model_ErrorResponse.
     *
     * @param mixed $data DOMElement or Associative Array to construct from.
     *
     * Valid properties:
     * <ul>
     *
     * <li>Error: MarketplaceWebService_Model_Error</li>
     * <li>RequestId: string</li>
     *
     * </ul>
     */
    public function __construct($data = null)
    {
        // Field metadata drives the generic (de)serialization in the parent class.
        $this->fields = array(
            'Error' => array('FieldValue' => array(), 'FieldType' => 'MarketplaceWebService_Model_Error'),
            'RequestId' => array('FieldValue' => null, 'FieldType' => 'string'),
        );
        parent::__construct($data);
    }

    /**
     * Construct MarketplaceWebService_Model_ErrorResponse from XML string.
     *
     * @param string $xml XML string to construct from
     *
     * @return MarketplaceWebService_Model_ErrorResponse
     *
     * @throws Exception when the XML has no root ErrorResponse element.
     */
    public static function fromXML($xml)
    {
        $dom = new DOMDocument();
        $dom->loadXML($xml);
        $xpath = new DOMXPath($dom);
        // The ErrorResponse element lives in the 2009-01-01 MWS namespace.
        $xpath->registerNamespace('a', 'http://mws.amazonaws.com/doc/2009-01-01/');
        $response = $xpath->query('//a:ErrorResponse');
        if ($response->length == 1) {
            return new self(($response->item(0)));
        } else {
            throw new Exception('Unable to construct MarketplaceWebService_Model_ErrorResponse from provided XML.
                                 Make sure that ErrorResponse is a root element');
        }
    }

    /**
     * Gets the value of the Error.
     *
     * @return array of Error Error
     */
    public function getError()
    {
        return $this->fields['Error']['FieldValue'];
    }

    /**
     * Sets the value of the Error.
     *
     * @param mixed Error or an array of Error Error
     *
     * @return $this instance
     */
    public function setError($error)
    {
        // Normalize a single Error into a one-element list.
        if (!$this->_isNumericArray($error)) {
            $error = array($error);
        }
        $this->fields['Error']['FieldValue'] = $error;

        return $this;
    }

    /**
     * Sets single or multiple values of Error list via variable number of arguments.
     * For example, to set the list with two elements, simply pass two values as arguments to this function
     * <code>withError($error1, $error2)</code>.
     *
     * @param Error $errorArgs one or more Error
     *
     * @return MarketplaceWebService_Model_ErrorResponse instance
     */
    public function withError($errorArgs)
    {
        foreach (func_get_args() as $error) {
            $this->fields['Error']['FieldValue'][] = $error;
        }

        return $this;
    }

    /**
     * Checks if Error list is non-empty.
     *
     * @return bool true if Error list is non-empty
     */
    public function isSetError()
    {
        return count($this->fields['Error']['FieldValue']) > 0;
    }

    /**
     * Gets the value of the RequestId property.
     *
     * @return string RequestId
     */
    public function getRequestId()
    {
        return $this->fields['RequestId']['FieldValue'];
    }

    /**
     * Sets the value of the RequestId property.
     *
     * @param string RequestId
     *
     * @return $this instance
     */
    public function setRequestId($value)
    {
        $this->fields['RequestId']['FieldValue'] = $value;

        return $this;
    }

    /**
     * Sets the value of the RequestId and returns this instance.
     *
     * @param string $value RequestId
     *
     * @return MarketplaceWebService_Model_ErrorResponse instance
     */
    public function withRequestId($value)
    {
        $this->setRequestId($value);

        return $this;
    }

    /**
     * Checks if RequestId is set.
     *
     * @return bool true if RequestId is set
     */
    public function isSetRequestId()
    {
        return !is_null($this->fields['RequestId']['FieldValue']);
    }

    /**
     * XML Representation for this object.
     *
     * @return string XML for this object
     */
    public function toXML()
    {
        $xml = '';
        $xml .= '<ErrorResponse xmlns="http://mws.amazonaws.com/doc/2009-01-01/">';
        $xml .= $this->_toXMLFragment();
        $xml .= '</ErrorResponse>';

        return $xml;
    }

    // HTTP response header metadata captured from the service call, if any.
    private $_responseHeaderMetadata = null;

    /**
     * Gets the response header metadata.
     */
    public function getResponseHeaderMetadata()
    {
        return $this->_responseHeaderMetadata;
    }

    /**
     * Sets (and returns) the response header metadata.
     * NOTE(review): returns the assigned value rather than $this,
     * unlike the other setters — presumably intentional; confirm callers.
     */
    public function setResponseHeaderMetadata($responseHeaderMetadata)
    {
        return $this->_responseHeaderMetadata = $responseHeaderMetadata;
    }
}
| coopTilleuls/amazon-mws | src/MarketplaceWebService/Model/ErrorResponse.php | PHP | apache-2.0 | 5,341 |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ApiRequestBuilder, ApiVersion} from "helpers/api_request_builder";
import SparkRoutes from "helpers/spark_routes";
/**
 * Request payload for bulk-updating system administrators:
 * lists of user names to add to and/or remove from the admin set.
 */
export interface BulkUpdateSystemAdminJSON {
  operations: {
    users: {
      add?: string[],
      remove?: string[]
    }
  };
}
/** API client for reading and bulk-updating system administrators. */
export class AdminsCRUD {
  static API_VERSION_HEADER = ApiVersion.v2;

  /** Fetches the current list of system administrators. */
  static all() {
    const path = SparkRoutes.apisystemAdminsPath();
    return ApiRequestBuilder.GET(path, AdminsCRUD.API_VERSION_HEADER);
  }

  /** Applies a bulk add/remove update to the system administrators. */
  static bulkUpdate(bulkUpdateSystemAdminJson: BulkUpdateSystemAdminJSON) {
    const path = SparkRoutes.apisystemAdminsPath();
    return ApiRequestBuilder.PATCH(path, AdminsCRUD.API_VERSION_HEADER, {payload: bulkUpdateSystemAdminJson});
  }
}
| jyotisingh/gocd | server/webapp/WEB-INF/rails/webpack/models/admins/admin_crud.ts | TypeScript | apache-2.0 | 1,266 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotevents.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotevents-2018-07-27/UntagResource" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UntagResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // The result carries no fields, so the rendered body is always empty.
        return "{}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Instances are stateless, so any other UntagResourceResult is equal.
        // instanceof is false for null, covering the null check as well.
        return obj instanceof UntagResourceResult;
    }

    @Override
    public int hashCode() {
        // Constant hash, consistent with equals: there is no state to mix in.
        return 1;
    }

    @Override
    public UntagResourceResult clone() {
        try {
            return (UntagResourceResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-iotevents/src/main/java/com/amazonaws/services/iotevents/model/UntagResourceResult.java | Java | apache-2.0 | 2,326 |
๏ปฟ// Copyright (c) kuicker.org. All rights reserved.
// Modified By YYYY-MM-DD
// kevinjong 2016-02-11 - Creation
using System.IO;
using System.Linq;
using Xunit;
namespace IsTo.Tests
{
public class TestHelper
{
internal static void StreamComparison(
Stream stream1,
Stream stream2)
{
var bufferSize = 2048;
var buffer1 = new byte[bufferSize];
var buffer2 = new byte[bufferSize];
while(true) {
var count1 = stream1.Read(buffer1, 0, bufferSize);
var count2 = stream2.Read(buffer2, 0, bufferSize);
Assert.True(count1 == count2);
if(count1 == 0) { return; }
Assert.True(
buffer1
.Take(count1)
.SequenceEqual(buffer2.Take(count2))
);
}
}
}
}
| Kuick/IsTo | IsTo.Tests/Misc/TestHelper.cs | C# | apache-2.0 | 765 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/model_service.proto
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message of [ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel] operation.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ExportModelResponse}
*/
public final class ExportModelResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ExportModelResponse)
    ExportModelResponseOrBuilder {
  // NOTE(review): protoc-generated code for a field-less message; every member
  // below is standard protobuf boilerplate and should not be hand-edited.
  private static final long serialVersionUID = 0L;
  // Use ExportModelResponse.newBuilder() to construct.
  private ExportModelResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ExportModelResponse() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ExportModelResponse();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: the message declares no fields, so every
  // non-zero tag is preserved as an unknown field for forward compatibility.
  private ExportModelResponse(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.ModelServiceProto
        .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.ModelServiceProto
        .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ExportModelResponse.class,
            com.google.cloud.aiplatform.v1.ExportModelResponse.Builder.class);
  }
  // Memoized isInitialized result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Equality/hash depend only on unknown fields, since the message has none.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ExportModelResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ExportModelResponse other =
        (com.google.cloud.aiplatform.v1.ExportModelResponse) obj;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points over all supported input types.
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.aiplatform.v1.ExportModelResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message of [ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel] operation.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.ExportModelResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ExportModelResponse)
      com.google.cloud.aiplatform.v1.ExportModelResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.ModelServiceProto
          .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.ModelServiceProto
          .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.ExportModelResponse.class,
              com.google.cloud.aiplatform.v1.ExportModelResponse.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.ExportModelResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.ModelServiceProto
          .internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.ExportModelResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ExportModelResponse build() {
      com.google.cloud.aiplatform.v1.ExportModelResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ExportModelResponse buildPartial() {
      com.google.cloud.aiplatform.v1.ExportModelResponse result =
          new com.google.cloud.aiplatform.v1.ExportModelResponse(this);
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.ExportModelResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1.ExportModelResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.ExportModelResponse other) {
      if (other == com.google.cloud.aiplatform.v1.ExportModelResponse.getDefaultInstance())
        return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.aiplatform.v1.ExportModelResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.aiplatform.v1.ExportModelResponse) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ExportModelResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ExportModelResponse)
  private static final com.google.cloud.aiplatform.v1.ExportModelResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ExportModelResponse();
  }
  public static com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser that delegates to the wire-format parsing constructor.
  private static final com.google.protobuf.Parser<ExportModelResponse> PARSER =
      new com.google.protobuf.AbstractParser<ExportModelResponse>() {
        @java.lang.Override
        public ExportModelResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ExportModelResponse(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<ExportModelResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ExportModelResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| googleapis/java-aiplatform | proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ExportModelResponse.java | Java | apache-2.0 | 16,200 |
/*!
* Module requirements
*/
var NeopreneError = require('../error')
/**
 * Document Validation Error
 *
 * @api private
 * @param {Document} instance
 * @inherits NeopreneError
 */
function ValidationError (instance) {
  NeopreneError.call(this, "Validation failed");
  // arguments.callee is forbidden in ES5 strict mode; reference the named
  // function directly instead.
  Error.captureStackTrace(this, ValidationError);
  this.name = 'ValidationError';
  this.errors = instance.errors = {};
};
/**
 * Console.log helper: renders the error name followed by every
 * field-level error message, comma separated.
 * @api private
 */
ValidationError.prototype.toString = function () {
  var errors = this.errors;
  var messages = Object.keys(errors).map(function (key) {
    return String(errors[key]);
  });
  return this.name + ': ' + messages.join(', ');
};
/*!
 * Inherits from NeopreneError.
 */
// Object.setPrototypeOf is the standard replacement for assigning __proto__,
// which is a deprecated Annex B feature.
Object.setPrototypeOf(ValidationError.prototype, NeopreneError.prototype);
/*!
 * Module exports: the ValidationError constructor.
 */
module.exports = exports = ValidationError;
| rorymadden/neoprene | lib/errors/validation.js | JavaScript | apache-2.0 | 813 |
/*
* Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
*/
package com.aqua.filetransfer.ftp;
import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;
import jsystem.framework.JSystemProperties;
import jsystem.framework.system.SystemObjectImpl;
import jsystem.utils.FileUtils;
import jsystem.utils.ReflectionUtils;
import jsystem.utils.StringUtils;
import systemobject.terminal.Cli;
import systemobject.terminal.Prompt;
import com.aqua.sysobj.conn.CliConnection;
import com.aqua.sysobj.conn.CliConnectionImpl;
import com.aqua.sysobj.conn.CliFactory;
/**
* <b>SystemObject for running FTP client on a remote machine.</b><br>
* The main purpose of this system object is to enable file transfer
* without assuming an FTP server is running on the remote machine.<br>
* In a typical usage of this SystemObject, an embedded FTP server
* will be activated on the local machine.
* A {@link Cli} session is opened with the remote client the session
* activates the FTP client on the remote machine. <br>
*
* <u>Using FTPRemoteClient</u><br>
* SystemObject can be instantiated from sut file or directly in the code.
* Once initiated copy operations can be used.
* The copy operations identifies whether a connection is already open if not
* a connection is opened.<br>
* In many cases the remote server (telnet/ssh) limits number of connections;
* use the {@link #closeFTPSession()} to close connection when needed.<br>
*
* Passivation: since TAS 4.9 the sys object support passivation. Please note that passivation
* is only supported when the remote client is a linux machine.
* In case the built-in prompts are not enough to open an FTP session
* with the FTP server you are using the system object also supports adding additional FTP prompts.
* To do that write a property file called {@link #FILE_TRANSFER_PROPERTIES_FILE_NAME}
* (in run directory) and add to it the following properties:
* {@link #FTP_LOGIN_PROMPTS} - comma seperated prompts which identifies that
* the FTP server waits for the user to enter the login user name
*
* {@link #FTP_PASSWORD_PROMPTS} - comma seperated prompts which identifies that
* the FTP server waits for the user to enter the password
*
* {@link #FTP_PROMPTS} - comma seperated prompts which identifies that
* the FTP server is waiting for an ftp command
*
* Since TAS 4.9 cli connectivity parameters to can be set using CliConnection.
* This can be done either by passing a CliConnection to the FtpRemoteClient constructor
* or setting the <code>cliConnection</code> member through the SUT file.
* When connectivity parameters are set using a CliConnection other connectivity
* parameters are ignored (host,operatingSystem,protocol,port,user,password).
*
* FTP Server address:
* -------------------
* FTP Server address is fetched as following:
* If the user gave value to the member {@link #ftpServerHostName} through the SUT file
* or by activating it's setter this will be the server to which the remote ftp client will
* try to connect.
* Next, when connecting, the system object will try to fetch the property {@value #LOCAL_HOST_ADDRESS_PROPERTY}
* from the jsystem.properties file, if the property was set it will use it as server address
* otherwise, the system object uses java API to get local machine host name and uses it as server address.
*/
public class FTPRemoteClient extends SystemObjectImpl {
	// Optional properties file (looked up in the run directory) used to extend
	// the built-in FTP prompt lists.
	public static final String FILE_TRANSFER_PROPERTIES_FILE_NAME = "filetransfer.properties";
	// Property keys read from the file above (comma separated prompt lists).
	public static final String FTP_PROMPTS = "ftp.prompts";
	public static final String FTP_LOGIN_PROMPTS = "ftp.login.prompts";
	public static final String FTP_PASSWORD_PROMPTS = "ftp.password.prompts";
	// jsystem.properties key that overrides the FTP server address.
	public static final String LOCAL_HOST_ADDRESS_PROPERTY = "local.host.external.name";
	// When set (via SUT file or constructor), the connectivity members below
	// (host/operatingSystem/protocol/port/user/password) are ignored.
	public CliConnection cliConnection;
	// Lazily opened CLI session to the remote client; null when closed.
	private Cli cli;
	private String host;
	private String operatingSystem = CliFactory.OPERATING_SYSTEM_WINDOWS;
	private String protocol = "telnet";
	private int port = 23;
	private String user;
	private String password;
	// FTP server address; when empty it is resolved from jsystem.properties or
	// the local machine's host name (see getFTPServerAddress()).
	private String ftpServerHostName;
	private String ftpUserName="aqua";
	private String ftpPassword="aqua";
	// Transfer type: true for ascii, false for binary.
	private boolean ascii ;
	// Prompt sets used while driving the remote ftp client.
	private Prompt[] ftpGeneralPrompts;
	private Prompt[] ftpLoginPrompts;
	private Prompt[] ftpPasswordPrompts;
	private java.net.InetAddress localMachine;
	// Interactive prompting mode for mget/mput ("prompt on"/"prompt off").
	private boolean promptOn = true;
	/**
	 * Constructs a FTPRemoteClient whose CLI session is taken from the given
	 * pre-configured connection; other connectivity members are ignored.
	 *
	 * @param cliConn cli connection used to reach the remote client
	 * @param ftpServerHostName address of the FTP server the remote client connects to
	 * @throws Exception on initialization failure
	 */
	public FTPRemoteClient(CliConnection cliConn,String ftpServerHostName) throws Exception{
		cliConnection = cliConn;
		setFtpServerHostName(ftpServerHostName);
	}
	/**
	 * Constructs a FTPRemoteClient for working on local machine as the remote machine.<br>
	 * Used for testing purposes.
	 */
	public FTPRemoteClient() throws Exception{
		localMachine = java.net.InetAddress.getLocalHost();
		setHost(localMachine.getHostName());
	}
	/**
	 * Constructs a FTPRemoteClient where the remote machine is this machine.
	 * The FTPRemoteClient assumes Aqua's embedded FTP server is running on
	 * this machine.
	 */
	public FTPRemoteClient(String user,String password) throws Exception {
		this();
		setUser(user);
		setPassword(password);
	}
	/**
	 * Constructs a FTPRemoteClient where the remote machine is <code>host</code>.
	 * The FTPRemoteClient assumes Aqua's embedded FTP server is running on
	 * this machine.
	 */
	public FTPRemoteClient(String host,String telnetUser,String telnetPassword,String ftpServerHostName) throws Exception{
		this(telnetUser,telnetPassword);
		setHost(host);
		setFtpServerHostName(ftpServerHostName);
	}
	/**
	 * Initializes {@link FTPRemoteClient} members and verifies that
	 * a telnet connection can be opened to the remote client and
	 * that the remote client can open a FTP connection to the server.<br>
	 * All connections are closed when initialization is done.
	 * @see SystemObjectImpl#init()
	 */
	public void init() throws Exception {
		super.init();
		// Build the prompt arrays (built-in defaults plus optional
		// filetransfer.properties additions) used by all FTP operations.
		initPrompts();
	}
	/**
	 * Closes connection to remote machine.
	 * Many telnet/ssh servers limit concurrent connections, so call this when
	 * the session is no longer needed; safe to call when nothing is open.
	 */
	public void closeFTPSession(){
		// Quit the ftp client first, then drop the underlying CLI connection.
		closeFtp();
		closeCli();
	}
/**
* Copies a file from FTP server machine(in most cases it will be the local machine)
* to the remote client.<br>
* Source file path should be relative to FTP user home directory and not absolute
* file path.
* Destination can be either absolute destination path or relative to client's
* user directory.<br>
*/
public void copyFileFromLocalMachineToRemoteClient(String source, String destination) throws Exception {
StringBuffer stringbuffer = new StringBuffer("get ");
destination = adjustPath(destination);
stringbuffer.append(source);
stringbuffer.append(" ");
stringbuffer.append(destination);
copyFileViaFTP(stringbuffer.toString());
}
	/**
	 * Copies all files from FTP server machine(in most cases it will be the local machine)
	 * to the remote client.<br>
	 *
	 * @param filesPath - String Array (String...) of full file path.<br>
	 * @throws Exception on session or transfer failure
	 */
	public void copyAllFilesFromLocalMachineToLocalRemote(String... filesPath) throws Exception{
		// "mget" fetches multiple files from the server in a single command.
		copyAllFilesViaFTP("mget ", filesPath);
	}
/**
* Copies a file from the remote client to FTP server machine(in most cases it will be
* the local machine)
*
* Source file path can be either absolute destination path or relative to client's
* user directory.
* Destination should be relative to FTP user home directory and not absolute
* file path.
*/
public void copyFileFromRemoteClientToLocalMachine(String source, String destination) throws Exception {
source = adjustPath(source);
StringBuffer stringbuffer = new StringBuffer("put ");
stringbuffer.append(source);
stringbuffer.append(" ");
stringbuffer.append(destination);
copyFileViaFTP(stringbuffer.toString());
}
	/**
	 * Copies all files from remote client to FTP server machine(in most cases it will be
	 * the local machine).<br>
	 *
	 * @param filesPath - String Array (String...) of full file path.<br>
	 * @throws Exception on session or transfer failure
	 */
	public void copyAllFilesFromRemoteMachineToLocalMachine(String... filesPath) throws Exception{
		// "mput" uploads multiple files to the server in a single command.
		copyAllFilesViaFTP("mput ", filesPath);
	}
	// Opens a session if needed, applies the configured transfer and prompt
	// modes, then runs the given single-file ftp command (get/put).
	private void copyFileViaFTP(String command) throws Exception {
		openFTPSession();
		setAsciiMode(isAscii());
		setPromptMode(isPromptOn());
		runCliCommand(command);
	}
private void copyAllFilesViaFTP(String command, String... filesPath) throws Exception {
StringBuffer stringBuffer = new StringBuffer(command);
openFTPSession();
setAsciiMode(isAscii());
setPromptMode(isPromptOn());
for(String currentFilePath : filesPath){
String source = adjustPath(currentFilePath);
stringBuffer.append(source);
stringBuffer.append(" ");
}
runCliCommand(stringBuffer.toString());
}
	// Runs an ftp transfer command and verifies the server reports transfer
	// completion (FTP status code 226) within a 5 minute timeout.
	private void runCliCommand(String command) throws Exception{
		cli.command(command , 1000 *60 * 5,true,false,null,ftpGeneralPrompts);
		if (cli.getResult().indexOf("226") < 0){
			throw new Exception("Failed in files transfer");
		}
	}
	/**
	 * Changes ftp session mode to passive (or back to active).
	 *
	 * @param isPassive desired passive state
	 * @throws Exception when the ftp client rejects the "passive" command
	 */
	public void passivate(boolean isPassive) throws Exception {
		openFTPSession();
		// "passive" is a toggle command: issue it at most twice until the
		// reported state ("on"/"off") matches the requested one.
		for (int i = 0; i < 2;i++){
			cli.command("passive",1000*60,true,false,null,ftpGeneralPrompts);
			String result = cli.getResult().toLowerCase();
			boolean on = result.indexOf("on") >= 0;
			boolean off = result.indexOf("off")>= 0;
			boolean notSupported = result.indexOf("invalid")>= 0;
			if (notSupported){
				throw new Exception("Passivation not supported");
			}
			if ((isPassive && on) ||(!isPassive && off) ){
				break;
			}
		}
	}
	/**
	 * Terminates FTPRemoteClient.
	 */
	public void close() {
		// Close ftp and CLI sessions before the SystemObject teardown.
		closeFTPSession();
		super.close();
	}
	/**
	 * Opens FTP session: establishes the CLI connection if needed and logs the
	 * remote ftp client into the server (no-op when already logged in).
	 */
	private void openFTPSession() throws Exception {
		initCli();
		ftpLogin();
	}
	/**
	 * Lazily creates the CLI session to the remote client. When cliConnection
	 * is set it is used as-is; otherwise a session is built from the
	 * host/operatingSystem/protocol/user/password members.
	 */
	private void initCli() throws Exception {
		if (cli == null){
			if (cliConnection != null){
				initCliFromCliConnectionImpl();
				return;
			}
			// Default shell prompt: any line ending with '>'.
			Prompt p = new Prompt();
			p.setPrompt(">");
			p.setCommandEnd(true);
			cli =
				CliFactory.createCli(getHost(),getOperatingSystem(), getProtocol(),getUser(),getPassword(),new Prompt[]{p});
		}
	}
	// Connects the supplied CliConnection (if not yet connected) and extracts
	// its private Cli member via reflection, since CliConnectionImpl does not
	// expose it publicly.
	private void initCliFromCliConnectionImpl() throws Exception{
		if (!cliConnection.isConnected()){
			cliConnection.connect();
		}
		cli = (Cli)ReflectionUtils.getField("cli", CliConnectionImpl.class).get(cliConnection);
	}
	/**
	 * Quits the remote ftp client ("bye"). Failures are reported rather than
	 * thrown so teardown can continue.
	 */
	private void closeFtp(){
		try {
			// 221 is the FTP "closing control connection" status code.
			cli.command("bye", 1000 *2 ,true,false,null,new Prompt[]{new Prompt("bye.",true)});
			if (cli.getResult().indexOf("221") < 0){
				report.report("Did not find success code 221");
			}
		}catch (Exception e){
			report.report("Could not find prompt after closing session. " + e.getMessage());
		}
	}
	/**
	 * Closes the CLI session (and the external CliConnection when one is used)
	 * and clears the cached session so the next operation reconnects.
	 */
	private void closeCli(){
		if (cli != null){
			try {
				if (cliConnection != null){
					closeCliConnectionImpl();
				}
				cli.close();
			}catch (Exception e){
				report.report("Failed closing telnet connection",e);
			}
		}
		// Always reset so a later openFTPSession() starts from scratch.
		cli=null;
	}
	// Disconnects the externally supplied CliConnection if it is connected.
	private void closeCliConnectionImpl() throws Exception{
		if (cliConnection.isConnected()){
			cliConnection.disconnect();
		}
	}
	/**
	 * Starts FTP client and performs login.
	 * Skips login when an ftp prompt is already showing. Otherwise runs the
	 * ftp client and checks the standard FTP status codes along the way:
	 * 220 (server ready), 331 (password required), 230 (user logged in).
	 */
	private void ftpLogin() throws Exception{
		// Send an empty command to sample the current prompt.
		cli.command("");
		String result = cli.getResult();
		for (String ftpPrompt:promptsToStringArray(ftpGeneralPrompts)){
			if (result.indexOf(ftpPrompt) >=0 ){
				//we are already logged in
				return;
			}
		}
		String serverAddress = getFTPServerAddress();
		cli.command("ftp " + serverAddress, 1000*60,true,false,null,ftpLoginPrompts);
		if (cli.getResult().indexOf("220") < 0){
			throw new Exception("Failed connecting to FTP server.("+serverAddress+"). Please verify that there is a ping between the remote client to the runner machine");
		}
		cli.command(getFtpUserName(),1000*60,true,false,null,ftpPasswordPrompts);
		if (cli.getResult().indexOf("331") < 0){
			throw new Exception("Failed in login process");
		}
		cli.command(getFtpPassword(),1000*60,true,false,null,ftpGeneralPrompts);
		if (cli.getResult().indexOf("230") < 0){
			throw new Exception("User not authorized to login");
		}
	}
/**
* Changes ftp session mode (ascii/binary)
*/
private void setAsciiMode(boolean isAscii) throws Exception {
String command = "binary";
if (isAscii){
command="ascii";
}
cli.command(command,1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("200") < 0){
throw new Exception("Failed changing to binary mode");
}
}
	/**
	 * Changes the FTP session mode ( on / off )
	 * @param promptOn true to enable interactive prompting for mget/mput
	 * @throws Exception when the client does not confirm the mode change
	 */
	private void setPromptMode(boolean promptOn) throws Exception{
		String command = "prompt off";
		if (promptOn){
			command="prompt on";
		}
		cli.command(command,1000*60,true,false,null,ftpGeneralPrompts);
		// The ftp client answers "Interactive mode on/off." on success.
		if (cli.getResult().indexOf("Interactive") < 0){
			throw new Exception("Failed changing prompt mode");
		}
	}
	// Interactive prompt mode used for multi-file transfers (mget/mput).
	public boolean isPromptOn() {
		return promptOn;
	}
	public void setPromptOn(boolean promptOn) {
		this.promptOn = promptOn;
	}
/**
* Adjusts file path to operating system.
*/
private String adjustPath(String path) {
if (CliFactory.OPERATING_SYSTEM_WINDOWS.equals(getOperatingSystem())){
String toReturn = FileUtils.convertToWindowsPath(path);
if (!toReturn.startsWith("\"")){
toReturn = "\""+toReturn+"\"";
}
return toReturn;
}else {
return FileUtils.replaceSeparator(path);
}
}
	/**
	 * Builds the ftp prompt arrays from built-in defaults, optionally merged
	 * with user supplied prompts read from filetransfer.properties
	 * (keys: ftp.prompts, ftp.login.prompts, ftp.password.prompts).
	 */
	private void initPrompts() throws Exception {
		String[] defaultFTPPrompts = new String[]{"ftp>"};
		String[] defaultLoginPrompts = new String[]{"):"};
		String[] defaultPasswordPrompts = new String[]{"for "+getFtpUserName(),"Password:"};
		// No properties file - fall back to the defaults only.
		if (!new File(FILE_TRANSFER_PROPERTIES_FILE_NAME).exists()){
			ftpGeneralPrompts = stringArrayToPrompts(defaultFTPPrompts);
			ftpLoginPrompts = stringArrayToPrompts(defaultLoginPrompts);
			ftpPasswordPrompts = stringArrayToPrompts(defaultPasswordPrompts);
			return;
		}
		Properties props = new Properties();
		FileInputStream stream = new FileInputStream(FILE_TRANSFER_PROPERTIES_FILE_NAME);
		try {
			props.load(stream);
		}finally{
			// Best-effort close; failures here are deliberately ignored.
			try{stream.close();}catch(Exception e){};
		}
		// For each prompt category: split the configured list and merge it with
		// the built-in defaults before wrapping in Prompt objects.
		String ftpPrompts = props.getProperty(FTP_PROMPTS);
		String[] ftpPromptsAsStringArray = StringUtils.split(ftpPrompts, ";, ");
		ftpPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPromptsAsStringArray,defaultFTPPrompts});
		ftpGeneralPrompts = stringArrayToPrompts(ftpPromptsAsStringArray);
		String _ftpLoginPrompts = props.getProperty(FTP_LOGIN_PROMPTS);
		String[] ftpLoginPromptsAsStringArray = StringUtils.split(_ftpLoginPrompts, ";, ");
		ftpLoginPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpLoginPromptsAsStringArray,defaultLoginPrompts});
		ftpLoginPrompts = stringArrayToPrompts(ftpLoginPromptsAsStringArray);
		String _ftpPasswordPrompts = props.getProperty(FTP_PASSWORD_PROMPTS);
		String[] ftpPasswordPromptsAsStringArray = StringUtils.split(_ftpPasswordPrompts, ";, ");
		ftpPasswordPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPasswordPromptsAsStringArray,defaultPasswordPrompts});
		ftpPasswordPrompts = stringArrayToPrompts(ftpPasswordPromptsAsStringArray);
	}
/**
 * Extracts the textual prompt of every {@link Prompt} element.
 * A null input yields an empty array.
 */
private String[] promptsToStringArray(Prompt[] prompts){
    if (prompts == null) {
        return new String[0];
    }
    String[] result = new String[prompts.length];
    for (int index = 0; index < prompts.length; index++) {
        result[index] = prompts[index].getPrompt();
    }
    return result;
}
/**
 * Wraps every string in a command-terminating {@link Prompt}.
 * A null input yields an empty array.
 */
private Prompt[] stringArrayToPrompts(String[] promptsAsString){
    if (promptsAsString == null) {
        return new Prompt[0];
    }
    Prompt[] result = new Prompt[promptsAsString.length];
    for (int index = 0; index < promptsAsString.length; index++) {
        Prompt prompt = new Prompt(promptsAsString[index], false);
        prompt.setCommandEnd(true);
        result[index] = prompt;
    }
    return result;
}
/**
 * Resolves the address clients should use to reach the FTP server: the
 * explicitly configured host name if set, otherwise the
 * {@code LOCAL_HOST_ADDRESS_PROPERTY} JSystem preference, otherwise the
 * local machine's host name.
 */
private String getFTPServerAddress(){
    if (!StringUtils.isEmpty(getFtpServerHostName())) {
        return getFtpServerHostName();
    }
    // fetch the preference once instead of querying it twice
    String preferredAddress =
            JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY);
    if (!StringUtils.isEmpty(preferredAddress)) {
        return preferredAddress;
    }
    return localMachine.getHostName();
}
/**********************************************************************
 * FTPRemoteClient setters and getters
 *********************************************************************/
/** Returns the host name/address of the remote machine driven over the terminal connection. */
public String getHost() {
return host;
}
/** Returns the remote machine's operating system identifier. */
public String getOperatingSystem() {
return operatingSystem;
}
/** Sets the remote machine's operating system identifier. */
public void setOperatingSystem(String operatingSystem) {
this.operatingSystem = operatingSystem;
}
/** Returns the terminal protocol used to reach the remote machine. */
public String getProtocol() {
return protocol;
}
/** Sets the terminal protocol used to reach the remote machine. */
public void setProtocol(String protocol) {
this.protocol = protocol;
}
/** Sets the host name/address of the remote machine. */
public void setHost(String remoteHost) {
this.host = remoteHost;
}
/** Returns the password used for the terminal connection. */
public String getPassword() {
return password;
}
/** Sets the password used for the terminal connection. */
public void setPassword(String telnetPassword) {
this.password = telnetPassword;
}
/** Returns the port of the terminal connection. */
public int getPort() {
return port;
}
/** Sets the port of the terminal connection. */
public void setPort(int telnetPort) {
this.port = telnetPort;
}
/** Returns the user name used for the terminal connection. */
public String getUser() {
return user;
}
/** Sets the user name used for the terminal connection. */
public void setUser(String telnetUser) {
this.user = telnetUser;
}
/** Returns the FTP server host name clients should connect to. */
public String getFtpServerHostName() {
return ftpServerHostName;
}
/** Sets the FTP server host name clients should connect to. */
public void setFtpServerHostName(String ftpServerHostName) {
this.ftpServerHostName = ftpServerHostName;
}
/** Returns the user name for the FTP login. */
public String getFtpUserName() {
return ftpUserName;
}
/** Sets the user name for the FTP login. */
public void setFtpUserName(String ftpUserName) {
this.ftpUserName = ftpUserName;
}
/** Returns the password for the FTP login. */
public String getFtpPassword() {
return ftpPassword;
}
/** Sets the password for the FTP login. */
public void setFtpPassword(String ftpPassword) {
this.ftpPassword = ftpPassword;
}
/** Returns whether transfers use ASCII mode (binary otherwise). */
public boolean isAscii() {
return ascii;
}
/** Sets whether transfers use ASCII mode (binary otherwise). */
public void setAscii(boolean ascii) {
this.ascii = ascii;
}
}
| Top-Q/jsystem | jsystem-core-system-objects/FileTransfer-so/src/main/java/com/aqua/filetransfer/ftp/FTPRemoteClient.java | Java | apache-2.0 | 17,775 |
/*******************************************************************************
* Copyright 2017 Bstek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.bstek.uflo.command.impl;
import java.util.List;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Restrictions;
import com.bstek.uflo.command.Command;
import com.bstek.uflo.env.Context;
import com.bstek.uflo.model.HistoryTask;
/**
* @author Jacky.gao
* @since 2013ๅนด9ๆ12ๆฅ
*/
public class GetListHistoryTasksCommand implements Command<List<HistoryTask>> {
private long processInstanceId;
public GetListHistoryTasksCommand(long processInstanceId) {
this.processInstanceId = processInstanceId;
}
@SuppressWarnings("unchecked")
public List<HistoryTask> execute(Context context) {
return context.getSession().createCriteria(HistoryTask.class)
.add(Restrictions.eq("processInstanceId", processInstanceId))
.addOrder(Order.desc("endDate")).list();
}
}
| youseries/uflo | uflo-core/src/main/java/com/bstek/uflo/command/impl/GetListHistoryTasksCommand.java | Java | apache-2.0 | 1,555 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.elasticmapreduce.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DescribeClusterResult JSON Unmarshaller.
 * <p>
 * Walks the JSON token stream and populates a {@link DescribeClusterResult}
 * from the top-level {@code Cluster} member. Code is generated; do not edit
 * by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeClusterResultJsonUnmarshaller implements Unmarshaller<DescribeClusterResult, JsonUnmarshallerContext> {
public DescribeClusterResult unmarshall(JsonUnmarshallerContext context) throws Exception {
DescribeClusterResult describeClusterResult = new DescribeClusterResult();
// Remember the starting depth/parent so we know when the result object ends.
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
// A JSON null maps to an empty result object.
return describeClusterResult;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
// Only the "Cluster" member is expected at the target depth.
if (context.testExpression("Cluster", targetDepth)) {
context.nextToken();
describeClusterResult.setCluster(ClusterJsonUnmarshaller.getInstance().unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
// Stop once the object we started unmarshalling has been closed.
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return describeClusterResult;
}
private static DescribeClusterResultJsonUnmarshaller instance;
/** Lazily created singleton; the benign race is harmless as instances are stateless. */
public static DescribeClusterResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new DescribeClusterResultJsonUnmarshaller();
return instance;
}
}
| dagnir/aws-sdk-java | aws-java-sdk-emr/src/main/java/com/amazonaws/services/elasticmapreduce/model/transform/DescribeClusterResultJsonUnmarshaller.java | Java | apache-2.0 | 2,841 |
๏ปฟ// @target: ES6
var x = 1 % `abc${ 1 }def`; | freedot/tstolua | tests/cases/conformance/es6/templates/templateStringInModuloES6.ts | TypeScript | apache-2.0 | 46 |
// Generated Justinmind prototype wiring for screen
// cd8b0318-8942-4a64-b2c9-ee7c253d6b7d: registers the click and pageload
// handlers on the simulation root. Each branch builds a data-driven "cases"
// array and hands it to jimEvent(...).launchCases().
jQuery("#simulation")
.on("click", ".s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d .click", function(event, data) {
var jEvent, jFirer, cases;
if(data === undefined) { data = event; }
jEvent = jimEvent(event);
jFirer = jEvent.getEventFirer();
// Label_58/Label_59: flash the pressed (lighter) color, then restore.
if(jFirer.is("#s-Label_58")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 span": {
"attributes": {
"color": "#80B8F1",
"text-align": "left",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59": {
"attributes": {
"font-size": "20.0pt",
"font-family": "IOS8-Icons-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 span": {
"attributes": {
"color": "#80B8F1",
"text-align": "left",
"text-decoration": "none",
"font-family": "IOS8-Icons-Regular,Arial",
"font-size": "20.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimPause",
"parameter": {
"pause": 300
},
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 span": {
"attributes": {
"color": "#007DFF",
"text-align": "left",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59": {
"attributes": {
"font-size": "20.0pt",
"font-family": "IOS8-Icons-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 span": {
"attributes": {
"color": "#157EFB",
"text-align": "left",
"text-decoration": "none",
"font-family": "IOS8-Icons-Regular,Arial",
"font-size": "20.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
// Cover image: briefly dim to 75% opacity, then restore full opacity.
} else if(jFirer.is("#s-cover")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes": {
"opacity": "0.75"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=75)",
"filter": "alpha(opacity=75)"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie8lte": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=75)",
"filter": "alpha(opacity=75)"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimPause",
"parameter": {
"pause": 300
},
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes": {
"opacity": "1.0"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)",
"filter": "alpha(opacity=100)"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie8lte": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)",
"filter": "alpha(opacity=100)"
}
}
} ],
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
// Hotspot_1: navigate to another prototype screen.
} else if(jFirer.is("#s-Hotspot_1")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimNavigation",
"parameter": {
"target": "screens/6709a53d-60b3-4498-bf73-977706fff4da"
},
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
// Hotspot_3: navigate to another prototype screen.
} else if(jFirer.is("#s-Hotspot_3")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimNavigation",
"parameter": {
"target": "screens/27852e19-fc20-4cac-8d96-13d00ac70f75"
},
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
// Button_1: flash the pressed (lighter) color, then restore.
} else if(jFirer.is("#s-Button_1")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "center"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 span": {
"attributes": {
"color": "#80B8F1",
"text-align": "center",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimPause",
"parameter": {
"pause": 300
},
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "center"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 span": {
"attributes": {
"color": "#007DFF",
"text-align": "center",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
}
})
// On page load, Label_35 is filled with the current system time ("HH:MM PM").
.on("pageload", ".s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d .pageload", function(event, data) {
var jEvent, jFirer, cases;
if(data === undefined) { data = event; }
jEvent = jimEvent(event);
jFirer = jEvent.getEventFirer();
if(jFirer.is("#s-Label_35")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimSetValue",
"parameter": {
"target": "#s-Label_35",
"value": {
"action": "jimConcat",
"parameter": [ {
"action": "jimSubstring",
"parameter": [ {
"action": "jimSystemTime"
},"0","5" ]
}," PM" ]
}
},
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
}
}); | yicold/axure-case | caiqu/่ดข่ถฃv1.5/resources/screens/cd8b0318-8942-4a64-b2c9-ee7c253d6b7d-1445822636718.js | JavaScript | apache-2.0 | 13587 |
# Copyright 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from sqlalchemy.sql import expression as expr
from neutron.db import models_v2
from neutron.extensions import l3
from neutron_lib import constants as l3_constants
from neutron_lib import exceptions as n_exc
from networking_cisco._i18n import _, _LW
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.common import cisco_constants
from networking_cisco.plugins.cisco.db.l3 import ha_db
from networking_cisco.plugins.cisco.db.l3 import l3_models
from networking_cisco.plugins.cisco.db.l3.l3_router_appliance_db import (
L3RouterApplianceDBMixin)
from networking_cisco.plugins.cisco.extensions import routerhostingdevice
from networking_cisco.plugins.cisco.extensions import routerrole
from networking_cisco.plugins.cisco.extensions import routertype
from networking_cisco.plugins.cisco.extensions import routertypeawarescheduler
from networking_cisco.plugins.cisco.l3 import drivers
LOG = logging.getLogger(__name__)

# Device owner used for the auxiliary gateway ports of (logical) global
# routers, plus shorthand aliases for commonly used attribute/role constants.
DEVICE_OWNER_GLOBAL_ROUTER_GW = cisco_constants.DEVICE_OWNER_GLOBAL_ROUTER_GW
HOSTING_DEVICE_ATTR = routerhostingdevice.HOSTING_DEVICE_ATTR
ROUTER_ROLE_GLOBAL = cisco_constants.ROUTER_ROLE_GLOBAL
ROUTER_ROLE_LOGICAL_GLOBAL = cisco_constants.ROUTER_ROLE_LOGICAL_GLOBAL
ROUTER_ROLE_HA_REDUNDANCY = cisco_constants.ROUTER_ROLE_HA_REDUNDANCY

# Range/offset pairs used when deriving HSRP group identifiers
# (see ASR1kL3RouterDriver.generate_ha_group_id).
TENANT_HSRP_GRP_RANGE = 1
TENANT_HSRP_GRP_OFFSET = 1064
EXT_HSRP_GRP_RANGE = 1
EXT_HSRP_GRP_OFFSET = 1064

# Prefix and total length used for device-side router instance names.
N_ROUTER_PREFIX = 'nrouter-'
DEV_NAME_LEN = 14
class TopologyNotSupportedByRouterError(n_exc.Conflict):
    """Conflict raised when a requested attachment topology cannot be
    realized on the router (e.g. a network already served by another
    user-visible router)."""
    message = _("Requested topology cannot be supported by router.")
class ASR1kL3RouterDriver(drivers.L3RouterBaseDriver):
    """L3 router type driver for Cisco ASR1k hosting devices.

    Besides regular tenant routers, this driver maintains one "global"
    router per hosting device (providing external connectivity outside any
    VRF) and a single hidden "logical global" router that carries the HA
    settings shared by all global routers.
    """

    def create_router_precommit(self, context, router_context):
        # No ASR1k-specific pre-commit work on router creation.
        pass

    def create_router_postcommit(self, context, router_context):
        # No ASR1k-specific post-commit work on router creation.
        pass

    def update_router_precommit(self, context, router_context):
        # No ASR1k-specific pre-commit work on router update.
        pass
def update_router_postcommit(self, context, router_context):
# Whenever a gateway is added to, or removed from, a router hosted on
# a hosting device, we must ensure that a global router is running
# (for add operation) or not running (for remove operation) on that
# hosting device.
current = router_context.current
if current[HOSTING_DEVICE_ATTR] is None:
return
e_context = context.elevated()
if current['gw_port_id']:
self._conditionally_add_global_router(e_context, current)
else:
self._conditionally_remove_global_router(
e_context, router_context.original, True)
    def delete_router_precommit(self, context, router_context):
        # No ASR1k-specific pre-commit work on router deletion.
        pass

    def delete_router_postcommit(self, context, router_context):
        # No ASR1k-specific post-commit work on router deletion.
        pass
    def schedule_router_precommit(self, context, router_context):
        # Nothing to prepare before a router is scheduled.
        pass

    def schedule_router_postcommit(self, context, router_context):
        # When the hosting device hosts a Neutron router with external
        # connectivity, a "global" router (modeled as a Neutron router) must
        # also run on the hosting device (outside of any VRF) to enable the
        # connectivity.
        current = router_context.current
        if current['gw_port_id'] and current[HOSTING_DEVICE_ATTR] is not None:
            self._conditionally_add_global_router(context.elevated(), current)
    def unschedule_router_precommit(self, context, router_context):
        # Nothing to prepare before a router is unscheduled.
        pass

    def unschedule_router_postcommit(self, context, router_context):
        # When there is no longer any router with external gateway hosted on
        # a hosting device, the global router on that hosting device can also
        # be removed.
        current = router_context.current
        hd_id = current[HOSTING_DEVICE_ATTR]
        if current['gw_port_id'] and hd_id is not None:
            self._conditionally_remove_global_router(context.elevated(),
                                                     current)
    def add_router_interface_precommit(self, context, r_port_context):
        """Reject attaching more than one user-visible router to a network.

        Inside an ASR1k, VLAN sub-interfaces are used to connect to internal
        neutron networks. Only one such sub-interface can be created for each
        VLAN. As the VLAN sub-interface is added to the VRF representing the
        Neutron router, we must only allow one Neutron router to attach to a
        particular Neutron subnet/network.

        :raises TopologyNotSupportedByRouterError: if another user-visible
            router already has an interface on the network.
        """
        if (r_port_context.router_context.current[routerrole.ROUTER_ROLE_ATTR]
                == ROUTER_ROLE_HA_REDUNDANCY):
            # redundancy routers can be exempt as we check the user visible
            # routers and the request will be rejected there.
            return
        e_context = context.elevated()
        if r_port_context.current is None:
            # interface specified by subnet id: resolve the subnet's network
            sn = self._core_plugin.get_subnet(e_context,
                                              r_port_context.current_subnet_id)
            net_id = sn['network_id']
        else:
            net_id = r_port_context.current['network_id']
        filters = {'network_id': [net_id],
                   'device_owner': [bc.constants.DEVICE_OWNER_ROUTER_INTF]}
        for port in self._core_plugin.get_ports(e_context,
                                                filters=filters):
            router_id = port['device_id']
            if router_id is None:
                continue
            router = self._l3_plugin.get_router(e_context, router_id)
            if router[routerrole.ROUTER_ROLE_ATTR] is None:
                # a user-visible (role-less) router is already attached
                raise TopologyNotSupportedByRouterError()
    def add_router_interface_postcommit(self, context, r_port_context):
        # No ASR1k-specific post-commit work when adding an interface.
        pass

    def remove_router_interface_precommit(self, context, r_port_context):
        # No ASR1k-specific pre-commit work when removing an interface.
        pass

    def remove_router_interface_postcommit(self, context, r_port_context):
        # No ASR1k-specific post-commit work when removing an interface.
        pass

    # Floating IP operations need no ASR1k-specific handling.
    def create_floatingip_precommit(self, context, fip_context):
        pass

    def create_floatingip_postcommit(self, context, fip_context):
        pass

    def update_floatingip_precommit(self, context, fip_context):
        pass

    def update_floatingip_postcommit(self, context, fip_context):
        pass

    def delete_floatingip_precommit(self, context, fip_context):
        pass

    def delete_floatingip_postcommit(self, context, fip_context):
        pass
def ha_interface_ip_address_needed(self, context, router, port,
ha_settings_db, ha_group_uuid):
if port['device_owner'] == bc.constants.DEVICE_OWNER_ROUTER_GW:
return False
else:
return True
    def generate_ha_group_id(self, context, router, port, ha_settings_db,
                             ha_group_uuid):
        """Derive a deterministic HSRP group id for a port.

        For gateway ports the id is derived from hex digits of the device
        router name (skipping the 8-char 'nrouter-' prefix); for other ports
        it comes from the leading hex digits of the network id. NOTE: with
        the current *_RANGE constants of 1 the modulo always yields 0, so
        the respective offset is effectively the group id.
        """
        if port['device_owner'] in {bc.constants.DEVICE_OWNER_ROUTER_GW,
                                    DEVICE_OWNER_GLOBAL_ROUTER_GW}:
            ri_name = self._router_name(router['id'])[8:DEV_NAME_LEN]
            group_id = int(ri_name, 16) % TENANT_HSRP_GRP_RANGE
            group_id += TENANT_HSRP_GRP_OFFSET
            return group_id
        else:
            net_id_digits = port['network_id'][:6]
            group_id = int(net_id_digits, 16) % EXT_HSRP_GRP_RANGE
            group_id += EXT_HSRP_GRP_OFFSET
            return group_id
    def pre_backlog_processing(self, context):
        """Clean up stale global routers before backlog processing runs.

        Deletes any global router whose hosting device no longer hosts a
        tenant/redundancy router with a gateway port set, and then removes
        the logical global router that served it, if one exists.
        """
        filters = {routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
        global_routers = self._l3_plugin.get_routers(context, filters=filters)
        if not global_routers:
            LOG.debug("There are no global routers")
            return
        for gr in global_routers:
            filters = {
                HOSTING_DEVICE_ATTR: [gr[HOSTING_DEVICE_ATTR]],
                routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_HA_REDUNDANCY, None]
            }
            invert_filters = {'gw_port_id': [None]}
            # count routers on this hosting device that DO have a gateway
            num_rtrs = self._l3_plugin.get_routers_count_extended(
                context, filters=filters, invert_filters=invert_filters)
            LOG.debug("Global router %(name)s[%(id)s] with hosting_device "
                      "%(hd)s has %(num)d routers with gw_port set on that "
                      "device",
                      {'name': gr['name'], 'id': gr['id'],
                       'hd': gr[HOSTING_DEVICE_ATTR], 'num': num_rtrs, })
            if num_rtrs == 0:
                LOG.warning(
                    _LW("Global router:%(name)s[id:%(id)s] is present for "
                        "hosting device:%(hd)s but there are no tenant or "
                        "redundancy routers with gateway set on that hosting "
                        "device. Proceeding to delete global router."),
                    {'name': gr['name'], 'id': gr['id'],
                     'hd': gr[HOSTING_DEVICE_ATTR]})
                self._delete_global_router(context, gr['id'])
                filters = {
                    #TODO(bmelande): Filter on routertype of global router
                    #routertype.TYPE_ATTR: [routertype_id],
                    routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
                log_global_routers = self._l3_plugin.get_routers(
                    context, filters=filters)
                if log_global_routers:
                    log_global_router_id = log_global_routers[0]['id']
                    self._delete_global_router(context, log_global_router_id,
                                               logical=True)
    def post_backlog_processing(self, context):
        # Nothing to do after backlog processing.
        pass
# ---------------- Create workflow functions -----------------
    def _conditionally_add_global_router(self, context, tenant_router):
        """Ensure a global router exists on the tenant router's hosting device.

        Also ensures the logical global router exists, connects both to the
        tenant router's external network via auxiliary gateway ports, and
        notifies the agents handling the global router.
        """
        # We could filter on hosting device id but we don't so we get all
        # global routers for this router type. We can then use that count to
        # determine which ha priority a new global router should get.
        filters = {
            routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
            routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
        global_routers = self._l3_plugin.get_routers(
            context, filters=filters)
        hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
        hosting_device_id = tenant_router[HOSTING_DEVICE_ATTR]
        ext_nw_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
        global_router = hd_to_gr_dict.get(hosting_device_id)
        logical_global_router = self._get_logical_global_router(context,
                                                                tenant_router)
        self._conditionally_add_auxiliary_external_gateway_port(
            context, logical_global_router, ext_nw_id, tenant_router, True)
        if global_router is None:
            # must create global router on hosting device
            global_router = self._create_global_router(
                context, hosting_device_id, hd_to_gr_dict, tenant_router,
                logical_global_router)
        self._conditionally_add_auxiliary_external_gateway_port(
            context, global_router, ext_nw_id, tenant_router)
        self._l3_plugin.add_type_and_hosting_device_info(context,
                                                         global_router)
        for ni in self._l3_plugin.get_notifiers(context, [global_router]):
            if ni['notifier']:
                ni['notifier'].routers_updated(context, ni['routers'])
    def _conditionally_add_auxiliary_external_gateway_port(
            self, context, global_router, ext_net_id, tenant_router,
            provision_ha=False, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
        """Connect the (logical) global router to ext_net_id unless it
        already is; optionally provision an HA group for the new port."""
        # the global router may or may not have an interface on the
        # external network that the tenant router uses
        filters = {
            'device_id': [global_router['id']],
            'device_owner': [port_type]}
        connected_nets = {
            p['network_id']: p['fixed_ips'] for p in
            self._core_plugin.get_ports(context, filters=filters)}
        if ext_net_id in connected_nets:
            # already connected to the external network so we're done
            return
        else:
            # not connected to the external network, so let's fix that
            aux_gw_port = self._create_auxiliary_external_gateway_port(
                context, global_router, ext_net_id, tenant_router, port_type)
            if provision_ha:
                self._provision_port_ha(context, aux_gw_port, global_router)
    def _create_auxiliary_external_gateway_port(
            self, context, global_router, ext_net_id, tenant_router,
            port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
        """Create an auxiliary gateway port for a (logical) global router.

        When a global router is connected to an external network then a
        special type of gateway port is created on that network. Such a
        port is called auxiliary gateway port. It has an ip address on
        each subnet of the external network. A (logical) global router
        never has a traditional Neutron gateway port.
        """
        filters = {
            'device_id': [tenant_router['id']],
            'device_owner': [l3_constants.DEVICE_OWNER_ROUTER_GW]}
        # fetch the gateway port of the *tenant* router so we can determine
        # the subnets of that port's network
        gw_port = self._core_plugin.get_ports(context,
                                              filters=filters)[0]
        fixed_ips = self._get_fixed_ips_subnets(context, gw_port)
        global_router_id = global_router['id']
        with context.session.begin(subtransactions=True):
            aux_gw_port = self._core_plugin.create_port(context, {
                'port': {
                    # tenant_id intentionally not set: port belongs to no one
                    'tenant_id': '',
                    'network_id': ext_net_id,
                    'mac_address': bc.constants.ATTR_NOT_SPECIFIED,
                    'fixed_ips': fixed_ips,
                    'device_id': global_router_id,
                    'device_owner': port_type,
                    'admin_state_up': True,
                    'name': ''}})
            router_port = bc.RouterPort(
                port_id=aux_gw_port['id'],
                router_id=global_router_id,
                port_type=port_type)
            context.session.add(router_port)
        return aux_gw_port
    def _create_global_router(
            self, context, hosting_device_id, hd_to_gr_dict, tenant_router,
            logical_global_router):
        """Create a global router on a hosting device.

        The new router is registered as a redundancy router of the logical
        global router; its HA priority decreases with the number of global
        routers that already exist.
        """
        r_spec = {'router': {
            # global routers are not tied to any tenant
            'tenant_id': '',
            'name': self._global_router_name(hosting_device_id),
            'admin_state_up': True}}
        global_router, r_hd_b_db = self._l3_plugin.do_create_router(
            context, r_spec, tenant_router[routertype.TYPE_ATTR], False,
            True, hosting_device_id, ROUTER_ROLE_GLOBAL)
        # make the global router a redundancy router for the logical
        # global router (which we treat as a hidden "user visible
        # router" (how's that for a contradiction of terms! :-) )
        with context.session.begin(subtransactions=True):
            ha_priority = (
                ha_db.DEFAULT_MASTER_PRIORITY -
                len(hd_to_gr_dict) * ha_db.PRIORITY_INCREASE_STEP)
            r_b_b = ha_db.RouterRedundancyBinding(
                redundancy_router_id=global_router['id'],
                priority=ha_priority,
                user_router_id=logical_global_router['id'])
            context.session.add(r_b_b)
        return global_router
    def _get_logical_global_router(self, context, tenant_router):
        """Return the logical global router, creating it if needed.

        Since HA is also enabled on the global routers on each hosting device
        those global routers need HA settings and VIPs. We represent that
        using a Neutron router that is never instantiated/hosted. That
        Neutron router is referred to as the "logical global" router.
        """
        filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
                   routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
        logical_global_routers = self._l3_plugin.get_routers(
            context, filters=filters)
        if not logical_global_routers:
            # must create logical global router
            logical_global_router = self._create_logical_global_router(
                context, tenant_router)
        else:
            logical_global_router = logical_global_routers[0]
            self._update_ha_redundancy_level(context, logical_global_router, 1)
        return logical_global_router
    def _create_logical_global_router(self, context, tenant_router):
        """Create the never-hosted logical global router plus its HA
        settings row."""
        r_spec = {'router': {
            # global routers are not tied to any tenant
            'tenant_id': '',
            'name': self._global_router_name('', logical=True),
            'admin_state_up': True,
            # set auto-schedule to false to keep this router un-hosted
            routertypeawarescheduler.AUTO_SCHEDULE_ATTR: False}}
        # notifications should never be sent for this logical router!
        logical_global_router, r_hd_b_db = (
            self._l3_plugin.do_create_router(
                context, r_spec, tenant_router[routertype.TYPE_ATTR],
                False, True, None, ROUTER_ROLE_LOGICAL_GLOBAL))
        with context.session.begin(subtransactions=True):
            r_ha_s_db = ha_db.RouterHASetting(
                router_id=logical_global_router['id'],
                ha_type=cfg.CONF.ha.default_ha_mechanism,
                redundancy_level=1,
                priority=ha_db.DEFAULT_MASTER_PRIORITY,
                probe_connectivity=False,
                probe_target=None,
                probe_interval=None)
            context.session.add(r_ha_s_db)
        return logical_global_router
def _get_fixed_ips_subnets(self, context, gw_port):
nw = self._core_plugin.get_network(context, gw_port['network_id'])
subnets = [{'subnet_id': s} for s in nw['subnets']]
return subnets
    def _provision_port_ha(self, context, ha_port, router, ha_binding_db=None):
        """Create the RouterHAGroup DB row for an auxiliary gateway port.

        Looks up the router's HA settings (unless given) and derives the
        HSRP group identity via generate_ha_group_id.
        """
        ha_group_uuid = uuidutils.generate_uuid()
        router_id = router['id']
        with context.session.begin(subtransactions=True):
            if ha_binding_db is None:
                ha_binding_db = self._get_ha_binding(context, router_id)
            group_id = self.generate_ha_group_id(
                context, router,
                {'device_owner': DEVICE_OWNER_GLOBAL_ROUTER_GW}, ha_binding_db,
                ha_group_uuid)
            r_ha_g = ha_db.RouterHAGroup(
                id=ha_group_uuid,
                tenant_id='',
                ha_type=ha_binding_db.ha_type,
                group_identity=group_id,
                ha_port_id=ha_port['id'],
                extra_port_id=None,
                subnet_id=ha_port['fixed_ips'][0]['subnet_id'],
                user_router_id=router_id,
                timers_config='',
                tracking_config='',
                other_config='')
            context.session.add(r_ha_g)
def _get_ha_binding(self, context, router_id):
with context.session.begin(subtransactions=True):
query = context.session.query(ha_db.RouterHASetting)
query = query.filter(
ha_db.RouterHASetting.router_id == router_id)
return query.first()
# ---------------- Remove workflow functions -----------------
    def _conditionally_remove_global_router(self, context, tenant_router,
                                            update_operation=False):
        """Remove the hosting device's global router when no tenant router
        needs it anymore.

        Deletes the global router's auxiliary gateway port on the tenant
        router's external network if unused, deletes the global router when
        it has no auxiliary gateway ports left, mirrors the cleanup on the
        logical global router, and notifies agents when needed.
        """
        filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
                   routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL],
                   HOSTING_DEVICE_ATTR: [tenant_router[HOSTING_DEVICE_ATTR]]}
        global_routers = self._l3_plugin.get_routers(context,
                                                     filters=filters)
        hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
        if global_routers:
            global_router_id = global_routers[0]['id']
            if not tenant_router or not tenant_router[l3.EXTERNAL_GW_INFO]:
                # let l3 plugin's periodic backlog processing take care of the
                # clean up of the global router
                return
            ext_net_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
            routertype_id = tenant_router[routertype.TYPE_ATTR]
            hd_id = tenant_router[HOSTING_DEVICE_ATTR]
            global_router = hd_to_gr_dict.get(hd_id)
            port_deleted = self._conditionally_remove_auxiliary_gateway_port(
                context, global_router_id, ext_net_id, routertype_id, hd_id,
                update_operation)
            if port_deleted is False:
                # since no auxiliary gateway port was deleted, the auxiliary
                # gateway port count cannot have reached zero, so we can
                # abort now
                return
            filters = {
                'device_id': [global_router_id],
                'device_owner': [DEVICE_OWNER_GLOBAL_ROUTER_GW]}
            num_aux_gw_ports = self._core_plugin.get_ports_count(
                context, filters=filters)
            if num_aux_gw_ports == 0:
                # global router not needed any more so we delete it
                self._delete_global_router(context, global_router_id)
                do_notify = False
            else:
                do_notify = True
            # process logical global router to remove its port
            self._conditionally_remove_auxiliary_gateway_vip_port(
                context, ext_net_id, routertype_id)
            self._l3_plugin.add_type_and_hosting_device_info(context,
                                                             global_router)
            if do_notify is True:
                for ni in self._l3_plugin.get_notifiers(context,
                                                        [global_router]):
                    if ni['notifier']:
                        ni['notifier'].routers_updated(context, ni['routers'])
    def _conditionally_remove_auxiliary_gateway_port(
            self, context, router_id, ext_net_id, routertype_id,
            hosting_device_id, update_operation=False):
        """Delete the global router's aux gateway port on ext_net_id when no
        tenant router on the hosting device uses that network anymore.

        :returns: True if the port was deleted, False otherwise.
        """
        num_rtrs = self._get_gateway_routers_count(
            context, ext_net_id, routertype_id, None, hosting_device_id)
        if ((num_rtrs <= 1 and update_operation is False) or
                (num_rtrs == 0 and update_operation is True)):
            # there are no tenant routers *on ext_net_id* that are serviced by
            # this global router so its aux gw port can be deleted
            self._delete_auxiliary_gateway_ports(context, router_id,
                                                 ext_net_id)
            return True
        return False
def _conditionally_remove_auxiliary_gateway_vip_port(
        self, context, ext_net_id, routertype_id):
    """Trim the logical global router after a global router change.

    Decrements its HA redundancy level, removes its auxiliary gateway
    VIP port on ext_net_id once no global router of routertype_id uses
    that network, and deletes the logical global router itself once no
    global routers remain at all.
    """
    filters = {routertype.TYPE_ATTR: [routertype_id],
               routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
    log_global_routers = self._l3_plugin.get_routers(context,
                                                     filters=filters)
    if not log_global_routers:
        return
    log_global_router = log_global_routers[0]
    self._update_ha_redundancy_level(context, log_global_router, -1)
    log_global_router_id = log_global_router['id']
    num_global_rtrs = self._get_gateway_routers_count(
        context, ext_net_id, routertype_id, ROUTER_ROLE_GLOBAL)
    if num_global_rtrs == 0:
        # no global routers *on ext_net_id* are serviced by this logical
        # global router any more so its aux gw VIP port can be deleted
        self._delete_auxiliary_gateway_ports(context, log_global_router_id,
                                             ext_net_id)
    filters[routerrole.ROUTER_ROLE_ATTR] = [ROUTER_ROLE_GLOBAL]
    total_num_global_rtrs = self._l3_plugin.get_routers_count(
        context, filters=filters)
    if total_num_global_rtrs == 0:
        # no global routers are serviced by this logical global router
        # at all any more, so it can be deleted
        self._delete_global_router(context, log_global_router_id, True)
    return False
def _delete_auxiliary_gateway_ports(
        self, context, router_id, net_id=None,
        port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
    """Delete router_id's auxiliary gateway ports of port_type.

    If net_id is given, only ports on that network are removed.
    Ports deleted concurrently by others are tolerated and logged.
    """
    filters = {'device_id': [router_id], 'device_owner': [port_type]}
    if net_id is not None:
        filters['network_id'] = [net_id]
    ports = self._core_plugin.get_ports(context, filters=filters)
    for port in ports:
        try:
            self._core_plugin.delete_port(context, port['id'],
                                          l3_port_check=False)
        except (exc.ObjectDeletedError, n_exc.PortNotFound) as e:
            # already gone; nothing left to clean up for this port
            LOG.warning(e)
def _delete_global_router(self, context, global_router_id, logical=False):
    """Delete the global router (or logical global router if logical=True).

    Stale auxiliary gateway ports still owned by the router are removed
    first. A router that has already been deleted is only logged.
    """
    # ensure we clean up any stale auxiliary gateway ports
    self._delete_auxiliary_gateway_ports(context, global_router_id)
    try:
        if logical is True:
            # nothing beyond what the base implementation does is needed
            # for the logical global router, so bypass the L3 plugin's
            # own override
            base_plugin = super(L3RouterApplianceDBMixin, self._l3_plugin)
            base_plugin.delete_router(context, global_router_id)
        else:
            self._l3_plugin.delete_router(context, global_router_id,
                                          unschedule=False)
    except (exc.ObjectDeletedError, l3.RouterNotFound) as e:
        LOG.warning(e)
def _get_gateway_routers_count(self, context, ext_net_id, routertype_id,
                               router_role, hosting_device_id=None):
    """Count routers acting as gateway to ext_net_id.

    Only routers of routertype_id with the given router_role are
    counted; if hosting_device_id is given, only routers hosted on
    that device.
    """
    # Determine number of routers (with routertype_id and router_role)
    # that act as gateway to ext_net_id and that are hosted on
    # hosting_device_id (if specified).
    query = context.session.query(bc.Router)
    if router_role in [None, ROUTER_ROLE_HA_REDUNDANCY]:
        # tenant router roles: a tenant router references its gateway
        # port directly via gw_port_id
        query = query.join(models_v2.Port,
                           models_v2.Port.id == bc.Router.gw_port_id)
        role_filter = expr.or_(
            l3_models.RouterHostingDeviceBinding.role == expr.null(),
            l3_models.RouterHostingDeviceBinding.role ==
            ROUTER_ROLE_HA_REDUNDANCY)
    else:
        # global and logical global routers: their aux gateway ports are
        # not referenced by gw_port_id, so join on the port's ownership
        # fields instead.
        # NOTE(review): joining Port.device_owner to Router.id looks
        # suspicious -- elsewhere in this file aux gw ports are looked up
        # by device_id == router id while device_owner holds a
        # DEVICE_OWNER_* constant; confirm this should not be
        # Port.device_id == bc.Router.id.
        query = query.join(models_v2.Port,
                           models_v2.Port.device_owner == bc.Router.id)
        role_filter = (
            l3_models.RouterHostingDeviceBinding.role == router_role)
    query = query.join(
        l3_models.RouterHostingDeviceBinding,
        l3_models.RouterHostingDeviceBinding.router_id == bc.Router.id)
    query = query.filter(
        role_filter,
        models_v2.Port.network_id == ext_net_id,
        l3_models.RouterHostingDeviceBinding.router_type_id ==
        routertype_id)
    if hosting_device_id is not None:
        query = query.filter(
            l3_models.RouterHostingDeviceBinding.hosting_device_id ==
            hosting_device_id)
    return query.count()
# ---------------- General support functions -----------------
def _update_ha_redundancy_level(self, context, logical_global_router,
                                delta):
    """Adjust the logical global router's HA redundancy level by delta."""
    with context.session.begin(subtransactions=True):
        router_db = self._l3_plugin._get_router(
            context, logical_global_router['id'])
        ha_settings = router_db.ha_settings
        ha_settings.redundancy_level = ha_settings.redundancy_level + delta
        context.session.add(ha_settings)
def _router_name(self, router_id):
    """Return the name used for the Neutron router with id router_id."""
    return '%s%s' % (N_ROUTER_PREFIX, router_id)
def _global_router_name(self, hosting_device_id, logical=False):
    """Return the name for a global router on hosting_device_id.

    With logical=True the fixed logical global router name is returned
    instead.
    """
    if logical is True:
        return cisco_constants.LOGICAL_ROUTER_ROLE_NAME
    # suffix is the trailing ROLE_ID_LEN characters of the device id
    suffix = hosting_device_id[-cisco_constants.ROLE_ID_LEN:]
    return cisco_constants.ROUTER_ROLE_NAME_PREFIX + '-' + suffix
@property
def _core_plugin(self):
    # Neutron core plugin, resolved lazily on every access so plugin
    # load order does not matter.
    return bc.get_plugin()
@property
def _l3_plugin(self):
    # Neutron L3 service plugin, resolved lazily on every access.
    return bc.get_plugin(bc.constants.L3)
| Gitweijie/first_project | networking_cisco/plugins/cisco/l3/drivers/asr1k/asr1k_routertype_driver.py | Python | apache-2.0 | 29,107 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package version
import (
"fmt"
"os"
"runtime"
)
var (
	// Version shows the version of kube batch.
	Version = "Not provided."
	// GitSHA shows the git commit id of kube batch.
	GitSHA = "Not provided."
	// Built shows the built time of the binary.
	Built = "Not provided."
)
// PrintVersionAndExit prints each version line returned by Info(apiVersion)
// to stdout and then terminates the process with exit code 0.
func PrintVersionAndExit(apiVersion string) {
	for _, line := range Info(apiVersion) {
		fmt.Printf("%v\n", line)
	}
	os.Exit(0)
}
// Info returns one human-readable line per version fact: the given API
// version, the build-time injected Version/GitSHA/Built values, and the
// Go runtime version and platform.
func Info(apiVersion string) []string {
	lines := make([]string, 0, 6)
	lines = append(lines, fmt.Sprintf("API Version: %s", apiVersion))
	lines = append(lines, fmt.Sprintf("Version: %s", Version))
	lines = append(lines, fmt.Sprintf("Git SHA: %s", GitSHA))
	lines = append(lines, fmt.Sprintf("Built At: %s", Built))
	lines = append(lines, fmt.Sprintf("Go Version: %s", runtime.Version()))
	lines = append(lines, fmt.Sprintf("Go OS/Arch: %s/%s", runtime.GOOS, runtime.GOARCH))
	return lines
}
| k82cn/kube-arbitrator | pkg/version/version.go | GO | apache-2.0 | 1,456 |
package no.dusken.momus.model.websocket;
/**
 * The kinds of actions carried in a websocket message: an entity was
 * created, updated or deleted.
 */
public enum Action {
    CREATE, UPDATE, DELETE
} | Studentmediene/Momus | src/main/java/no/dusken/momus/model/websocket/Action.java | Java | apache-2.0 | 91 |
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.demonstrations.imageprocessing;
import boofcv.abst.distort.FDistort;
import boofcv.alg.filter.kernel.GKernelMath;
import boofcv.alg.filter.kernel.SteerableKernel;
import boofcv.alg.misc.GImageStatistics;
import boofcv.core.image.GeneralizedImageOps;
import boofcv.gui.ListDisplayPanel;
import boofcv.gui.SelectAlgorithmPanel;
import boofcv.gui.image.VisualizeImageData;
import boofcv.struct.convolve.Kernel2D;
import boofcv.struct.image.ImageGray;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
/**
* Visualizes steerable kernels.
*
* @author Peter Abeles
*/
public abstract class DisplaySteerableBase<T extends ImageGray<T>, K extends Kernel2D>
		extends SelectAlgorithmPanel {

	/** Side length in pixels of the rendered kernel images. */
	protected static int imageSize = 400;
	/** Radius of the displayed kernels. */
	protected static int radius = 100;

	protected Class<T> imageType;
	protected Class<K> kernelType;

	/** Shows each basis kernel of the steerable filter. */
	ListDisplayPanel basisPanel = new ListDisplayPanel();
	/** Shows the kernel steered to a sweep of angles. */
	ListDisplayPanel steerPanel = new ListDisplayPanel();

	/** Scratch image that kernels are upscaled into for display. */
	T largeImg;

	List<DisplayGaussianKernelApp.DerivType> order = new ArrayList<>();

	protected DisplaySteerableBase( Class<T> imageType, Class<K> kernelType ) {
		this.imageType = imageType;
		this.kernelType = kernelType;

		largeImg = GeneralizedImageOps.createSingleBand(imageType, imageSize, imageSize);

		addAlgorithm("Deriv X", new DisplayGaussianKernelApp.DerivType(1, 0));
		addAlgorithm("Deriv XX", new DisplayGaussianKernelApp.DerivType(2, 0));
		addAlgorithm("Deriv XXX", new DisplayGaussianKernelApp.DerivType(3, 0));
		addAlgorithm("Deriv XXXX", new DisplayGaussianKernelApp.DerivType(4, 0));
		addAlgorithm("Deriv XY", new DisplayGaussianKernelApp.DerivType(1, 1));
		addAlgorithm("Deriv XXY", new DisplayGaussianKernelApp.DerivType(2, 1));
		addAlgorithm("Deriv XYY", new DisplayGaussianKernelApp.DerivType(1, 2));
		addAlgorithm("Deriv XXXY", new DisplayGaussianKernelApp.DerivType(3, 1));
		addAlgorithm("Deriv XXYY", new DisplayGaussianKernelApp.DerivType(2, 2));
		addAlgorithm("Deriv XYYY", new DisplayGaussianKernelApp.DerivType(1, 3));

		JPanel content = new JPanel(new GridLayout(0, 2));
		content.add(basisPanel);
		content.add(steerPanel);
		setMainGUI(content);
	}

	/**
	 * Creates the steerable kernel for the requested derivative order.
	 *
	 * @param orderX derivative order along x
	 * @param orderY derivative order along y
	 */
	protected abstract SteerableKernel<K> createKernel( int orderX, int orderY );

	/**
	 * Re-renders both panels for the selected derivative type: the basis
	 * kernels on the left and the kernel steered across [0, pi) on the right.
	 */
	@Override
	public void setActiveAlgorithm( String name, Object cookie ) {
		DisplayGaussianKernelApp.DerivType dt = (DisplayGaussianKernelApp.DerivType)cookie;

		SteerableKernel<K> steerable = createKernel(dt.orderX, dt.orderY);

		// add basis
		basisPanel.reset();
		for (int i = 0; i < steerable.getBasisSize(); i++) {
			basisPanel.addImage(renderKernel(steerable.getBasis(i)), "Basis " + i);
		}

		// add steered kernels, sweeping the angle from 0 to 180 degrees
		steerPanel.reset();
		for (int i = 0; i <= 20; i++) {
			double angle = Math.PI*i/20.0;
			K kernel = steerable.compute(angle);
			steerPanel.addImage(renderKernel(kernel),
					String.format("%5d", (int)(180.0*angle/Math.PI)));
		}
		repaint();
	}

	/**
	 * Renders a kernel for display: converts it to an image, upscales it into
	 * {@link #largeImg} with nearest-neighbor interpolation, and colorizes by
	 * sign normalized to the maximum absolute value. Shared by both panels.
	 */
	private BufferedImage renderKernel( Kernel2D kernel ) {
		T smallImg = GKernelMath.convertToImage(kernel);
		new FDistort(smallImg, largeImg).scaleExt().interpNN().apply();

		double maxValue = GImageStatistics.maxAbs(largeImg);
		return VisualizeImageData.colorizeSign(largeImg, null, maxValue);
	}
}
| lessthanoptimal/BoofCV | demonstrations/src/main/java/boofcv/demonstrations/imageprocessing/DisplaySteerableBase.java | Java | apache-2.0 | 4,148 |
/*
* Copyright (c) 2017 Trail of Bits, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
namespace {

// Generic register/memory/immediate move; zero-extends the source value
// into the (possibly wider) destination.
template <typename D, typename S>
DEF_SEM(MOV, D dst, const S src) {
  WriteZExt(dst, Read(src));
  return memory;
}

// Swaps the two operands. Both old values are read before either write so
// the exchange is order-independent.
template <typename D1, typename S1, typename D2, typename S2>
DEF_SEM(XCHG, D1 dst, S1 dst_val, D2 src, S2 src_val) {
  auto old_dst = Read(dst_val);
  auto old_src = Read(src_val);
  WriteZExt(dst, old_src);
  WriteZExt(src, old_dst);
  return memory;
}

// MOVBE: moves with a byte-order swap at the operand's width.
template <typename D, typename S>
DEF_SEM(MOVBE16, D dst, const S src) {
  WriteZExt(dst, __builtin_bswap16(Read(src)));
  return memory;
}

template <typename D, typename S>
DEF_SEM(MOVBE32, D dst, const S src) {
  WriteZExt(dst, __builtin_bswap32(Read(src)));
  return memory;
}

#if 64 == ADDRESS_SIZE_BITS
template <typename D, typename S>
DEF_SEM(MOVBE64, D dst, const S src) {
  Write(dst, __builtin_bswap64(Read(src)));
  return memory;
}
#endif

// Copies the low 64-bit lane of the source vector into the destination.
template <typename D, typename S>
DEF_SEM(MOVQ, D dst, S src) {
  UWriteV64(dst, UExtractV64(UReadV64(src), 0));
  return memory;
}

// Copies the low 32-bit lane of the source vector into the destination.
template <typename D, typename S>
DEF_SEM(MOVD, D dst, S src) {
  UWriteV32(dst, UExtractV32(UReadV32(src), 0));
  return memory;
}

// Whole-vector copies viewed as packed float32 / float64 / raw 128-bit
// lanes respectively (MOVAPS/MOVUPS, MOVAPD/MOVUPD, MOVDQA/MOVDQU, ...).
template <typename D, typename S>
DEF_SEM(MOVxPS, D dst, S src) {
  FWriteV32(dst, FReadV32(src));
  return memory;
}

template <typename D, typename S>
DEF_SEM(MOVxPD, D dst, S src) {
  FWriteV64(dst, FReadV64(src));
  return memory;
}

template <typename D, typename S>
DEF_SEM(MOVDQx, D dst, S src) {
  UWriteV128(dst, UReadV128(src));
  return memory;
}

// MOVLPS: replaces the two low float32 lanes of dst, preserving the high
// lanes.
template <typename D, typename S>
DEF_SEM(MOVLPS, D dst, S src) {
  auto src_vec = FReadV32(src);
  auto low1 = FExtractV32(src_vec, 0);
  auto low2 = FExtractV32(src_vec, 1);
  FWriteV32(dst, FInsertV32(FInsertV32(FReadV32(dst), 0, low1), 1, low2));
  return memory;
}

// MOVLHPS: copies src's low two float32 lanes into dst's high two lanes;
// dst's low lanes are preserved.
DEF_SEM(MOVLHPS, V128W dst, V128 src) {
  auto res = FReadV32(dst);
  auto src1 = FReadV32(src);
  res = FInsertV32(res, 2, FExtractV32(src1, 0));
  res = FInsertV32(res, 3, FExtractV32(src1, 1));
  FWriteV32(dst, res);
  return memory;
}

// MOVHLPS: copies src's high two float32 lanes into dst's low two lanes;
// dst's high lanes are preserved.
DEF_SEM(MOVHLPS, V128W dst, V128 src) {
  auto res = FReadV32(dst);
  auto src1 = FReadV32(src);
  res = FInsertV32(res, 0, FExtractV32(src1, 2));
  res = FInsertV32(res, 1, FExtractV32(src1, 3));
  FWriteV32(dst, res);
  return memory;
}

// MOVLPD: replaces only the low float64 lane of dst.
template <typename D, typename S>
DEF_SEM(MOVLPD, D dst, S src) {
  FWriteV64(dst, FInsertV64(FReadV64(dst), 0, FExtractV64(FReadV64(src), 0)));
  return memory;
}

#if HAS_FEATURE_AVX
// Three-operand AVX forms: the non-merged lanes come from src1 rather than
// from the destination, and (per VEX semantics) the upper bits of the
// destination register are zeroed by the vector write.
DEF_SEM(VMOVLPS, VV128W dst, V128 src1, MV64 src2) {
  auto low_vec = FReadV32(src2);
  FWriteV32(
      dst, FInsertV32(FInsertV32(FReadV32(src1), 0, FExtractV32(low_vec, 0)), 1,
                      FExtractV32(low_vec, 1)));
  return memory;
}

DEF_SEM(VMOVLPD, VV128W dst, V128 src1, MV64 src2) {
  FWriteV64(dst, FInsertV64(FReadV64(src1), 0, FExtractV64(FReadV64(src2), 0)));
  return memory;
}

DEF_SEM(VMOVLHPS, VV128W dst, V128 src1, V128 src2) {
  /* DEST[63:0] <- SRC1[63:0] */
  /* DEST[127:64] <- SRC2[63:0] */
  /* DEST[VLMAX-1:128] <- 0 */
  auto src1_vec = FReadV32(src1);
  auto src2_vec = FReadV32(src2);
  float32v4_t temp_vec = {};
  temp_vec = FInsertV32(temp_vec, 0, FExtractV32(src1_vec, 0));
  temp_vec = FInsertV32(temp_vec, 1, FExtractV32(src1_vec, 1));
  temp_vec = FInsertV32(temp_vec, 2, FExtractV32(src2_vec, 0));
  temp_vec = FInsertV32(temp_vec, 3, FExtractV32(src2_vec, 1));
  FWriteV32(dst, temp_vec);
  return memory;
}

DEF_SEM(VMOVHLPS, VV128W dst, V128 src1, V128 src2) {
  auto src1_vec = FReadV32(src1);
  auto src2_vec = FReadV32(src2);
  float32v4_t temp_vec = {};
  temp_vec = FInsertV32(temp_vec, 0, FExtractV32(src2_vec, 2));
  temp_vec = FInsertV32(temp_vec, 1, FExtractV32(src2_vec, 3));
  temp_vec = FInsertV32(temp_vec, 2, FExtractV32(src1_vec, 2));
  temp_vec = FInsertV32(temp_vec, 3, FExtractV32(src1_vec, 3));
  FWriteV32(dst, temp_vec);
  return memory;
}
#endif  // HAS_FEATURE_AVX

}  // namespace
// Fused `CALL $0; POP reg` sequences.
DEF_ISEL(CALL_POP_FUSED_32) = MOV<R32W, I32>;
DEF_ISEL(CALL_POP_FUSED_64) = MOV<R64W, I64>;
DEF_ISEL(MOV_GPR8_IMMb_C6r0) = MOV<R8W, I8>;
DEF_ISEL(MOV_MEMb_IMMb) = MOV<M8W, I8>;
DEF_ISEL_RnW_In(MOV_GPRv_IMMz, MOV);
DEF_ISEL_MnW_In(MOV_MEMv_IMMz, MOV);
DEF_ISEL(MOVBE_GPRv_MEMv_16) = MOVBE16<R16W, M16>;
DEF_ISEL(MOVBE_GPRv_MEMv_32) = MOVBE32<R32W, M32>;
IF_64BIT(DEF_ISEL(MOVBE_GPRv_MEMv_64) = MOVBE64<R64W, M64>;)
DEF_ISEL(MOV_GPR8_GPR8_88) = MOV<R8W, R8>;
DEF_ISEL(MOV_MEMb_GPR8) = MOV<M8W, R8>;
DEF_ISEL_MnW_Rn(MOV_MEMv_GPRv, MOV);
DEF_ISEL_RnW_Rn(MOV_GPRv_GPRv_89, MOV);
DEF_ISEL_RnW_Rn(MOV_GPRv_GPRv_8B, MOV);
DEF_ISEL(MOV_GPR8_MEMb) = MOV<R8W, M8>;
DEF_ISEL(MOV_GPR8_GPR8_8A) = MOV<R8W, R8>;
DEF_ISEL_RnW_Mn(MOV_GPRv_MEMv, MOV);
DEF_ISEL_MnW_Rn(MOV_MEMv_GPRv_8B, MOV);
DEF_ISEL(MOV_AL_MEMb) = MOV<R8W, M8>;
DEF_ISEL_RnW_Mn(MOV_OrAX_MEMv, MOV);
DEF_ISEL(MOV_MEMb_AL) = MOV<M8W, R8>;
DEF_ISEL_MnW_Rn(MOV_MEMv_OrAX, MOV);
DEF_ISEL(MOV_GPR8_IMMb_D0) = MOV<R8W, I8>;
DEF_ISEL(MOV_GPR8_IMMb_B0) =
MOV<R8W, I8>; // https://github.com/intelxed/xed/commit/906d25
DEF_ISEL_RnW_In(MOV_GPRv_IMMv, MOV);
DEF_ISEL(MOVNTI_MEMd_GPR32) = MOV<M32W, R32>;
IF_64BIT(DEF_ISEL(MOVNTI_MEMq_GPR64) = MOV<M64W, R64>;)
DEF_ISEL(XCHG_MEMb_GPR8) = XCHG<M8W, M8, R8W, R8>;
DEF_ISEL(XCHG_GPR8_GPR8) = XCHG<R8W, R8, R8W, R8>;
DEF_ISEL_MnW_Mn_RnW_Rn(XCHG_MEMv_GPRv, XCHG);
DEF_ISEL_RnW_Rn_RnW_Rn(XCHG_GPRv_GPRv, XCHG);
DEF_ISEL_RnW_Rn_RnW_Rn(XCHG_GPRv_OrAX, XCHG);
DEF_ISEL(MOVQ_MMXq_MEMq_0F6E) = MOVQ<V64W, MV64>;
DEF_ISEL(MOVQ_MMXq_GPR64) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_MEMq_MMXq_0F7E) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_GPR64_MMXq) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_MMXq_MEMq_0F6F) = MOVQ<V64W, MV64>;
DEF_ISEL(MOVQ_MMXq_MMXq_0F6F) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_MEMq_MMXq_0F7F) = MOVQ<MV64W, V64>;
DEF_ISEL(MOVQ_MMXq_MMXq_0F7F) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_XMMdq_MEMq_0F6E) = MOVQ<V128W, MV64>;
IF_64BIT(DEF_ISEL(MOVQ_XMMdq_GPR64) = MOVQ<V128W, V64>;)
DEF_ISEL(MOVQ_MEMq_XMMq_0F7E) = MOVQ<MV64W, V128>;
IF_64BIT(DEF_ISEL(MOVQ_GPR64_XMMq) = MOVQ<V64W, V128>;)
DEF_ISEL(MOVQ_MEMq_XMMq_0FD6) = MOVQ<MV64W, V128>;
DEF_ISEL(MOVQ_XMMdq_XMMq_0FD6) = MOVQ<V128W, V128>;
DEF_ISEL(MOVQ_XMMdq_MEMq_0F7E) = MOVQ<V128W, MV64>;
DEF_ISEL(MOVQ_XMMdq_XMMq_0F7E) = MOVQ<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVQ_XMMdq_MEMq_6E) = MOVQ<VV128W, MV64>;
IF_64BIT(DEF_ISEL(VMOVQ_XMMdq_GPR64q) = MOVQ<VV128W, V64>;)
DEF_ISEL(VMOVQ_MEMq_XMMq_7E) = MOVQ<MV64W, V128>;
IF_64BIT(DEF_ISEL(VMOVQ_GPR64q_XMMq) = MOVQ<V64W, V128>;)
DEF_ISEL(VMOVQ_XMMdq_MEMq_7E) = MOVQ<VV128W, MV64>;
DEF_ISEL(VMOVQ_XMMdq_XMMq_7E) = MOVQ<VV128W, V128>;
DEF_ISEL(VMOVQ_MEMq_XMMq_D6) = MOVQ<MV64W, V128>;
DEF_ISEL(VMOVQ_XMMdq_XMMq_D6) = MOVQ<VV128W, V128>;
# if HAS_FEATURE_AVX512
DEF_ISEL(VMOVQ_XMMu64_MEMu64_AVX512) = MOVQ<VV128W, MV64>;
IF_64BIT(DEF_ISEL(VMOVQ_GPR64u64_XMMu64_AVX512) = MOVQ<V64W, V128>;)
IF_64BIT(DEF_ISEL(VMOVQ_XMMu64_GPR64u64_AVX512) = MOVQ<VV128W, V64>;)
DEF_ISEL(VMOVQ_XMMu64_XMMu64_AVX512) = MOVQ<VV128W, V128>;
DEF_ISEL(VMOVQ_MEMu64_XMMu64_AVX512) = MOVQ<MV64W, V128>;
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVD_MMXq_MEMd) = MOVD<V32W, MV32>;
DEF_ISEL(MOVD_MMXq_GPR32) = MOVD<V32W, V32>;
DEF_ISEL(MOVD_MEMd_MMXd) = MOVD<MV32W, V32>;
DEF_ISEL(MOVD_GPR32_MMXd) = MOVD<V32W, V32>;
DEF_ISEL(MOVD_XMMdq_MEMd) = MOVD<V128W, MV32>;
DEF_ISEL(MOVD_XMMdq_GPR32) = MOVD<V128W, V32>; // Zero extends.
DEF_ISEL(MOVD_MEMd_XMMd) = MOVD<MV32W, V128>;
DEF_ISEL(MOVD_GPR32_XMMd) = MOVD<V32W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVD_XMMdq_MEMd) = MOVD<VV128W, MV32>;
DEF_ISEL(VMOVD_XMMdq_GPR32d) = MOVD<VV128W, V32>;
DEF_ISEL(VMOVD_MEMd_XMMd) = MOVD<MV32W, V128>;
DEF_ISEL(VMOVD_GPR32d_XMMd) = MOVD<V32W, V128>;
# if HAS_FEATURE_AVX512
DEF_ISEL(VMOVD_XMMu32_MEMu32_AVX512) = MOVD<VV128W, MV32>;
DEF_ISEL(VMOVD_XMMu32_GPR32u32_AVX512) = MOVD<VV128W, V32>;
DEF_ISEL(VMOVD_MEMu32_XMMu32_AVX512) = MOVD<MV32W, V128>;
DEF_ISEL(VMOVD_GPR32u32_XMMu32_AVX512) = MOVD<V32W, V128>;
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVAPS_XMMps_MEMps) = MOVxPS<V128W, MV128>;
DEF_ISEL(MOVAPS_XMMps_XMMps_0F28) = MOVxPS<V128W, V128>;
DEF_ISEL(MOVAPS_MEMps_XMMps) = MOVxPS<MV128W, V128>;
DEF_ISEL(MOVAPS_XMMps_XMMps_0F29) = MOVxPS<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVAPS_XMMdq_MEMdq) = MOVxPS<VV128W, MV128>;
DEF_ISEL(VMOVAPS_XMMdq_XMMdq_28) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVAPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>;
DEF_ISEL(VMOVAPS_XMMdq_XMMdq_29) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVAPS_YMMqq_MEMqq) = MOVxPS<VV256W, MV256>;
DEF_ISEL(VMOVAPS_YMMqq_YMMqq_28) = MOVxPS<VV256W, VV256>;
DEF_ISEL(VMOVAPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>;
DEF_ISEL(VMOVAPS_YMMqq_YMMqq_29) = MOVxPS<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//4102 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4103 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4104 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4105 VMOVAPS VMOVAPS_MEMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4106 VMOVAPS VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4107 VMOVAPS VMOVAPS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4108 VMOVAPS VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4109 VMOVAPS VMOVAPS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4110 VMOVAPS VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4111 VMOVAPS VMOVAPS_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4112 VMOVAPS VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4113 VMOVAPS VMOVAPS_MEMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTPS_MEMdq_XMMps) = MOVxPS<MV128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVNTPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>;
DEF_ISEL(VMOVNTPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>;
# if HAS_FEATURE_AVX512
//6168 VMOVNTPS VMOVNTPS_MEMf32_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6169 VMOVNTPS VMOVNTPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6170 VMOVNTPS VMOVNTPS_MEMf32_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVUPS_XMMps_MEMps) = MOVxPS<V128W, MV128>;
DEF_ISEL(MOVUPS_XMMps_XMMps_0F10) = MOVxPS<V128W, V128>;
DEF_ISEL(MOVUPS_MEMps_XMMps) = MOVxPS<MV128W, V128>;
DEF_ISEL(MOVUPS_XMMps_XMMps_0F11) = MOVxPS<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVUPS_XMMdq_MEMdq) = MOVxPS<VV128W, MV128>;
DEF_ISEL(VMOVUPS_XMMdq_XMMdq_10) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVUPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>;
DEF_ISEL(VMOVUPS_XMMdq_XMMdq_11) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVUPS_YMMqq_MEMqq) = MOVxPS<VV256W, MV256>;
DEF_ISEL(VMOVUPS_YMMqq_YMMqq_10) = MOVxPS<VV256W, VV256>;
DEF_ISEL(VMOVUPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>;
DEF_ISEL(VMOVUPS_YMMqq_YMMqq_11) = MOVxPS<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//4954 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4955 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4956 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4957 VMOVUPS VMOVUPS_MEMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4958 VMOVUPS VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4959 VMOVUPS VMOVUPS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4960 VMOVUPS VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4961 VMOVUPS VMOVUPS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4962 VMOVUPS VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4963 VMOVUPS VMOVUPS_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4964 VMOVUPS VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4965 VMOVUPS VMOVUPS_MEMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVAPD_XMMpd_MEMpd) = MOVxPD<V128W, MV128>;
DEF_ISEL(MOVAPD_XMMpd_XMMpd_0F28) = MOVxPD<V128W, V128>;
DEF_ISEL(MOVAPD_MEMpd_XMMpd) = MOVxPD<MV128W, V128>;
DEF_ISEL(MOVAPD_XMMpd_XMMpd_0F29) = MOVxPD<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVAPD_XMMdq_MEMdq) = MOVxPD<VV128W, MV128>;
DEF_ISEL(VMOVAPD_XMMdq_XMMdq_28) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVAPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>;
DEF_ISEL(VMOVAPD_XMMdq_XMMdq_29) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVAPD_YMMqq_MEMqq) = MOVxPD<VV256W, MV256>;
DEF_ISEL(VMOVAPD_YMMqq_YMMqq_28) = MOVxPD<VV256W, VV256>;
DEF_ISEL(VMOVAPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>;
DEF_ISEL(VMOVAPD_YMMqq_YMMqq_29) = MOVxPD<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//5585 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//5586 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5587 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//5588 VMOVAPD VMOVAPD_MEMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5589 VMOVAPD VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//5590 VMOVAPD VMOVAPD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5591 VMOVAPD VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//5592 VMOVAPD VMOVAPD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5593 VMOVAPD VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5594 VMOVAPD VMOVAPD_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5595 VMOVAPD VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5596 VMOVAPD VMOVAPD_MEMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTPD_MEMdq_XMMpd) = MOVxPD<MV128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVNTPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>;
DEF_ISEL(VMOVNTPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>;
# if HAS_FEATURE_AVX512
//6088 VMOVNTPD VMOVNTPD_MEMf64_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6089 VMOVNTPD VMOVNTPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6090 VMOVNTPD VMOVNTPD_MEMf64_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVUPD_XMMpd_MEMpd) = MOVxPD<V128W, MV128>;
DEF_ISEL(MOVUPD_XMMpd_XMMpd_0F10) = MOVxPD<V128W, V128>;
DEF_ISEL(MOVUPD_MEMpd_XMMpd) = MOVxPD<MV128W, V128>;
DEF_ISEL(MOVUPD_XMMpd_XMMpd_0F11) = MOVxPD<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVUPD_XMMdq_MEMdq) = MOVxPD<VV128W, MV128>;
DEF_ISEL(VMOVUPD_XMMdq_XMMdq_10) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVUPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>;
DEF_ISEL(VMOVUPD_XMMdq_XMMdq_11) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVUPD_YMMqq_MEMqq) = MOVxPD<VV256W, MV256>;
DEF_ISEL(VMOVUPD_YMMqq_YMMqq_10) = MOVxPD<VV256W, VV256>;
DEF_ISEL(VMOVUPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>;
DEF_ISEL(VMOVUPD_YMMqq_YMMqq_11) = MOVxPD<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//4991 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4992 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4993 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4994 VMOVUPD VMOVUPD_MEMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4995 VMOVUPD VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4996 VMOVUPD VMOVUPD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4997 VMOVUPD VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4998 VMOVUPD VMOVUPD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4999 VMOVUPD VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5000 VMOVUPD VMOVUPD_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//5001 VMOVUPD VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5002 VMOVUPD VMOVUPD_MEMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTDQ_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(MOVNTDQA_XMMdq_MEMdq) = MOVDQx<V128W, MV128>;
DEF_ISEL(MOVDQU_XMMdq_MEMdq) = MOVDQx<V128W, MV128>;
DEF_ISEL(MOVDQU_XMMdq_XMMdq_0F6F) = MOVDQx<V128W, V128>;
DEF_ISEL(MOVDQU_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(MOVDQU_XMMdq_XMMdq_0F7F) = MOVDQx<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVNTDQ_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(VMOVNTDQ_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>;
//5061 VMOVNTDQ VMOVNTDQ_MEMu32_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//5062 VMOVNTDQ VMOVNTDQ_MEMu32_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//5063 VMOVNTDQ VMOVNTDQ_MEMu32_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
DEF_ISEL(VMOVNTDQA_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>;
DEF_ISEL(VMOVNTDQA_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>;
//4142 VMOVNTDQA VMOVNTDQA_ZMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//4143 VMOVNTDQA VMOVNTDQA_XMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//4144 VMOVNTDQA VMOVNTDQA_YMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
DEF_ISEL(VMOVDQU_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>;
DEF_ISEL(VMOVDQU_XMMdq_XMMdq_6F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQU_MEMdq_XMMdq) = MOVDQx<MV128W, VV128>;
DEF_ISEL(VMOVDQU_XMMdq_XMMdq_7F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQU_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>;
DEF_ISEL(VMOVDQU_YMMqq_YMMqq_6F) = MOVDQx<VV256W, VV256>;
DEF_ISEL(VMOVDQU_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>;
DEF_ISEL(VMOVDQU_YMMqq_YMMqq_7F) = MOVDQx<VV256W, VV256>;
#endif // HAS_FEATURE_AVX
// Aligned 128-bit integer moves.  The 0F6F/0F7F suffixes name the two opcode
// encodings of the register-to-register form.  NOTE(review): whether the
// REQUIRES_ALIGNMENT fault behavior is modeled depends on the MOVDQx template
// defined earlier in the file — confirm.
DEF_ISEL(MOVDQA_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(MOVDQA_XMMdq_XMMdq_0F7F) = MOVDQx<V128W, V128>;
DEF_ISEL(MOVDQA_XMMdq_MEMdq) = MOVDQx<V128W, MV128>;
DEF_ISEL(MOVDQA_XMMdq_XMMdq_0F6F) = MOVDQx<V128W, V128>;
#if HAS_FEATURE_AVX
// Aligned AVX moves reuse the same template, parameterized only by the
// (destination, source) operand types.
DEF_ISEL(VMOVDQA_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>;
DEF_ISEL(VMOVDQA_XMMdq_XMMdq_6F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQA_MEMdq_XMMdq) = MOVDQx<MV128W, VV128>;
DEF_ISEL(VMOVDQA_XMMdq_XMMdq_7F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQA_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>;
DEF_ISEL(VMOVDQA_YMMqq_YMMqq_6F) = MOVDQx<VV256W, VV256>;
DEF_ISEL(VMOVDQA_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>;
DEF_ISEL(VMOVDQA_YMMqq_YMMqq_7F) = MOVDQx<VV256W, VV256>;
#endif // HAS_FEATURE_AVX
// MOVLPS load/store forms (64-bit memory operand); semantics are provided by
// the MOVLPS/VMOVLPS templates defined earlier in the file.
DEF_ISEL(MOVLPS_MEMq_XMMps) = MOVLPS<MV64W, V128>;
DEF_ISEL(MOVLPS_XMMq_MEMq) = MOVLPS<V128W, MV64>;
IF_AVX(DEF_ISEL(VMOVLPS_MEMq_XMMq) = MOVLPS<MV64W, VV128>;)
IF_AVX(DEF_ISEL(VMOVLPS_XMMdq_XMMdq_MEMq) = VMOVLPS;)
// Register-only MOVHLPS/MOVLHPS (semantics defined earlier in the file).
// Both AVX operand spellings map onto the same implementation.
DEF_ISEL(MOVHLPS_XMMq_XMMq) = MOVHLPS;
IF_AVX(DEF_ISEL(VMOVHLPS_XMMdq_XMMq_XMMq) = VMOVHLPS;)
IF_AVX(DEF_ISEL(VMOVHLPS_XMMdq_XMMdq_XMMdq) = VMOVHLPS;)
DEF_ISEL(MOVLHPS_XMMq_XMMq) = MOVLHPS;
IF_AVX(DEF_ISEL(VMOVLHPS_XMMdq_XMMq_XMMq) = VMOVLHPS;)
IF_AVX(DEF_ISEL(VMOVLHPS_XMMdq_XMMdq_XMMdq) = VMOVLHPS;)
#if HAS_FEATURE_AVX
# if HAS_FEATURE_AVX512
// Unimplemented EVEX-encoded VMOVLPS variants (listed for reference only):
//4606 VMOVLPS DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
//4607 VMOVLPS VMOVLPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
// MOVLPD load/store forms (64-bit memory operand).
DEF_ISEL(MOVLPD_XMMsd_MEMq) = MOVLPD<V128W, MV64>;
DEF_ISEL(MOVLPD_MEMq_XMMsd) = MOVLPD<MV64W, V128>;
IF_AVX(DEF_ISEL(VMOVLPD_MEMq_XMMq) = MOVLPD<MV64W, VV128>;)
IF_AVX(DEF_ISEL(VMOVLPD_XMMdq_XMMdq_MEMq) = VMOVLPD;)
#if HAS_FEATURE_AVX
# if HAS_FEATURE_AVX512
// Unimplemented EVEX-encoded VMOVLPD variants (listed for reference only):
//4599 VMOVLPD VMOVLPD_XMMf64_XMMf64_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
//4600 VMOVLPD VMOVLPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
namespace {

// Copy only the low 64-bit (double-precision) lane of `src` into `dst`.
// Instantiated for both the load (xmm <- mem) and store (mem <- xmm) forms
// of MOVSD via the template parameters.
template <typename DstT, typename SrcT>
DEF_SEM(MOVSD_MEM, DstT dst, SrcT src) {
  auto src_vec = FReadV64(src);
  auto low_lane = FExtractV64(src_vec, 0);
  FWriteV64(dst, low_lane);
  return memory;
}

// Register-to-register MOVSD: replace the low 64-bit lane of `dst` with the
// low lane of `src`, preserving the high lane of `dst`.
DEF_SEM(MOVSD, V128W dst, V128 src) {
  auto merged = FReadV64(dst);
  auto src_low = FExtractV64(FReadV64(src), 0);
  FWriteV64(dst, FInsertV64(merged, 0, src_low));
  return memory;
}

#if HAS_FEATURE_AVX

// Three-operand AVX form: low lane taken from `src2`, high lane from `src1`.
// Basically the same as `VMOVLPD`.
DEF_SEM(VMOVSD, VV128W dst, V128 src1, V128 src2) {
  auto result = FReadV64(src2);
  auto high_lane = FExtractV64(FReadV64(src1), 1);
  FWriteV64(dst, FInsertV64(result, 1, high_lane));
  return memory;
}

#endif  // HAS_FEATURE_AVX

}  // namespace
// Register-to-register MOVSD merges only the low lane; the 0F10 (load-form
// encoding) and 0F11 (store-form encoding) opcodes share one semantic.
DEF_ISEL(MOVSD_XMM_XMMsd_XMMsd_0F10) = MOVSD;
// Memory forms copy just the low 64-bit lane.  NOTE(review): for the load
// form the ISA zeroes bits 127:64 of the destination; whether that happens
// here depends on FWriteV64's behavior for V128W destinations — confirm.
DEF_ISEL(MOVSD_XMM_XMMdq_MEMsd) = MOVSD_MEM<V128W, MV64>;
DEF_ISEL(MOVSD_XMM_MEMsd_XMMsd) = MOVSD_MEM<MV64W, V128>;
DEF_ISEL(MOVSD_XMM_XMMsd_XMMsd_0F11) = MOVSD;
#if HAS_FEATURE_AVX
// AVX memory forms reuse the SSE semantics; register forms use the
// three-operand VMOVSD (both the 10 and 11 encodings).
DEF_ISEL(VMOVSD_XMMdq_MEMq) = MOVSD_MEM<VV128W, MV64>;
DEF_ISEL(VMOVSD_MEMq_XMMq) = MOVSD_MEM<MV64W, VV128>;
DEF_ISEL(VMOVSD_XMMdq_XMMdq_XMMq_10) = VMOVSD;
DEF_ISEL(VMOVSD_XMMdq_XMMdq_XMMq_11) = VMOVSD;
# if HAS_FEATURE_AVX512
// Unimplemented EVEX-encoded VMOVSD variants (listed for reference only):
//3632 VMOVSD VMOVSD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3633 VMOVSD VMOVSD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3634 VMOVSD VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
//3635 VMOVSD VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
// Non-temporal scalar store; the streaming hint is not modeled, so this is a
// plain low-lane 64-bit store.
DEF_ISEL(MOVNTSD_MEMq_XMMq) = MOVSD_MEM<MV64W, V128>;
namespace {

// Copy only the low 32-bit (single-precision) lane of `src` into `dst`.
// Instantiated for both the load (xmm <- mem) and store (mem <- xmm) forms
// of MOVSS via the template parameters.
template <typename DstT, typename SrcT>
DEF_SEM(MOVSS_MEM, DstT dst, SrcT src) {
  auto src_vec = FReadV32(src);
  auto low_lane = FExtractV32(src_vec, 0);
  FWriteV32(dst, low_lane);
  return memory;
}

// Register-to-register MOVSS: replace the low 32-bit lane of `dst` with the
// low lane of `src`, preserving the other three lanes of `dst`.
DEF_SEM(MOVSS, V128W dst, V128 src) {
  auto merged = FReadV32(dst);
  auto src_low = FExtractV32(FReadV32(src), 0);
  FWriteV32(dst, FInsertV32(merged, 0, src_low));
  return memory;
}

#if HAS_FEATURE_AVX

// Three-operand AVX form: low lane taken from `src2`, upper lanes from
// `src1`.
DEF_SEM(VMOVSS, VV128W dst, V128 src1, V128 src2) {
  auto result = FReadV32(src1);
  auto low_lane = FExtractV32(FReadV32(src2), 0);
  FWriteV32(dst, FInsertV32(result, 0, low_lane));
  return memory;
}

#endif  // HAS_FEATURE_AVX

}  // namespace
// Memory forms copy just the low 32-bit lane.  NOTE(review): for the load
// form the ISA zeroes bits 127:32 of the destination; whether that happens
// here depends on FWriteV32's behavior for V128W destinations — confirm.
DEF_ISEL(MOVSS_XMMdq_MEMss) = MOVSS_MEM<V128W, MV32>;
DEF_ISEL(MOVSS_MEMss_XMMss) = MOVSS_MEM<MV32W, V128>;
// Register-to-register MOVSS merges only the low lane; the 0F10 and 0F11
// opcode encodings share one semantic.
DEF_ISEL(MOVSS_XMMss_XMMss_0F10) = MOVSS;
DEF_ISEL(MOVSS_XMMss_XMMss_0F11) = MOVSS;
#if HAS_FEATURE_AVX
// AVX memory forms reuse the SSE semantics; register forms use the
// three-operand VMOVSS (both the 10 and 11 encodings).
DEF_ISEL(VMOVSS_XMMdq_MEMd) = MOVSS_MEM<VV128W, MV32>;
DEF_ISEL(VMOVSS_MEMd_XMMd) = MOVSS_MEM<MV32W, V128>;
DEF_ISEL(VMOVSS_XMMdq_XMMdq_XMMd_10) = VMOVSS;
DEF_ISEL(VMOVSS_XMMdq_XMMdq_XMMd_11) = VMOVSS;
# if HAS_FEATURE_AVX512
// Unimplemented EVEX-encoded VMOVSS variants (listed for reference only):
//3650 VMOVSS VMOVSS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3651 VMOVSS VMOVSS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3652 VMOVSS VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
//3653 VMOVSS VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
// Non-temporal scalar store; the streaming hint is not modeled, so this is a
// plain low-lane 32-bit store.
DEF_ISEL(MOVNTSS_MEMd_XMMd) = MOVSS_MEM<MV32W, V128>;
namespace {

// MOVHPD load: overwrite the high 64-bit lane of `dst` with the quadword at
// `src`; the low lane of `dst` is preserved.
DEF_SEM(MOVHPD, V128W dst, MV64 src) {
  auto merged = FReadV64(dst);
  auto mem_qword = FExtractV64(FReadV64(src), 0);
  FWriteV64(dst, FInsertV64(merged, 1, mem_qword));
  return memory;
}

// MOVHPD store: write the high 64-bit lane of `src` to memory.
DEF_SEM(MOVHPD_STORE, MV64W dst, V128 src) {
  auto high_qword = FExtractV64(FReadV64(src), 1);
  FWriteV64(dst, high_qword);
  return memory;
}

#if HAS_FEATURE_AVX

// Three-operand AVX load form: low lane from `src1`, high lane from memory.
DEF_SEM(VMOVHPD, VV256W dst, V128 src1, MV64 src2) {
  auto result = FReadV64(src1);
  auto mem_qword = FExtractV64(FReadV64(src2), 0);
  FWriteV64(dst, FInsertV64(result, 1, mem_qword));
  return memory;
}

#endif  // HAS_FEATURE_AVX

}  // namespace
// MOVHPD moves a quadword into / out of the high 64-bit lane of an XMM
// register.
DEF_ISEL(MOVHPD_XMMsd_MEMq) = MOVHPD;
DEF_ISEL(MOVHPD_MEMq_XMMsd) = MOVHPD_STORE;
// The AVX load form is three-operand; the store form reuses the SSE
// semantics.
IF_AVX(DEF_ISEL(VMOVHPD_XMMdq_XMMq_MEMq) = VMOVHPD;)
IF_AVX(DEF_ISEL(VMOVHPD_MEMq_XMMdq) = MOVHPD_STORE;)
// Unimplemented EVEX-encoded VMOVHPD variants (listed for reference only):
//5181 VMOVHPD VMOVHPD_XMMf64_XMMf64_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
//5182 VMOVHPD VMOVHPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
namespace {

// MOVHPS load: copy two 32-bit lanes from memory into the upper half
// (lanes 2 and 3) of `dst`; lanes 0 and 1 of `dst` are preserved.
DEF_SEM(MOVHPS, V128W dst, MV64 src) {
  auto result = FReadV32(dst);
  auto mem_vec = FReadV32(src);
  result = FInsertV32(result, 2, FExtractV32(mem_vec, 0));
  result = FInsertV32(result, 3, FExtractV32(mem_vec, 1));
  FWriteV32(dst, result);
  return memory;
}

// MOVHPS store: write the upper half (lanes 2 and 3) of `src` to memory as
// two 32-bit lanes.
DEF_SEM(MOVHPS_STORE, MV64W dst, V128 src) {
  auto result = FClearV32(FReadV32(dst));
  auto reg_vec = FReadV32(src);
  result = FInsertV32(result, 0, FExtractV32(reg_vec, 2));
  result = FInsertV32(result, 1, FExtractV32(reg_vec, 3));
  FWriteV32(dst, result);
  return memory;
}

#if HAS_FEATURE_AVX

// Three-operand AVX load form: lower half from `src1`, upper half (lanes 2
// and 3) from memory.
DEF_SEM(VMOVHPS, VV256W dst, V128 src1, MV64 src2) {
  auto result = FReadV32(src1);
  auto mem_vec = FReadV32(src2);
  result = FInsertV32(result, 2, FExtractV32(mem_vec, 0));
  result = FInsertV32(result, 3, FExtractV32(mem_vec, 1));
  FWriteV32(dst, result);
  return memory;
}

#endif  // HAS_FEATURE_AVX

}  // namespace
// MOVHPS moves a pair of 32-bit lanes into / out of the upper half of an
// XMM register.
DEF_ISEL(MOVHPS_XMMq_MEMq) = MOVHPS;
DEF_ISEL(MOVHPS_MEMq_XMMps) = MOVHPS_STORE;
// The AVX load form is three-operand; the store form reuses the SSE
// semantics.
IF_AVX(DEF_ISEL(VMOVHPS_XMMdq_XMMq_MEMq) = VMOVHPS;)
IF_AVX(DEF_ISEL(VMOVHPS_MEMq_XMMdq) = MOVHPS_STORE;)
// Unimplemented EVEX-encoded VMOVHPS variants (listed for reference only):
//5197 VMOVHPS VMOVHPS_XMMf32_XMMf32_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
//5198 VMOVHPS VMOVHPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
namespace {

// Writes to segment registers: each semantic stores the 16-bit selector and
// then raises a synchronous hyper call naming the segment that changed, so
// the runtime can apply whatever side effects the selector write implies
// (which lifted code cannot model directly).

template <typename S>
DEF_SEM(MOV_ES, R16W dst, S src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory,
                                  SyncHyperCall::kX86SetSegmentES);
}

template <typename S>
DEF_SEM(MOV_SS, R16W dst, S src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory,
                                  SyncHyperCall::kX86SetSegmentSS);
}

template <typename S>
DEF_SEM(MOV_DS, R16W dst, S src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory,
                                  SyncHyperCall::kX86SetSegmentDS);
}

template <typename S>
DEF_SEM(MOV_FS, R16W dst, S src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory,
                                  SyncHyperCall::kX86SetSegmentFS);
}

template <typename S>
DEF_SEM(MOV_GS, R16W dst, S src) {
  Write(dst, Read(src));
  return __remill_sync_hyper_call(state, memory,
                                  SyncHyperCall::kX86SetSegmentGS);
}

}  // namespace
// Moves OUT of a segment register reuse the generic MOV template defined
// earlier in the file.  NOTE(review): for the 32/64-bit GPR destinations,
// whether the 16-bit selector is zero-extended depends on that template —
// confirm against the ISA's behavior for MOV r32/r64, Sreg.
DEF_ISEL(MOV_MEMw_SEG) = MOV<M16W, R16>;
DEF_ISEL(MOV_GPRv_SEG_16) = MOV<R16W, R16>;
DEF_ISEL(MOV_GPRv_SEG_32) = MOV<R32W, R16>;
IF_64BIT(DEF_ISEL(MOV_GPRv_SEG_64) = MOV<R64W, R16>;)
// Moves INTO a segment register go through the hypercall-raising semantics
// above so the runtime can react to the selector change.  One variant per
// segment, each with a memory and a register source form.
DEF_ISEL(MOV_SEG_MEMw_ES) = MOV_ES<M16>;
DEF_ISEL(MOV_SEG_MEMw_SS) = MOV_SS<M16>;
DEF_ISEL(MOV_SEG_MEMw_DS) = MOV_DS<M16>;
DEF_ISEL(MOV_SEG_MEMw_FS) = MOV_FS<M16>;
DEF_ISEL(MOV_SEG_MEMw_GS) = MOV_GS<M16>;
DEF_ISEL(MOV_SEG_GPR16_ES) = MOV_ES<R16>;
DEF_ISEL(MOV_SEG_GPR16_SS) = MOV_SS<R16>;
DEF_ISEL(MOV_SEG_GPR16_DS) = MOV_DS<R16>;
DEF_ISEL(MOV_SEG_GPR16_FS) = MOV_FS<R16>;
DEF_ISEL(MOV_SEG_GPR16_GS) = MOV_GS<R16>;
/*
25 MOV_DR MOV_DR_DR_GPR32 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
26 MOV_DR MOV_DR_DR_GPR64 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
27 MOV_DR MOV_DR_GPR32_DR DATAXFER BASE I86 ATTRIBUTES: RING0
28 MOV_DR MOV_DR_GPR64_DR DATAXFER BASE I86 ATTRIBUTES: RING0
1312 MASKMOVDQU MASKMOVDQU_XMMdq_XMMdq DATAXFER SSE2 SSE2 ATTRIBUTES: FIXED_BASE0 MASKOP NOTSX
545 MOVMSKPS MOVMSKPS_GPR32_XMMps DATAXFER SSE SSE ATTRIBUTES:
585 MOVSHDUP MOVSHDUP_XMMps_MEMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
586 MOVSHDUP MOVSHDUP_XMMps_XMMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
647 MOVLHPS MOVLHPS_XMMq_XMMq DATAXFER SSE SSE ATTRIBUTES:
648 MOVQ2DQ MOVQ2DQ_XMMdq_MMXq DATAXFER SSE2 SSE2 ATTRIBUTES: MMX_EXCEPT NOTSX
689 MOV_CR MOV_CR_CR_GPR32 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
690 MOV_CR MOV_CR_CR_GPR64 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
691 MOV_CR MOV_CR_GPR32_CR DATAXFER BASE I86 ATTRIBUTES: RING0
692 MOV_CR MOV_CR_GPR64_CR DATAXFER BASE I86 ATTRIBUTES: RING0
957 MOVSLDUP MOVSLDUP_XMMps_MEMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
958 MOVSLDUP MOVSLDUP_XMMps_XMMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
1071 MOVBE MOVBE_GPRv_MEMv DATAXFER MOVBE MOVBE ATTRIBUTES: SCALABLE
1072 MOVBE MOVBE_MEMv_GPRv DATAXFER MOVBE MOVBE ATTRIBUTES: SCALABLE
1484 MOVDQ2Q MOVDQ2Q_MMXq_XMMq DATAXFER SSE2 SSE2 ATTRIBUTES: MMX_EXCEPT NOTSX
1495 MOVMSKPD MOVMSKPD_GPR32_XMMpd DATAXFER SSE2 SSE2 ATTRIBUTES:
1829 MASKMOVQ MASKMOVQ_MMXq_MMXq DATAXFER MMX PENTIUMMMX ATTRIBUTES: FIXED_BASE0 MASKOP NOTSX
1839 MOVHLPS MOVHLPS_XMMq_XMMq DATAXFER SSE SSE ATTRIBUTES:
1880 MOVDDUP MOVDDUP_XMMdq_MEMq DATAXFER SSE3 SSE3 ATTRIBUTES: UNALIGNED
1881 MOVDDUP MOVDDUP_XMMdq_XMMq DATAXFER SSE3 SSE3 ATTRIBUTES: UNALIGNED
1882 BSWAP BSWAP_GPRv DATAXFER BASE I486REAL ATTRIBUTES: SCALABLE
2101 VMOVMSKPD VMOVMSKPD_GPR32d_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2102 VMOVMSKPD VMOVMSKPD_GPR32d_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2107 VMOVMSKPS VMOVMSKPS_GPR32d_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2108 VMOVMSKPS VMOVMSKPS_GPR32d_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2202 VMOVSHDUP VMOVSHDUP_XMMdq_MEMdq DATAXFER AVX AVX ATTRIBUTES:
2203 VMOVSHDUP VMOVSHDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2204 VMOVSHDUP VMOVSHDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES:
2205 VMOVSHDUP VMOVSHDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2281 VMOVDDUP VMOVDDUP_XMMdq_MEMq DATAXFER AVX AVX ATTRIBUTES:
2282 VMOVDDUP VMOVDDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2283 VMOVDDUP VMOVDDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES:
2284 VMOVDDUP VMOVDDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2464 VMOVSLDUP VMOVSLDUP_XMMdq_MEMdq DATAXFER AVX AVX ATTRIBUTES:
2465 VMOVSLDUP VMOVSLDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2466 VMOVSLDUP VMOVSLDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES:
2467 VMOVSLDUP VMOVSLDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2619 VMOVLHPS VMOVLHPS_XMMdq_XMMq_XMMq DATAXFER AVX AVX ATTRIBUTES:
3395 VMOVHLPS VMOVHLPS_XMMdq_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
3804 VPMOVDB VPMOVDB_XMMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3805 VPMOVDB VPMOVDB_MEMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3806 VPMOVDB VPMOVDB_XMMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3807 VPMOVDB VPMOVDB_MEMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3808 VPMOVDB VPMOVDB_XMMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3809 VPMOVDB VPMOVDB_MEMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3814 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3815 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3816 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3817 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3818 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3819 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3826 VPMOVDW VPMOVDW_YMMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3827 VPMOVDW VPMOVDW_MEMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3828 VPMOVDW VPMOVDW_XMMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3829 VPMOVDW VPMOVDW_MEMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3830 VPMOVDW VPMOVDW_XMMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3831 VPMOVDW VPMOVDW_MEMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3853 VMOVSHDUP VMOVSHDUP_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3854 VMOVSHDUP VMOVSHDUP_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
3855 VMOVSHDUP VMOVSHDUP_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3856 VMOVSHDUP VMOVSHDUP_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
3857 VMOVSHDUP VMOVSHDUP_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3858 VMOVSHDUP VMOVSHDUP_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
3861 VPMOVSDW VPMOVSDW_YMMi16_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3862 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3863 VPMOVSDW VPMOVSDW_XMMi16_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3864 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3865 VPMOVSDW VPMOVSDW_XMMi16_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3866 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3897 VPMOVZXWQ VPMOVZXWQ_ZMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3898 VPMOVZXWQ VPMOVZXWQ_ZMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3899 VPMOVZXWQ VPMOVZXWQ_XMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3900 VPMOVZXWQ VPMOVZXWQ_XMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3901 VPMOVZXWQ VPMOVZXWQ_YMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3902 VPMOVZXWQ VPMOVZXWQ_YMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3937 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3938 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3939 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3940 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3941 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3942 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3962 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3963 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3964 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3965 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3966 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3967 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3968 VPMOVUSQD VPMOVUSQD_YMMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3969 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3970 VPMOVUSQD VPMOVUSQD_XMMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3971 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3972 VPMOVUSQD VPMOVUSQD_XMMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3973 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3980 VPMOVSXDQ VPMOVSXDQ_ZMMi64_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3981 VPMOVSXDQ VPMOVSXDQ_ZMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3982 VPMOVSXDQ VPMOVSXDQ_XMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3983 VPMOVSXDQ VPMOVSXDQ_XMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3984 VPMOVSXDQ VPMOVSXDQ_YMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3985 VPMOVSXDQ VPMOVSXDQ_YMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4027 VMOVDDUP VMOVDDUP_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4028 VMOVDDUP VMOVDDUP_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX
4029 VMOVDDUP VMOVDDUP_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4030 VMOVDDUP VMOVDDUP_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX
4031 VMOVDDUP VMOVDDUP_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4032 VMOVDDUP VMOVDDUP_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX
4045 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4046 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4047 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4048 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4049 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4050 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4051 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4052 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4053 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4054 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4055 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4056 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4242 VPMOVD2M VPMOVD2M_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
4243 VPMOVD2M VPMOVD2M_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
4244 VPMOVD2M VPMOVD2M_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
4260 VPMOVSXBQ VPMOVSXBQ_ZMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4261 VPMOVSXBQ VPMOVSXBQ_ZMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4262 VPMOVSXBQ VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4263 VPMOVSXBQ VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4264 VPMOVSXBQ VPMOVSXBQ_YMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4265 VPMOVSXBQ VPMOVSXBQ_YMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4284 VPMOVZXBD VPMOVZXBD_ZMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4285 VPMOVZXBD VPMOVZXBD_ZMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4286 VPMOVZXBD VPMOVZXBD_XMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4287 VPMOVZXBD VPMOVZXBD_XMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4288 VPMOVZXBD VPMOVZXBD_YMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4289 VPMOVZXBD VPMOVZXBD_YMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4314 VPMOVB2M VPMOVB2M_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4315 VPMOVB2M VPMOVB2M_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4316 VPMOVB2M VPMOVB2M_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4356 VMOVSLDUP VMOVSLDUP_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4357 VMOVSLDUP VMOVSLDUP_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
4358 VMOVSLDUP VMOVSLDUP_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4359 VMOVSLDUP VMOVSLDUP_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
4360 VMOVSLDUP VMOVSLDUP_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4361 VMOVSLDUP VMOVSLDUP_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
4375 VPMOVSXBW VPMOVSXBW_XMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4376 VPMOVSXBW VPMOVSXBW_XMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4377 VPMOVSXBW VPMOVSXBW_YMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4378 VPMOVSXBW VPMOVSXBW_YMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4379 VPMOVSXBW VPMOVSXBW_ZMMi16_MASKmskw_YMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4380 VPMOVSXBW VPMOVSXBW_ZMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4422 VPMOVZXBQ VPMOVZXBQ_ZMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4423 VPMOVZXBQ VPMOVZXBQ_ZMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4424 VPMOVZXBQ VPMOVZXBQ_XMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4425 VPMOVZXBQ VPMOVZXBQ_XMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4426 VPMOVZXBQ VPMOVZXBQ_YMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4427 VPMOVZXBQ VPMOVZXBQ_YMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4494 VPMOVW2M VPMOVW2M_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4495 VPMOVW2M VPMOVW2M_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4496 VPMOVW2M VPMOVW2M_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4539 VPMOVM2W VPMOVM2W_XMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4540 VPMOVM2W VPMOVM2W_YMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4541 VPMOVM2W VPMOVM2W_ZMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4560 VPMOVM2B VPMOVM2B_XMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4561 VPMOVM2B VPMOVM2B_YMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4562 VPMOVM2B VPMOVM2B_ZMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4577 VPMOVM2D VPMOVM2D_XMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
4578 VPMOVM2D VPMOVM2D_YMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
4579 VPMOVM2D VPMOVM2D_ZMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
4605 VMOVLHPS VMOVLHPS_XMMf32_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES:
4671 VPMOVZXBW VPMOVZXBW_XMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4672 VPMOVZXBW VPMOVZXBW_XMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4673 VPMOVZXBW VPMOVZXBW_YMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4674 VPMOVZXBW VPMOVZXBW_YMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4675 VPMOVZXBW VPMOVZXBW_ZMMi16_MASKmskw_YMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4676 VPMOVZXBW VPMOVZXBW_ZMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4696 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4697 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4698 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4699 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4700 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4701 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4717 VPMOVSQD VPMOVSQD_YMMi32_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4718 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4719 VPMOVSQD VPMOVSQD_XMMi32_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4720 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4721 VPMOVSQD VPMOVSQD_XMMi32_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4722 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4723 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4724 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4725 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4726 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4727 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4728 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4735 VPMOVWB VPMOVWB_XMMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4736 VPMOVWB VPMOVWB_MEMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4737 VPMOVWB VPMOVWB_XMMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4738 VPMOVWB VPMOVWB_MEMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4739 VPMOVWB VPMOVWB_YMMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4740 VPMOVWB VPMOVWB_MEMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4862 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4863 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4864 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4865 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4866 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4867 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4868 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4869 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4870 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4871 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4872 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4873 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4878 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4879 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4880 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4881 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4882 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4883 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4908 VPMOVUSDW VPMOVUSDW_YMMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4909 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4910 VPMOVUSDW VPMOVUSDW_XMMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4911 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4912 VPMOVUSDW VPMOVUSDW_XMMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4913 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5292 VPMOVQ2M VPMOVQ2M_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
5293 VPMOVQ2M VPMOVQ2M_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
5294 VPMOVQ2M VPMOVQ2M_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
5515 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5516 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5517 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5518 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5519 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5520 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5521 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5522 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5523 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5524 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5525 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5526 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5535 VPMOVSXBD VPMOVSXBD_ZMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5536 VPMOVSXBD VPMOVSXBD_ZMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5537 VPMOVSXBD VPMOVSXBD_XMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5538 VPMOVSXBD VPMOVSXBD_XMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5539 VPMOVSXBD VPMOVSXBD_YMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5540 VPMOVSXBD VPMOVSXBD_YMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5627 VPMOVZXWD VPMOVZXWD_ZMMi32_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5628 VPMOVZXWD VPMOVZXWD_ZMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5629 VPMOVZXWD VPMOVZXWD_XMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5630 VPMOVZXWD VPMOVZXWD_XMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5631 VPMOVZXWD VPMOVZXWD_YMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5632 VPMOVZXWD VPMOVZXWD_YMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5636 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5637 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5638 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5639 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5640 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5641 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5642 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5643 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5644 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5645 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5646 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5647 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5665 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5666 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5667 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5668 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5669 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5670 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5671 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5672 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5673 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5674 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5675 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5676 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5902 VPMOVZXDQ VPMOVZXDQ_ZMMi64_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5903 VPMOVZXDQ VPMOVZXDQ_ZMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5904 VPMOVZXDQ VPMOVZXDQ_XMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5905 VPMOVZXDQ VPMOVZXDQ_XMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5906 VPMOVZXDQ VPMOVZXDQ_YMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5907 VPMOVZXDQ VPMOVZXDQ_YMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5931 VPMOVUSWB VPMOVUSWB_XMMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5932 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5933 VPMOVUSWB VPMOVUSWB_XMMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5934 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5935 VPMOVUSWB VPMOVUSWB_YMMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5936 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5961 VPMOVSWB VPMOVSWB_XMMi8_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5962 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5963 VPMOVSWB VPMOVSWB_XMMi8_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5964 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5965 VPMOVSWB VPMOVSWB_YMMi8_MASKmskw_ZMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5966 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_ZMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5990 VPMOVSXWD VPMOVSXWD_ZMMi32_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5991 VPMOVSXWD VPMOVSXWD_ZMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5992 VPMOVSXWD VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5993 VPMOVSXWD VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5994 VPMOVSXWD VPMOVSXWD_YMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5995 VPMOVSXWD VPMOVSXWD_YMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5996 VMOVHLPS VMOVHLPS_XMMf32_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES:
6007 VPMOVSXWQ VPMOVSXWQ_ZMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6008 VPMOVSXWQ VPMOVSXWQ_ZMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6009 VPMOVSXWQ VPMOVSXWQ_XMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6010 VPMOVSXWQ VPMOVSXWQ_XMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6011 VPMOVSXWQ VPMOVSXWQ_YMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6012 VPMOVSXWQ VPMOVSXWQ_YMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6171 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6172 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6173 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6174 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6175 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6176 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6177 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6178 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6179 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6180 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6181 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6182 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6309 VPMOVQW VPMOVQW_XMMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6310 VPMOVQW VPMOVQW_MEMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6311 VPMOVQW VPMOVQW_XMMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6312 VPMOVQW VPMOVQW_MEMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6313 VPMOVQW VPMOVQW_XMMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6314 VPMOVQW VPMOVQW_MEMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6325 VPMOVQB VPMOVQB_XMMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6326 VPMOVQB VPMOVQB_MEMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6327 VPMOVQB VPMOVQB_XMMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6328 VPMOVQB VPMOVQB_MEMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6329 VPMOVQB VPMOVQB_XMMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6330 VPMOVQB VPMOVQB_MEMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6331 VPMOVQD VPMOVQD_YMMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6332 VPMOVQD VPMOVQD_MEMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6333 VPMOVQD VPMOVQD_XMMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6334 VPMOVQD VPMOVQD_MEMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6335 VPMOVQD VPMOVQD_XMMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6336 VPMOVQD VPMOVQD_MEMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6349 VPMOVM2Q VPMOVM2Q_XMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
6350 VPMOVM2Q VPMOVM2Q_YMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
6351 VPMOVM2Q VPMOVM2Q_ZMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
*/
namespace {
// Zero-extending move: writes the source value into `dst`; WriteZExt clears
// any remaining upper bits of the destination location.
template <typename D, typename S>
DEF_SEM(MOVZX, D dst, S src) {
WriteZExt(dst, Read(src));
return memory;
}
// Sign-extending move: the source value is first sign-extended to the signed
// type `SextT`, then written with WriteZExt so any destination bits above
// that width are cleared (matching the x86 rule that 32-bit register writes
// zero the upper half of the 64-bit register).
template <typename D, typename S, typename SextT>
DEF_SEM(MOVSX, D dst, S src) {
WriteZExt(dst, SExtTo<SextT>(Read(src)));
return memory;
}
} // namespace
// MOVZX: zero-extend 8/16-bit register or memory sources into wider GPRs.
DEF_ISEL(MOVZX_GPRv_MEMb_16) = MOVZX<R16W, M8>;
DEF_ISEL(MOVZX_GPRv_MEMb_32) = MOVZX<R32W, M8>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_MEMb_64) = MOVZX<R64W, M8>;)
DEF_ISEL(MOVZX_GPRv_GPR8_16) = MOVZX<R16W, R8>;
DEF_ISEL(MOVZX_GPRv_GPR8_32) = MOVZX<R32W, R8>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_GPR8_64) = MOVZX<R64W, R8>;)
DEF_ISEL(MOVZX_GPRv_MEMw_32) = MOVZX<R32W, M16>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_MEMw_64) = MOVZX<R64W, M16>;)
DEF_ISEL(MOVZX_GPRv_GPR16_32) = MOVZX<R32W, R16>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_GPR16_64) = MOVZX<R64W, R16>;)
// MOVSX: sign-extend 8/16-bit sources; the third template argument fixes the
// signed width the source is extended through before the write.
DEF_ISEL(MOVSX_GPRv_MEMb_16) = MOVSX<R16W, M8, int16_t>;
DEF_ISEL(MOVSX_GPRv_MEMb_32) = MOVSX<R32W, M8, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_MEMb_64) = MOVSX<R64W, M8, int64_t>;)
DEF_ISEL(MOVSX_GPRv_GPR8_16) = MOVSX<R16W, R8, int16_t>;
DEF_ISEL(MOVSX_GPRv_GPR8_32) = MOVSX<R32W, R8, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_GPR8_64) = MOVSX<R64W, R8, int64_t>;)
DEF_ISEL(MOVSX_GPRv_MEMw_32) = MOVSX<R32W, M16, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_MEMw_64) = MOVSX<R64W, M16, int64_t>;)
DEF_ISEL(MOVSX_GPRv_GPR16_32) = MOVSX<R32W, R16, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_GPR16_64) = MOVSX<R64W, R16, int64_t>;)
// MOVSXD: doubleword sign-extension forms; note the 16-bit "z" variant maps
// to a 16 -> 32 sign extension, while 32-bit sources extend to 64 bits on
// 64-bit targets.
DEF_ISEL(MOVSXD_GPRv_GPRz_16) = MOVSX<R32W, R16, int32_t>;
DEF_ISEL(MOVSXD_GPRv_GPRz_32) = MOVSX<R32W, R32, int32_t>;
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMd_32) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPR32_32) = MOVSX<R64W, R32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMd_64) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMz_64) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPR32_64) = MOVSX<R64W, R32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPRz_64) = MOVSX<R64W, R32, int64_t>;)
#if HAS_FEATURE_AVX512
namespace {
// VPMOVSXBQ (128-bit, masked): sign-extends packed int8 lanes of `src` into
// the two int64 lanes of `dst`. Zero-masking form: a clear bit in `k1` zeroes
// the corresponding destination lane; a set bit receives the sign-extended
// source byte.
template <typename D, typename K, typename S>
DEF_SEM(VPMOVSXBQ_MASKmskw_SIMD128, D dst, K k1, S src) {
auto src_vec = SReadV8(src);
auto dst_vec = SClearV64(SReadV64(dst));
auto k_vec = Read(k1);
for (auto i = 0u; i < 2u; i++) {
if (READBIT(k_vec, i) == 0) {
dst_vec = SInsertV64(dst_vec, i, 0);
} else {
auto v = SExtTo<int64_t>(SExtractV8(src_vec, i));
dst_vec = SInsertV64(dst_vec, i, v);
}
}
SWriteV64(dst, dst_vec);
return memory;
}
// VPMOVSXWD (128-bit, masked): same zero-masking pattern as above, but
// sign-extending four int16 source lanes into four int32 destination lanes.
template <typename D, typename K, typename S>
DEF_SEM(VPMOVSXWD_MASKmskw_SIMD128, D dst, K k1, S src) {
auto src_vec = SReadV16(src);
auto dst_vec = SClearV32(SReadV32(dst));
auto k_vec = Read(k1);
for (auto i = 0u; i < 4u; i++) {
if (READBIT(k_vec, i) == 0) {
dst_vec = SInsertV32(dst_vec, i, 0);
} else {
auto v = SExtTo<int32_t>(SExtractV16(src_vec, i));
dst_vec = SInsertV32(dst_vec, i, v);
}
}
SWriteV32(dst, dst_vec);
return memory;
}
// KMOVW: moves the low 16 bits of the source into the destination; the
// UInt16 truncation plus WriteZExt models the mask-register word move across
// the register/memory variants selected below.
template <typename S1, typename S2>
DEF_SEM(KMOVW, S1 dst, S2 src) {
WriteZExt(dst, UInt16(Read(src)));
return memory;
}
} // namespace
// Masked sign-extension selections (register and memory source forms).
DEF_ISEL(VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512) = VPMOVSXBQ_MASKmskw_SIMD128<VV128W, R8, MV16>;
DEF_ISEL(VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512) = VPMOVSXBQ_MASKmskw_SIMD128<VV128W, R8, V128>;
DEF_ISEL(VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512) = VPMOVSXWD_MASKmskw_SIMD128<VV128W, R8, MV64>;
DEF_ISEL(VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512) = VPMOVSXWD_MASKmskw_SIMD128<VV128W, R8, V128>;
// KMOVW selections: mask<->mask, mask<->GPR32, and mask<->memory word moves.
DEF_ISEL(KMOVW_MASKmskw_MASKu16_AVX512) = KMOVW<R64W, R64>;
DEF_ISEL(KMOVW_GPR32u32_MASKmskw_AVX512) = KMOVW<R32W, R64>;
DEF_ISEL(KMOVW_MASKmskw_GPR32u32_AVX512) = KMOVW<R64W, R32>;
DEF_ISEL(KMOVW_MASKmskw_MEMu16_AVX512) = KMOVW<R64W, M16>;
DEF_ISEL(KMOVW_MEMu16_MASKmskw_AVX512) = KMOVW<M16W, R64>;
#endif // HAS_FEATURE_AVX512
| trailofbits/remill | lib/Arch/X86/Semantics/DATAXFER.cpp | C++ | apache-2.0 | 75,427 |
/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.compiler.batch;
import com.asakusafw.compiler.batch.batch.JobFlow1;
import com.asakusafw.vocabulary.batch.Batch;
import com.asakusafw.vocabulary.batch.BatchDescription;
/**
 * A batch class that is deliberately package-private (not {@code public}).
 * NOTE(review): presumably a negative test fixture used to verify that the
 * batch compiler rejects non-public batch classes — confirm against the
 * corresponding compiler test.
 */
@Batch(name = "testing")
class NotPublic extends BatchDescription {
// Runs JobFlow1 immediately, with no predecessors.
@Override
protected void describe() {
run(JobFlow1.class).soon();
}
}
| cocoatomo/asakusafw | mapreduce/compiler/core/src/test/java/com/asakusafw/compiler/batch/NotPublic.java | Java | apache-2.0 | 1,012 |
package com.google.api.ads.dfp.jaxws.v201511;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CustomTargetingValue.Status.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="CustomTargetingValue.Status">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="ACTIVE"/>
* <enumeration value="INACTIVE"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "CustomTargetingValue.Status")
@XmlEnum
public enum CustomTargetingValueStatus {
/** The object is active. */
ACTIVE,
/** The object is no longer active. */
INACTIVE,
/**
 * The value returned if the actual value is not exposed by the requested
 * API version.
 */
UNKNOWN;
/**
 * Returns the wire value of this constant, which is identical to its
 * Java {@link #name()}.
 */
public String value() {
return name();
}
/**
 * Resolves a wire value back to its enum constant.
 *
 * @param v the string value, e.g. {@code "ACTIVE"}
 * @throws IllegalArgumentException if {@code v} does not match any constant
 */
public static CustomTargetingValueStatus fromValue(String v) {
return valueOf(v);
}
}
| gawkermedia/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201511/CustomTargetingValueStatus.java | Java | apache-2.0 | 1,306 |
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.compositeeditor;
/**
 * Listener interface for changes to a composite viewer model's component
 * selection.
 */
public interface CompositeModelSelectionListener {
/**
 * Called to indicate the model's component selection has changed.
 * Implementations typically refresh any UI state derived from the
 * current selection.
 */
void selectionChanged();
}
| NationalSecurityAgency/ghidra | Ghidra/Features/Base/src/main/java/ghidra/app/plugin/core/compositeeditor/CompositeModelSelectionListener.java | Java | apache-2.0 | 904 |
๏ปฟ/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ec2/model/DescribeInstancesResponse.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/logging/LogMacros.h>
#include <utility>
using namespace Aws::EC2::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils::Logging;
using namespace Aws::Utils;
using namespace Aws;
// Default constructor: leaves the reservation list empty and all members
// value-initialized.
DescribeInstancesResponse::DescribeInstancesResponse()
{
}
// Deserializing constructor: delegates all XML parsing to operator=.
DescribeInstancesResponse::DescribeInstancesResponse(const Aws::AmazonWebServiceResult<XmlDocument>& result)
{
*this = result;
}
// Deserializes a DescribeInstances XML payload into this response object:
// collects every <item> under <reservationSet>, captures <nextToken>, and
// records the request id from the document root.
DescribeInstancesResponse& DescribeInstancesResponse::operator =(const Aws::AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& doc = result.GetPayload();
  XmlNode root = doc.GetRootElement();
  XmlNode payloadNode = root;

  // Some responses wrap the payload in a "DescribeInstancesResponse"
  // element; descend one level when the root is not already that node.
  if (!root.IsNull() && (root.GetName() != "DescribeInstancesResponse"))
  {
    payloadNode = root.FirstChild("DescribeInstancesResponse");
  }

  if (!payloadNode.IsNull())
  {
    XmlNode reservationSetNode = payloadNode.FirstChild("reservationSet");
    if (!reservationSetNode.IsNull())
    {
      // Append each <item> child as a Reservation.
      for (XmlNode item = reservationSetNode.FirstChild("item"); !item.IsNull(); item = item.NextNode("item"))
      {
        m_reservations.push_back(item);
      }
    }

    XmlNode tokenNode = payloadNode.FirstChild("nextToken");
    if (!tokenNode.IsNull())
    {
      m_nextToken = StringUtils::Trim(tokenNode.GetText().c_str());
    }
  }

  if (!root.IsNull())
  {
    // The request id lives directly under the document root.
    XmlNode requestIdNode = root.FirstChild("requestId");
    if (!requestIdNode.IsNull())
    {
      m_responseMetadata.SetRequestId(StringUtils::Trim(requestIdNode.GetText().c_str()));
    }
    AWS_LOGSTREAM_DEBUG("Aws::EC2::Model::DescribeInstancesResponse", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() );
  }

  return *this;
}
| JoyIfBam5/aws-sdk-cpp | aws-cpp-sdk-ec2/source/model/DescribeInstancesResponse.cpp | C++ | apache-2.0 | 2,558 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
import pytest
from google.cloud.vision import enums
from google.cloud.vision_v1 import ProductSearchClient
from google.cloud.vision_v1.proto.image_annotator_pb2 import (
AnnotateImageResponse,
EntityAnnotation,
SafeSearchAnnotation,
)
from google.cloud.vision_v1.proto.product_search_service_pb2 import Product, ProductSet, ReferenceImage
from google.protobuf.json_format import MessageToDict
from parameterized import parameterized
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook
from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
# Identifiers shared by the test cases below.
PROJECT_ID_TEST = 'project-id'
PROJECT_ID_TEST_2 = 'project-id-2'
LOC_ID_TEST = 'loc-id'
LOC_ID_TEST_2 = 'loc-id-2'
PRODUCTSET_ID_TEST = 'ps-id'
PRODUCTSET_ID_TEST_2 = 'ps-id-2'
# Fully-qualified product-set resource name built from the ids above.
PRODUCTSET_NAME_TEST = f'projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/productSets/{PRODUCTSET_ID_TEST}'
PRODUCT_ID_TEST = 'p-id'
PRODUCT_ID_TEST_2 = 'p-id-2'
PRODUCT_NAME_TEST = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}"
PRODUCT_NAME = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}"
REFERENCE_IMAGE_ID_TEST = 'ri-id'
REFERENCE_IMAGE_GEN_ID_TEST = 'ri-id'
# Single logo-detection annotate request against a GCS-hosted image.
ANNOTATE_IMAGE_REQUEST = {
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
}
# Batch variant: two copies of the same logo-detection request.
BATCH_ANNOTATE_IMAGE_REQUEST = [
{
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
},
{
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
},
]
# Fully-qualified reference-image resource name.
REFERENCE_IMAGE_NAME_TEST = (
f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/"
f"{PRODUCTSET_ID_TEST}/referenceImages/{REFERENCE_IMAGE_ID_TEST}"
)
REFERENCE_IMAGE_TEST = ReferenceImage(name=REFERENCE_IMAGE_GEN_ID_TEST)
REFERENCE_IMAGE_WITHOUT_ID_NAME = ReferenceImage()
# Inputs for the detect_* tests.
DETECT_TEST_IMAGE = {"source": {"image_uri": "https://foo.com/image.jpg"}}
DETECT_TEST_ADDITIONAL_PROPERTIES = {"test-property-1": "test-value-1", "test-property-2": "test-value-2"}
class TestGcpVisionHook(unittest.TestCase):
def setUp(self):
    # Build the hook with project-id inference patched out so the tests do
    # not require real GCP credentials or a default project.
    init_target = 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.__init__'
    with mock.patch(init_target, new=mock_base_gcp_hook_default_project_id):
        self.hook = CloudVisionHook(gcp_conn_id='test')
@mock.patch(
    "airflow.providers.google.cloud.hooks.vision.CloudVisionHook.client_info",
    new_callable=mock.PropertyMock,
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook._get_credentials")
@mock.patch("airflow.providers.google.cloud.hooks.vision.ProductSearchClient")
def test_product_search_client_creation(self, mock_client, mock_get_creds, mock_client_info):
    """get_conn() builds a ProductSearchClient once and caches it on the hook."""
    client = self.hook.get_conn()

    mock_client.assert_called_once_with(
        credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value
    )
    assert client == mock_client.return_value
    assert self.hook._client == client
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_explicit_id(self, get_conn):
    """When the caller supplies a product-set id, that same id is returned."""
    create_method = get_conn.return_value.create_product_set
    create_method.return_value = None
    product_set = ProductSet()

    result = self.hook.create_product_set(
        location=LOC_ID_TEST,
        product_set_id=PRODUCTSET_ID_TEST,
        product_set=product_set,
        project_id=PROJECT_ID_TEST,
        retry=None,
        timeout=None,
        metadata=None,
    )

    # The explicit id is echoed back rather than parsed from the API response.
    assert result == PRODUCTSET_ID_TEST
    create_method.assert_called_once_with(
        parent=ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST),
        product_set=product_set,
        product_set_id=PRODUCTSET_ID_TEST,
        retry=None,
        timeout=None,
        metadata=None,
    )
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_autogenerated_id(self, get_conn):
    """Without an explicit id, the id is extracted from the response's name."""
    autogenerated_id = 'autogen-id'
    create_method = get_conn.return_value.create_product_set
    create_method.return_value = ProductSet(
        name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
    )
    product_set = ProductSet()

    result = self.hook.create_product_set(
        location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST
    )

    # The id was not supplied, so it must come out of the API response name.
    assert result == autogenerated_id
    create_method.assert_called_once_with(
        parent=ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST),
        product_set=product_set,
        product_set_id=None,
        retry=None,
        timeout=None,
        metadata=None,
    )
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product_set = None
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=None,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_get_productset(self, get_conn):
# Given
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
response_product_set = ProductSet(name=name)
get_product_set_method = get_conn.return_value.get_product_set
get_product_set_method.return_value = response_product_set
# When
response = self.hook.get_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response
assert response == MessageToDict(response_product_set)
get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_no_explicit_name(self, get_conn):
# Given
product_set = ProductSet()
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
productset_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
result = self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=productset_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
)
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_set_id, get_conn
):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=location,
product_set_id=product_set_id,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert ERR_UNABLE_TO_CREATE.format(label='ProductSet', id_label='productset_id') in str(err)
update_product_set_method.assert_not_called()
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_explicit_name_missing_params_for_constructed_name(
self, location, product_set_id, get_conn
):
# Given
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
# When
result = self.hook.update_product_set(
location=location,
product_set_id=product_set_id,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=explicit_ps_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
template_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
# Location and product_set_id are passed in addition to a ProductSet with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
# self.assertIn("The required parameter 'project_id' is missing", str(err))
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_ps_name,
constructed_name=template_ps_name,
label="ProductSet",
id_label="productset_id",
)
in str(err)
)
update_product_set_method.assert_not_called()
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_delete_productset(self, get_conn):
# Given
delete_product_set_method = get_conn.return_value.delete_product_set
delete_product_set_method.return_value = None
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
# When
response = self.hook.delete_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn',
**{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST},
)
def test_create_reference_image_explicit_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn',
**{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST},
)
def test_create_reference_image_autogenerated_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_GEN_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_add_product_to_product_set(self, get_conn):
# Given
add_product_to_product_set_method = get_conn.return_value.add_product_to_product_set
# When
self.hook.add_product_to_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
add_product_to_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
)
# remove_product_from_product_set
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_remove_product_from_product_set(self, get_conn):
# Given
remove_product_from_product_set_method = get_conn.return_value.remove_product_from_product_set
# When
self.hook.remove_product_from_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
remove_product_from_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client')
def test_annotate_image(self, annotator_client_mock):
# Given
annotate_image_method = annotator_client_mock.annotate_image
# When
self.hook.annotate_image(request=ANNOTATE_IMAGE_REQUEST)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
annotate_image_method.assert_called_once_with(
request=ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client')
def test_batch_annotate_images(self, annotator_client_mock):
# Given
batch_annotate_images_method = annotator_client_mock.batch_annotate_images
# When
self.hook.batch_annotate_images(requests=BATCH_ANNOTATE_IMAGE_REQUEST)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
batch_annotate_images_method.assert_called_once_with(
requests=BATCH_ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_explicit_id(self, get_conn):
# Given
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCT_ID_TEST
create_product_method.assert_called_once_with(
parent=parent,
product=product,
product_id=PRODUCT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-p-id'
response_product = Product(
name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn):
# Given
wrong_name = 'wrong_name_not_a_correct_path'
response_product = Product(name=wrong_name)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (wrong name format) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get id from name' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product = None
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_no_explicit_name(self, get_conn):
# Given
product = Product()
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
result = self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=product_name), metadata=None, retry=None, timeout=None, update_mask=None
)
@parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_id, get_conn
):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=location,
product_id=product_id,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert ERR_UNABLE_TO_CREATE.format(label='Product', id_label='product_id') in str(err)
update_product_method.assert_not_called()
@parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_explicit_name_missing_params_for_constructed_name(
self, location, product_id, get_conn
):
# Given
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
# When
result = self.hook.update_product(
location=location,
product_id=product_id,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=explicit_p_name), metadata=None, retry=None, timeout=None, update_mask=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
template_p_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
# Location and product_id are passed in addition to a Product with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_p_name,
constructed_name=template_p_name,
label="Product",
id_label="product_id",
)
in str(err)
)
update_product_method.assert_not_called()
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_delete_product(self, get_conn):
# Given
delete_product_method = get_conn.return_value.delete_product
delete_product_method.return_value = None
name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
response = self.hook.delete_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(image=DETECT_TEST_IMAGE)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text_with_additional_properties(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_document_text_detection(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_document_text_detection_with_additional_properties(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_document_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.document_text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(image=DETECT_TEST_IMAGE)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection_with_additional_properties(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.label_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.label_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection_with_additional_properties(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.safe_search_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
| apache/incubator-airflow | tests/providers/google/cloud/hooks/test_vision.py | Python | apache-2.0 | 38,031 |
/* COPYRIGHT 2012 SUPERMAP
 * 本程序只能在有效的授权许可下使用。
 * 未经许可，不得以任何手段擅自使用或传播。*/
/**
* @requires SuperMap/Util.js
* @requires SuperMap/REST.js
*/
/**
 * Class: SuperMap.REST.ChartQueryParameters
 * Chart query parameters. Configures a chart query, which comes in two
 * flavors selected via the queryMode property: chart attribute query and
 * chart bounds query. Required properties: queryMode, chartLayerNames,
 * chartQueryFilterParameters. For a bounds query, the bounds property is
 * required as well.
 */
SuperMap.REST.ChartQueryParameters = SuperMap.Class({
    /**
     * APIProperty: queryMode
     * {String} Chart query mode. Two modes are supported:
     * attribute query ("ChartAttributeQuery") and spatial/bounds query
     * ("ChartBoundsQuery").
     */
    queryMode:null,
    /**
     * APIProperty: bounds
     * {<SuperMap.Bounds>} Query extent; required when queryMode is
     * "ChartBoundsQuery".
     */
    bounds:null,
    /**
     * APIProperty: chartLayerNames
     * {Array(String)} Names of the chart layers to query.
     */
    chartLayerNames:null,
    /**
     * APIProperty: chartQueryFilterParameters
     * {Array <SuperMap.REST.ChartQueryFilterParameter>} Chart query filters:
     * object code, applicable object kinds (point/line/region) and attribute
     * field conditions.
     */
    chartQueryFilterParameters:null,
    /**
     * Property: returnContent
     * {Boolean} Whether the query returns the record sets themselves
     * (QueryResult.recordsets, default true) or only a resource reference
     * (QueryResult.resourceInfo, when false). Exactly one of the two is
     * populated in the result depending on this flag.
     */
    returnContent:true,
    /**
     * APIProperty: startRecord
     * {Number} Zero-based index of the first record to return. Defaults to 0.
     */
    startRecord:0,
    /**
     * APIProperty: expectCount
     * {Number} Maximum number of records to return; must be greater than 0.
     */
    expectCount:null,
    /**
     * Constructor: SuperMap.REST.ChartQueryParameters
     * Creates a new ChartQueryParameters instance.
     *
     * Parameters:
     * options - {Object} Optional bag copied onto this instance. Recognized
     * keys: queryMode, bounds, chartLayerNames, chartQueryFilterParameters,
     * returnContent, startRecord, expectCount (see the matching properties).
     */
    initialize:function (options) {
        if (!options) {
            return;
        }
        SuperMap.Util.extend(this, options);
    },
    /**
     * APIMethod: destroy
     * Releases resources by resetting all properties to their defaults.
     */
    destroy:function () {
        var me = this;
        me.queryMode = null;
        me.bounds = null;
        me.chartLayerNames = null;
        me.chartQueryFilterParameters = null;
        me.returnContent = true;
        me.startRecord = 0;
        me.expectCount = null;
    },
    /**
     * Method: getVariablesJson
     * Serializes this object into the JSON string understood by the service.
     *
     * Returns:
     * {String} JSON representation of the query parameters.
     */
    getVariablesJson:function () {
        // Collect the top-level members in an array and join with "," at the
        // end; the previous string-concatenation approach left a trailing
        // comma (invalid JSON) whenever optional sections were absent.
        var parts = [];
        parts.push("\"queryMode\":\"" + this.queryMode + "\"");
        if (this.chartLayerNames && this.chartLayerNames.length) {
            var chartLayersArray = [];
            for (var i = 0, layerLength = this.chartLayerNames.length; i < layerLength; i++) {
                chartLayersArray.push("\"" + this.chartLayerNames[i] + "\"");
            }
            parts.push("\"chartLayerNames\":[" + chartLayersArray.join(",") + "]");
        }
        if (this.queryMode === "ChartBoundsQuery" && this.bounds) {
            parts.push(
                "\"bounds\":" + "{" + "\"leftBottom\":" + "{" + "\"x\":" + this.bounds.left + "," +
                "\"y\":" + this.bounds.bottom + "}" + "," + "\"rightTop\":" + "{" + "\"x\":" + this.bounds.right + "," +
                "\"y\":" + this.bounds.top + "}" + "}"
            );
        }
        if (this.chartQueryFilterParameters && this.chartQueryFilterParameters.length) {
            var chartParamArray = [];
            for (var j = 0, chartLength = this.chartQueryFilterParameters.length; j < chartLength; j++) {
                // Each filter knows how to serialize itself; no need to
                // allocate a throwaway ChartQueryFilterParameter first.
                chartParamArray.push(this.chartQueryFilterParameters[j].toJson());
            }
            var chartParamsJson = "\"chartQueryParams\":" + "[" + chartParamArray.join(",") + "]" + ",";
            chartParamsJson += "\"startRecord\":" + this.startRecord + ",";
            chartParamsJson += "\"expectCount\":" + this.expectCount;
            parts.push("\"chartQueryParameters\":" + "{" + chartParamsJson + "}");
        }
        return "{" + parts.join(",") + "}";
    },
    CLASS_NAME:"SuperMap.REST.ChartQueryParameters"
});
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatch.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.cloudwatch.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
* ListDashboardsRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListDashboardsRequestMarshaller implements Marshaller<Request<ListDashboardsRequest>, ListDashboardsRequest> {

    /**
     * Marshals a {@link ListDashboardsRequest} into a signed-request form:
     * a POST with the CloudWatch Query-API parameters (Action/Version plus
     * the optional dashboard-name prefix and pagination token).
     *
     * @param listDashboardsRequest the request to marshal; must not be null
     * @return the wire-level request
     * @throws SdkClientException if {@code listDashboardsRequest} is null
     */
    public Request<ListDashboardsRequest> marshall(ListDashboardsRequest listDashboardsRequest) {
        if (listDashboardsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        final Request<ListDashboardsRequest> marshalled =
                new DefaultRequest<ListDashboardsRequest>(listDashboardsRequest, "AmazonCloudWatch");
        marshalled.addParameter("Action", "ListDashboards");
        marshalled.addParameter("Version", "2010-08-01");
        marshalled.setHttpMethod(HttpMethodName.POST);

        // Optional parameters are only emitted when present.
        final String dashboardNamePrefix = listDashboardsRequest.getDashboardNamePrefix();
        if (dashboardNamePrefix != null) {
            marshalled.addParameter("DashboardNamePrefix", StringUtils.fromString(dashboardNamePrefix));
        }
        final String nextToken = listDashboardsRequest.getNextToken();
        if (nextToken != null) {
            marshalled.addParameter("NextToken", StringUtils.fromString(nextToken));
        }

        return marshalled;
    }
}
| aws/aws-sdk-java | aws-java-sdk-cloudwatch/src/main/java/com/amazonaws/services/cloudwatch/model/transform/ListDashboardsRequestMarshaller.java | Java | apache-2.0 | 2,151 |
/*
* Copyright 2013 Thomas Bocek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package net.tomp2p.futures;
/**
* A generic future that can be used to set a future to complete with an attachment.
*
* @author Thomas Bocek
*
* @param <K>
*/
public class FutureDone<K> extends BaseFutureImpl<FutureDone<K>> {

    /**
     * A shared, already-completed future with no attachment.
     * Declared {@code final} so the shared constant cannot be reassigned
     * (it was previously a mutable public static field, which would let any
     * caller silently swap the instance seen by everyone else).
     */
    public static final FutureDone<Void> SUCCESS = new FutureDone<Void>().done();

    /** The attached result object; guarded by {@code lock}. */
    private K object;

    /**
     * Creates a new, not-yet-completed future.
     */
    public FutureDone() {
        self(this);
    }

    /**
     * Sets this future as finished without an attachment and notifies listeners.
     *
     * @return this instance, for chaining
     */
    public FutureDone<K> done() {
        done(null);
        return this;
    }

    /**
     * Sets this future as finished and notifies listeners.
     *
     * @param object
     *            an object that can be attached; may be null
     * @return this instance, for chaining
     */
    public FutureDone<K> done(final K object) {
        synchronized (lock) {
            if (!completedAndNotify()) {
                // Already completed earlier; keep the first result untouched.
                return this;
            }
            this.object = object;
            this.type = BaseFuture.FutureType.OK;
        }
        // Listeners are notified outside the lock to avoid calling alien code
        // while holding it.
        notifyListeners();
        return this;
    }

    /**
     * @return the attached object, or null if none was attached
     */
    public K object() {
        synchronized (lock) {
            return object;
        }
    }
}
| jonaswagner/TomP2P | core/src/main/java/net/tomp2p/futures/FutureDone.java | Java | apache-2.0 | 1,873 |
package de.mhu.com.morse.channel.sql;
import java.lang.reflect.InvocationTargetException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.LinkedList;
import de.mhu.lib.ASql;
import de.mhu.lib.dtb.Sth;
import de.mhu.com.morse.aaa.IAclManager;
import de.mhu.com.morse.channel.CMql;
import de.mhu.com.morse.channel.IChannelDriverServer;
import de.mhu.com.morse.channel.IConnectionServer;
import de.mhu.com.morse.channel.IQueryFunction;
import de.mhu.com.morse.channel.IQueryWhereFunction;
import de.mhu.com.morse.mql.ICompiledQuery;
import de.mhu.com.morse.types.IAttribute;
import de.mhu.com.morse.types.IAttributeDefault;
import de.mhu.com.morse.types.ITypes;
import de.mhu.com.morse.usr.UserInformation;
import de.mhu.com.morse.utils.AttributeUtil;
import de.mhu.com.morse.utils.MorseException;
/**
 * Translates the parsed WHERE clause of a morse query into an SQL WHERE
 * clause. The {@link WhereParser} walks the compiled query and invokes these
 * callbacks; each callback appends SQL text to the caller-supplied
 * {@link StringBuffer}.
 */
public class WhereSqlListener implements WhereParser.IWhereListener {

    // Destination buffer the generated SQL fragments are appended to.
    private StringBuffer sb = null;
    // Descriptor (tables/attributes) of the query being translated.
    private Descriptor desc;
    private SqlDriver driver;
    private IConnectionServer con;
    private ITypes types;
    private IAclManager aclm;
    private UserInformation user;
    // The compiled query whose WHERE section is being walked.
    private ICompiledQuery code;
    // Tracks whether the next value inside an IN ( ... ) list needs a ',' first.
    private boolean needComma;

    /**
     * Creates a listener that writes SQL into {@code dest}.
     *
     * @param pDriver the SQL driver used for column naming and value formatting
     * @param pCon server connection, forwarded to SqlUtils helpers
     * @param pTypes type registry
     * @param pAclm ACL manager used for access checks
     * @param pUser the user on whose behalf the query runs
     * @param pDesc descriptor of the outer query
     * @param pCode the compiled query
     * @param dest buffer that receives the generated SQL
     */
    public WhereSqlListener( SqlDriver pDriver, IConnectionServer pCon, ITypes pTypes, IAclManager pAclm, UserInformation pUser, Descriptor pDesc, ICompiledQuery pCode, StringBuffer dest ) {
        desc = pDesc;
        driver = pDriver;
        con = pCon;
        types = pTypes;
        aclm = pAclm;
        user = pUser;
        code = pCode;
        sb = dest;
    }

    /**
     * Emits an "id IN ( SELECT id FROM r_<type>_<attr> WHERE ... )" sub-select
     * for a repeating attribute table and recursively translates the nested
     * WHERE clause starting at offset {@code off}.
     *
     * @param name attribute name (lower-cased internally)
     * @param off current offset into the compiled query
     * @return the offset after the consumed sub-clause
     * @throws MorseException if the attribute is unknown or ambiguous
     */
    public int appendTableSelect(String name, int off) throws MorseException {
        name = name.toLowerCase();
        if ( ! AttributeUtil.isAttrName( name, true ) )
            throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
        Object[] obj = desc.attrMap.get( name );
        if ( obj == null )
            throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
        if ( obj.length == 0 )
            throw new MorseException( MorseException.ATTR_AMBIGIOUS, name );
        // obj[3] holds the qualified column name; reduce it to the table's id column.
        String tName = (String)obj[3];
        int pos = tName.indexOf('.');
        if ( pos < 0 )
            tName = IAttribute.M_ID;
        else
            tName = tName.substring( 0, pos + 1 ) + IAttribute.M_ID;
        sb.append( driver.getColumnName( tName ) );
        sb.append( " IN ( SELECT " );
        sb.append( driver.getColumnName( IAttribute.M_ID ) );
        sb.append( " FROM r_" );
        sb.append( ((IAttribute)obj[1]).getSourceType().getName() ).append( '_' ).append( ((IAttribute)obj[1]).getName() );
        sb.append( " WHERE " );
        // Build a minimal descriptor (id attribute + repeating table) for the
        // nested WHERE translation.
        Descriptor desc2 = new Descriptor();
        Attr a = new Attr();
        a.name = IAttribute.M_ID;
        desc2.addAttr( a );
        // find all tables / types
        Table newTable = new Table();
        newTable.name = ((IAttribute)obj[1]).getSourceType().getName() + '.' + ((IAttribute)obj[1]).getName();
        desc2.addTable( newTable );
        SqlUtils.checkTables( desc2, types, con, user, aclm );
        SqlUtils.checkAttributes( con, desc2, user, aclm );
        // Skip two tokens before the nested clause — presumably '(' plus a
        // marker; TODO confirm against the compiled-query layout.
        off+=2;
        off = SqlUtils.createWhereClause( con, driver, off, code, desc2, types, sb, user, aclm );
        // sb.append( ')' );
        off++;
        return off;
    }

    /** Appends a closing bracket. */
    public void brackedClose() {
        sb.append( ')' );
    }

    /** Appends an opening bracket. */
    public void brackedOpen() {
        sb.append( '(' );
    }

    /** Appends "left=right". Operands are already SQL-formatted. */
    public void compareEQ(String left, String right) {
        sb.append( left ).append( '=' ).append( right );
    }

    /** Appends "left&gt;right". */
    public void compareGT(String left, String right) {
        sb.append( left ).append( '>' ).append( right );
    }

    /** Appends "left&gt;=right". */
    public void compareGTEQ(String left, String right) {
        sb.append( left ).append( ">=" ).append( right );
    }

    /** Opens "left IN (" and resets the comma state for the value list. */
    public void compareINBegin(String left) {
        sb.append( left ).append( " IN (" );
        needComma = false;
    }

    /** Closes the IN ( ... ) value list. */
    public void compareINEnd() {
        sb.append( ')' );
    }

    /** Appends one value of an IN list, comma-separated after the first. */
    public void compareINValue(String string) {
        if ( needComma )
            sb.append( ',' );
        needComma = true;
        sb.append( string );
    }

    /** Appends "left LIKE right". */
    public void compareLIKE(String left, String right) {
        sb.append( left ).append( " LIKE " ).append( right );
    }

    /** Appends "left&lt;right". */
    public void compareLT(String left, String right) {
        sb.append( left ).append( '<' ).append( right );
    }

    /** Appends "left&lt;=right". */
    public void compareLTEQ(String left, String right) {
        sb.append( left ).append( "<=" ).append( right );
    }

    /** Appends "left!=right". */
    public void compareNOTEQ(String left, String right) {
        sb.append( left ).append( "!=" ).append( right );
    }

    /**
     * Translates a nested sub-select ("name IN ( SELECT ... )") beginning at
     * offset {@code off} of the compiled query.
     *
     * @param name left-hand SQL operand
     * @param off offset of the sub-select body
     * @param distinct whether the inner SELECT is DISTINCT
     * @return the offset after the consumed sub-select
     * @throws MorseException if the sub-select has no attributes or fails checks
     */
    public int compareSubSelect(String name, int off, boolean distinct) throws MorseException {
        Descriptor desc2 = new Descriptor();
        off = SqlUtils.findAttributes(off, code, desc2);
        // NOTE(review): this checks the OUTER descriptor's attrSize, not desc2 —
        // looks like it should be desc2.attrSize; confirm before changing.
        if ( desc.attrSize == 0 )
            throw new MorseException( MorseException.NO_ATTRIBUTES );
        off++; // FROM
        // find all tables / types
        off = SqlUtils.findTables(off, code, desc2 );
        SqlUtils.checkTables( desc2, types, con, user, aclm );
        SqlUtils.checkAttributes( con, desc2, user, aclm );
        SqlUtils.postCheckAttributes( desc2 );
        SqlUtils.checkFunctions( con, desc2, desc2, user, driver.getAclManager() );
        StringBuffer sb2 = new StringBuffer();
        SqlUtils.createSelect( driver, desc2, sb2, distinct );
        boolean hasWhere = false;
        if ( SqlUtils.needHintWhere( driver, desc2 ) ) {
            if ( ! hasWhere ) {
                sb2.append( " WHERE (" );
            } else {
                sb2.append( " AND (" );
            }
            SqlUtils.createHintWhereClause( con, driver, desc2, driver.getTypes(), sb2, user, aclm );
            sb2.append( " ) " );
            hasWhere = true;
        }
        if ( code.getInteger( off ) == CMql.WHERE ) {
            if ( ! hasWhere ) {
                sb2.append( " WHERE (" );
            } else {
                sb2.append( " AND (" );
            }
            off++;
            off = SqlUtils.createWhereClause( con, driver, off, code, desc2, types, sb2, user, aclm );
        }
        sb.append( name ).append( " IN ( " ).append( sb2.toString() ).append( " ) ");
        off++; // )
        return off;
    }

    /**
     * Evaluates a query function for use inside the WHERE clause.
     * SQL-capable functions are rendered as SQL text; otherwise the function
     * is invoked reflectively ("append" method matching the argument classes)
     * and its single result is returned.
     *
     * @param function the function to evaluate
     * @param functionAttrs the (already resolved) argument values
     * @return SQL text or the function's single result
     * @throws MorseException if reflective invocation fails
     */
    public String executeFunction( IQueryFunction function, LinkedList<Object> functionAttrs ) throws MorseException {
        // Object[] obj = desc.attrMap.get( aName.toLowerCase() );
        if ( function instanceof IQuerySqlFunction ) {
            String[] attrs = (String[])functionAttrs.toArray( new String[ functionAttrs.size() ] );
            for ( int j = 0; j < attrs.length; j++ ) {
                attrs[j] = SqlUtils.checkAttribute( driver, null, attrs[j], desc, user );
            }
            return ((IQuerySqlFunction)function).appendSqlCommand( driver, attrs );
        } else {
            Object[] values = new Object[ functionAttrs.size() ];
            Class[] classes = new Class[ functionAttrs.size() ];
            int cnt = 0;
            for ( Iterator i = functionAttrs.iterator(); i.hasNext(); ) {
                values[cnt] = i.next();
                classes[cnt] = values[cnt].getClass();
                cnt++;
            }
            if ( function instanceof IQueryWhereFunction )
                return ((IQueryWhereFunction)function).getSingleResult( values );
            else {
                try {
                    // Reflective dispatch: exact parameter classes must match an
                    // "append" overload on the function.
                    function.getClass().getMethod( "append", classes ).invoke( function, values );
                } catch (Exception e) {
                    throw new MorseException( MorseException.ERROR, e );
                }
                return function.getResult();
            }
        }
    }

    /**
     * Materializes a repeating function result into a temporary table and
     * appends "left IN ( SELECT v FROM tmp )".
     * The temp table is registered on the descriptor so it can be dropped
     * after the query.
     *
     * @param left left-hand SQL operand (also used to find the attribute type)
     * @param function must be an {@link IQueryWhereFunction}
     * @param functionAttrs the function arguments
     * @throws MorseException on SQL errors or an incompatible function
     */
    public void appendInFunction( String left, IQueryFunction function, LinkedList<Object> functionAttrs) throws MorseException {
        Sth sth = null;
        String tmpName = null;
        try {
            Object[] obj = desc.attrMap.get( left.toLowerCase() );
            tmpName = "x_" + driver.getNextTmpId();
            String drop = driver.getDropTmpTableSql( tmpName );
            // NOTE(review): if aquireStatement() throws, sth stays null and the
            // finally-block NPE is swallowed by the empty catch; confirm intended.
            sth = driver.internatConnection.getPool().aquireStatement();
            if ( drop != null ) {
                try {
                    sth.executeUpdate( drop );
                } catch ( SQLException sqle ) {
                    // Ignored: dropping a possibly non-existent temp table.
                }
            }
            String create = new StringBuffer()
                .append( driver.getCreateTmpTablePrefixSql() )
                .append( ' ' )
                .append( tmpName )
                .append( " ( v " )
                .append( driver.getColumnDefinition( (IAttribute)obj[1], false ) )
                .append( ") ")
                .append( driver.getCreateTmpTableSuffixSql() )
                .toString();
            sth.executeUpdate( create );
            sth.executeUpdate( driver.getCreateTmpIndexSql( 1, tmpName, "v" ) );
            if ( ! ( function instanceof IQueryWhereFunction ) )
                throw new MorseException( MorseException.FUNCTION_NOT_COMPATIBLE );
            // Insert every repeating result value into the temp table.
            Iterator<String> res = ((IQueryWhereFunction)function).getRepeatingResult( (Object[])functionAttrs.toArray( new Object[ functionAttrs.size() ] ) );
            while ( res.hasNext() ) {
                String insert = "INSERT INTO " + tmpName + "(v) VALUES (" + SqlUtils.getValueRepresentation(driver, (IAttribute)obj[1], res.next() ) + ")";
                sth.executeUpdate( insert );
            }
        } catch ( Exception sqle ) {
            if ( sqle instanceof MorseException ) throw (MorseException)sqle;
            throw new MorseException( MorseException.ERROR, sqle );
        } finally {
            try { sth.release(); } catch ( Exception ex ) {}
        }
        desc.addTmpTable( tmpName );
        sb.append( " IN ( SELECT v FROM " ).append( tmpName ).append( " ) ");
    }

    /** Appends the AND operator. */
    public void operatorAnd() {
        sb.append( " AND " );
    }

    /** Appends the NOT operator. */
    public void operatorNot() {
        sb.append( " NOT " );
    }

    /** Appends the OR operator. */
    public void operatorOr() {
        sb.append( " OR " );
    }

    /**
     * Resolves an attribute name to its SQL column name.
     *
     * NOTE(review): unlike appendTableSelect/transformValue, the lookup key is
     * NOT lower-cased here; mixed-case input may miss the map — confirm.
     *
     * @param name attribute name as written in the query
     * @return the driver-specific column name
     * @throws MorseException if the attribute is unknown or ambiguous
     */
    public String transformAttribute(String name) throws MorseException {
        Object[] obj = desc.attrMap.get( name );
        if ( obj == null )
            throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
        if ( obj.length == 0 )
            throw new MorseException( MorseException.ATTR_AMBIGIOUS, name );
        String tName = (String)obj[3];
        /*
        int pos = tName.indexOf('.');
        if ( pos < 0 )
            tName = IAttribute.M_ID;
        else
            tName = tName.substring( 0, pos + 1 ) + IAttribute.M_ID;
        */
        return driver.getColumnName( tName );
        // return SqlUtils.checkAttribute( driver, null, name, desc, user );
    }

    /**
     * Validates and converts a literal value to its SQL representation for the
     * given attribute (unescaping quoted strings first). Falls back to the
     * raw text when the attribute is unknown.
     *
     * @param attrName attribute the value is compared against; may be null
     * @param name the literal as written in the query
     * @return the SQL representation of the value
     * @throws MorseException if the literal is malformed or fails validation
     */
    public Object transformValue( String attrName, String name) throws MorseException {
        if ( ! AttributeUtil.isValue( name ) )
            throw new MorseException( MorseException.WRONG_VALUE_FORMAT, name );
        if ( attrName != null ) {
            Object[] obj = desc.attrMap.get( attrName.toLowerCase() );
            if ( obj != null && obj.length != 0 && obj[1] != null ) {
                IAttribute attr = (IAttribute)obj[1];
                String value = name;
                // Strip surrounding single quotes and unescape the content.
                if ( name.length() > 1 && name.charAt( 0 ) == '\'' && name.charAt( name.length() - 1 ) == '\'' )
                    value = ASql.unescape( name.substring( 1, name.length() - 1 ) );
                if ( ! attr.getAco().validate( value ) )
                    throw new MorseException( MorseException.ATTR_VALUE_NOT_VALIDE, new String[] { attrName, name } );
                return SqlUtils.getValueRepresentation( driver, attr, value );
            } else {
                // Unknown attribute: try the built-in default attribute set.
                IAttribute attr = IAttributeDefault.getAttribute( attrName );
                if ( attr != null )
                    return SqlUtils.getValueRepresentation( driver, attr, name );
            }
        }
        return name;
    }
}
| mhus/mhus-inka | de.mhus.app.inka.morse.server/src/de/mhu/com/morse/channel/sql/WhereSqlListener.java | Java | apache-2.0 | 10,620 |
/*
* Copyright 2016 Code Above Lab LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeabovelab.dm.common.utils;
import java.util.function.Function;
import java.util.function.IntPredicate;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*/
public class StringUtils {

    private StringUtils() {
        // Utility class; not instantiable.
    }

    /**
     * Return the part of 's' before the first occurrence of 'c'.
     *
     * @param s string which must contain char 'c'
     * @param c delimiter char
     * @return part of 's' before 'c'
     * @throws IllegalArgumentException when 's' does not contain 'c'
     */
    public static String before(String s, char c) {
        return beforeOr(s, c, () -> {
            // we throw exception for preserve old behavior
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        });
    }

    /**
     * Return part of 's' before 'c'
     * @param s string which may contain char 'c'
     * @param c char
     * @param ifNone supplier of value which is used when 'c' is not present in 's' (null not allowed)
     * @return part of 's' before 'c' or 'ifNone.get()'
     */
    public static String beforeOr(String s, char c, Supplier<String> ifNone) {
        int pos = s.indexOf(c);
        if(pos < 0) {
            return ifNone.get();
        }
        return s.substring(0, pos);
    }

    /**
     * Return the part of 's' after the first occurrence of 'c'.
     *
     * @param s string which must contain char 'c'
     * @param c delimiter char
     * @return part of 's' after 'c'
     * @throws IllegalArgumentException when 's' does not contain 'c'
     */
    public static String after(String s, char c) {
        int pos = s.indexOf(c);
        if(pos < 0) {
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        }
        return s.substring(pos + 1);
    }

    /**
     * Return the part of 's' before the last occurrence of 'c'.
     *
     * @param s string which must contain char 'c'
     * @param c delimiter char
     * @return part of 's' before the last 'c'
     * @throws IllegalArgumentException when 's' does not contain 'c'
     */
    public static String beforeLast(String s, char c) {
        int pos = s.lastIndexOf(c);
        if(pos < 0) {
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        }
        return s.substring(0, pos);
    }

    /**
     * Return the part of 's' after the last occurrence of 'c'.
     *
     * @param s string which must contain char 'c'
     * @param c delimiter char
     * @return part of 's' after the last 'c'
     * @throws IllegalArgumentException when 's' does not contain 'c'
     */
    public static String afterLast(String s, char c) {
        int pos = s.lastIndexOf(c);
        if(pos < 0) {
            throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
        }
        return s.substring(pos + 1);
    }

    /**
     * Split string into two pieces at last appearing of delimiter.
     * @param s string
     * @param c delimiter
     * @return null if string does not contains delimiter
     */
    public static String[] splitLast(String s, char c) {
        int pos = s.lastIndexOf(c);
        if(pos < 0) {
            return null;
        }
        return new String[] {s.substring(0, pos), s.substring(pos + 1)};
    }

    /**
     * Split string into two pieces at last appearing of delimiter.
     * @param s string
     * @param delimiter delimiter
     * @return null if string does not contains delimiter
     */
    public static String[] splitLast(String s, String delimiter) {
        int pos = s.lastIndexOf(delimiter);
        if(pos < 0) {
            return null;
        }
        return new String[] {s.substring(0, pos), s.substring(pos + delimiter.length())};
    }

    /**
     * Return string which contains only chars for which charJudge give true.
     * @param src source string, may be null
     * @param charJudge predicate which consume codePoint (not chars)
     * @return string, null when incoming string is null
     */
    public static String retain(String src, IntPredicate charJudge) {
        if (src == null) {
            return null;
        }
        final int length = src.length();
        StringBuilder sb = new StringBuilder(length);
        int i = 0;
        while (i < length) {
            int cp = src.codePointAt(i);
            if(charJudge.test(cp)) {
                sb.appendCodePoint(cp);
            }
            // Advance by the full code point width; stepping one char at a
            // time duplicated supplementary (surrogate-pair) characters.
            i += Character.charCount(cp);
        }
        return sb.toString();
    }

    /**
     * Retain only characters which is {@link #isAz09(int)}
     * @param src source string, may be null
     * @return string, null when incoming string is null
     */
    public static String retainAz09(String src) {
        return retain(src, StringUtils::isAz09);
    }

    /**
     * Retain chars which is acceptable as file name or part of url on most operation systems. <p/>
     * It: <code>'A'-'z', '0'-'9', '_', '-', '.'</code>
     * @param src source string, may be null
     * @return string, null when incoming string is null
     */
    public static String retainForFileName(String src) {
        // Bug fix: previously used isAz09, which stripped '-', '_' and '.'
        // contrary to this method's documented contract.
        return retain(src, StringUtils::isForFileName);
    }

    /**
     * Test that specified codePoint is an ASCII letter or digit
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isAz09(int cp) {
        return cp >= '0' && cp <= '9' ||
               cp >= 'a' && cp <= 'z' ||
               cp >= 'A' && cp <= 'Z';
    }

    /**
     * Test that specified codePoint is an ASCII letter, digit or hyphen '-'.
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isAz09Hyp(int cp) {
        return isAz09(cp) || cp == '-';
    }

    /**
     * Test that specified codePoint is an ASCII letter, digit or hyphen '-', '_', ':', '.'. <p/>
     * It common matcher that limit alphabet acceptable for our system IDs.
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isId(int cp) {
        return isAz09(cp) || cp == '-' || cp == '_' || cp == ':' || cp == '.';
    }

    /**
     * Test that specified codePoint is an ASCII hexadecimal digit.
     * @param cp codePoint
     * @return true for '0'-'9', 'a'-'f', 'A'-'F'
     */
    public static boolean isHex(int cp) {
        return cp >= '0' && cp <= '9' ||
               cp >= 'a' && cp <= 'f' ||
               cp >= 'A' && cp <= 'F';
    }

    /**
     * Chars which is acceptable as file name or part of url on most operation systems. <p/>
     * It: <code>'A'-'z', '0'-'9', '_', '-', '.'</code>
     * @param cp codePoint
     * @return true for specified chars
     */
    public static boolean isForFileName(int cp) {
        return isAz09(cp) || cp == '-' || cp == '_' || cp == '.';
    }

    /**
     * Invoke {@link Object#toString()} on specified argument, if arg is null then return null.
     * @param o any object, may be null
     * @return null or result of o.toString()
     */
    public static String valueOf(Object o) {
        return o == null? null : o.toString();
    }

    /**
     * Test that each char of specified string match for predicate. <p/>
     * Note that it method does not support unicode, because it usual applicable only for match letters that placed under 128 code.
     * @param str string
     * @param predicate char matcher
     * @return true if all chars match; false for the empty string
     */
    public static boolean match(String str, IntPredicate predicate) {
        final int len = str.length();
        if(len == 0) {
            return false;
        }
        for(int i = 0; i < len; i++) {
            if(!predicate.test(str.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Is a <code>match(str, StringUtils::isAz09);</code>.
     * @param str string
     * @return true if string match [A-Za-z0-9]+
     */
    public static boolean matchAz09(String str) {
        return match(str, StringUtils::isAz09);
    }

    /**
     * Is a <code>match(str, StringUtils::isAz09Hyp);</code>.
     * @param str string
     * @return true if string match [A-Za-z0-9-]+
     */
    public static boolean matchAz09Hyp(String str) {
        return match(str, StringUtils::isAz09Hyp);
    }

    /**
     * Is a <code>match(str, StringUtils::isId);</code>.
     * @param str string
     * @return true if string match [A-Za-z0-9-_:.]+
     */
    public static boolean matchId(String str) {
        return match(str, StringUtils::isId);
    }

    /**
     * Is a <code>match(str, StringUtils::isHex);</code>.
     * @param str string
     * @return true if string consists only of hex digits
     */
    public static boolean matchHex(String str) {
        return match(str, StringUtils::isHex);
    }

    /**
     * Replace string with pattern obtaining replacement values through handler function. <p/>
     * Note that it differ from usual Pattern behavior when it process replacement for group references,
     * this code do nothing with replacement.
     * @param pattern pattern
     * @param src source string
     * @param handler function which take matched part of source string and return replacement value, must never return null
     * @return result string
     */
    public static String replace(Pattern pattern, String src, Function<String, String> handler) {
        StringBuilder sb = null;
        Matcher matcher = pattern.matcher(src);
        int pos = 0;
        while(matcher.find()) {
            if(sb == null) {
                // replacement can be a very rare operation, and we not need excess string buffer
                sb = new StringBuilder();
            }
            String expr = matcher.group();
            String replacement = handler.apply(expr);
            sb.append(src, pos, matcher.start());
            sb.append(replacement);
            pos = matcher.end();
        }
        if(sb == null) {
            // no match found: return the source unchanged
            return src;
        }
        sb.append(src, pos, src.length());
        return sb.toString();
    }
}
| codeabovelab/haven-platform | common/common-utils/src/main/java/com/codeabovelab/dm/common/utils/StringUtils.java | Java | apache-2.0 | 9,252 |
from django.db import models
from django.utils.html import format_html
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.fields import ImageField
from sno.models import Sno
class SnoGalleries(models.Model):
    # A single photo in the gallery of an SNO (student scientific society).
    class Meta:
        verbose_name = 'ะคะพัะพะณัะฐัะธั ะฒ ะณะฐะปะตัะตะธ ะกะะ'
        verbose_name_plural = 'ะคะพัะพะณัะฐัะธะธ ะฒ ะณะฐะปะตัะตะธ ะกะะ'
    # Optional human-readable title of the photo.
    name = models.CharField('ะะฐะทะฒะฐะฝะธะต ัะพัะพ', max_length=255, blank=True, null=True)
    # The image file itself (sorl-thumbnail ImageField).
    photo = ImageField(verbose_name='ะคะพัะพ', max_length=255)
    # Optional free-form description.
    description = models.TextField('ะะฟะธัะฐะฝะธะต', blank=True, null=True)
    # Owning SNO; deleting the SNO deletes its gallery photos (CASCADE).
    sno = models.ForeignKey(Sno, verbose_name='ะกะะ', on_delete=models.CASCADE)
    # Set once on creation (auto_now_add), never updated afterwards.
    date_created = models.DateField('ะะฐัะฐ', auto_now_add=True)
    def photo_preview(self):
        # Admin helper: renders a 75x75 center-cropped thumbnail that links to
        # the full-size image; format_html escapes the interpolated URLs.
        img = get_thumbnail(self.photo, '75x75', crop='center')
        return format_html('<a href="{}" target="_blank"><img style="width:75px; height:75px;" src="{}"></a>',
                           self.photo.url, img.url)
    # Column header shown in the Django admin list view.
    photo_preview.short_description = 'ะคะพัะพ'
    def __str__(self):
        # E.g. "Photo name (SNO short name)".
        return '%s (%s)' % (self.name, self.sno.short_name)
| glad-web-developer/zab_sno | src/sno_galleries/models.py | Python | apache-2.0 | 1,164 |
/*
* Copyright 2015 Namihiko Matsumura (https://github.com/n-i-e/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.n_i_e.deepfolderview;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.beans.PojoProperties;
import org.eclipse.core.databinding.observable.Realm;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.jface.databinding.swt.SWTObservables;
import org.eclipse.jface.databinding.swt.WidgetProperties;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.ProgressBar;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.wb.swt.SWTResourceManager;
import com.github.n_i_e.dirtreedb.Assertion;
import com.github.n_i_e.dirtreedb.DBPathEntry;
import com.github.n_i_e.dirtreedb.PathEntry;
import com.github.n_i_e.dirtreedb.lazy.LazyRunnable;
import com.github.n_i_e.dirtreedb.lazy.LazyUpdater;
import com.github.n_i_e.dirtreedb.lazy.LazyUpdater.Dispatcher;
import com.ibm.icu.text.NumberFormat;
import com.ibm.icu.text.SimpleDateFormat;
public class SwtFileFolderMenu extends SwtCommonFileFolderMenu {
// Kept only because the WindowBuilder-generated initDataBindings assigns it;
// not read directly — TODO confirm against the generated code below.
@SuppressWarnings("unused")
private DataBindingContext m_bindingContext;

protected Shell shell;
private FormToolkit formToolkit = new FormToolkit(Display.getDefault());
private Text txtLocation;            // editable location/path field in the toolbar
private Composite compositeToolBar;  // container for the navigation buttons
private Table table;                 // main file/folder listing
private Label lblStatusBar;
private Composite compositeStatusBar;
private ProgressBar progressBar;

// Widget accessors; presumably required by the SwtCommonFileFolderMenu base
// class to drive status/progress updates — confirm against the base class.
@Override protected Shell getShell() { return shell; }
@Override protected Table getTable() { return table; }
@Override protected Label getLblStatusBar() { return lblStatusBar; }
@Override protected ProgressBar getProgressBar() { return progressBar; }
/**
 * Application entry point: installs the default data-binding realm for the
 * SWT display, then creates and runs the window. Any startup failure is
 * printed to stderr.
 *
 * @param args ignored
 */
public static void main(String[] args) {
    final Display display = Display.getDefault();
    final Runnable bootstrap = new Runnable() {
        public void run() {
            try {
                final SwtFileFolderMenu window = new SwtFileFolderMenu();
                window.open();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    Realm.runWithDefault(SWTObservables.getRealm(display), bootstrap);
}
/**
 * Pumps the SWT event loop until the shell is disposed (window closed).
 * Sleeps whenever there is no pending event to dispatch.
 */
public void open() {
    final Display display = Display.getDefault();
    while (!shell.isDisposed()) {
        final boolean dispatched = display.readAndDispatch();
        if (!dispatched) {
            display.sleep();
        }
    }
}
/**
 * Builds the window widgets, shows the shell, and initializes the
 * navigation history with a single default location.
 */
public SwtFileFolderMenu() {
    createContents();
    shell.open();
    shell.layout();
    // 'location' is presumably an inherited field from
    // SwtCommonFileFolderMenu used as back/forward history — confirm.
    location = new NavigatableList<Location>();
    location.add(new Location());
}
/**
* Create contents of the window.
*/
private void createContents() {
shell = new Shell();
shell.addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent arg0) {
Point p = shell.getSize();
PreferenceRW.setSwtFileFolderMenuWindowWidth(p.x);
PreferenceRW.setSwtFileFolderMenuWindowHeight(p.y);
}
});
shell.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/drive-harddisk.png"));
shell.setMinimumSize(new Point(300, 200));
shell.setSize(PreferenceRW.getSwtFileFolderMenuWindowWidth(), PreferenceRW.getSwtFileFolderMenuWindowHeight());
GridLayout gl_shell = new GridLayout(1, false);
gl_shell.verticalSpacing = 6;
gl_shell.marginWidth = 3;
gl_shell.marginHeight = 3;
gl_shell.horizontalSpacing = 6;
shell.setLayout(gl_shell);
Menu menu = new Menu(shell, SWT.BAR);
shell.setMenuBar(menu);
MenuItem mntmFile = new MenuItem(menu, SWT.CASCADE);
mntmFile.setText(Messages.mntmFile_text);
Menu menuFile = new Menu(mntmFile);
mntmFile.setMenu(menuFile);
MenuItem mntmOpen_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpen_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
mntmOpen_1.setText(Messages.mntmOpen_text);
MenuItem mntmOpenInNew_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpenInNew_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenInNewWindowSelected(e);
}
});
mntmOpenInNew_1.setText(Messages.mntmOpenInNewWindow_text);
MenuItem mntmOpenDuplicateDetails_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpenDuplicateDetails_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenDuplicateDetailsSelected(e);
}
});
mntmOpenDuplicateDetails_1.setText(Messages.mntmOpenDuplicateDetails_text);
MenuItem mntmCopyTo_2 = new MenuItem(menuFile, SWT.NONE);
mntmCopyTo_2.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo_2.setText(Messages.mntmCopyTo_text);
MenuItem mntmClose = new MenuItem(menuFile, SWT.NONE);
mntmClose.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCloseSelected();
}
});
mntmClose.setText(Messages.mntmClose_text);
MenuItem mntmQuit = new MenuItem(menuFile, SWT.NONE);
mntmQuit.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onQuitSelected();
}
});
mntmQuit.setText(Messages.mntmQuit_text);
MenuItem mntmEdit = new MenuItem(menu, SWT.CASCADE);
mntmEdit.setText(Messages.mntmEdit_text);
Menu menuEdit = new Menu(mntmEdit);
mntmEdit.setMenu(menuEdit);
MenuItem mntmRun_1 = new MenuItem(menuEdit, SWT.NONE);
mntmRun_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onRunSelected();
}
});
mntmRun_1.setText(Messages.mntmRun_text);
MenuItem mntmCopyAsString_1 = new MenuItem(menuEdit, SWT.NONE);
mntmCopyAsString_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyAsStringSelected();
}
});
mntmCopyAsString_1.setText(Messages.mntmCopyAsString_text);
MenuItem mntmCopyTo_1 = new MenuItem(menuEdit, SWT.NONE);
mntmCopyTo_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo_1.setText(Messages.mntmCopyTo_text);
MenuItem mntmVisibility = new MenuItem(menu, SWT.CASCADE);
mntmVisibility.setText(Messages.mntmVisibility_text);
Menu menuVisibility = new Menu(mntmVisibility);
mntmVisibility.setMenu(menuVisibility);
final MenuItem mntmFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmFoldersVisible.setSelection(true);
mntmFoldersVisible.setText(Messages.mntmFoldersVisible_text);
final MenuItem mntmFilesVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmFilesVisible.setSelection(true);
mntmFilesVisible.setText(Messages.mntmFilesVisible_text);
final MenuItem mntmCompressedFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmCompressedFoldersVisible.setSelection(true);
mntmCompressedFoldersVisible.setText(Messages.mntmCompressedFoldersVisible_text);
final MenuItem mntmCompressedFilesVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmCompressedFilesVisible.setSelection(true);
mntmCompressedFilesVisible.setText(Messages.mntmCompressedFilesVisible_text);
MenuItem mntmHelp = new MenuItem(menu, SWT.CASCADE);
mntmHelp.setText(Messages.mntmHelp_text);
Menu menuHelp = new Menu(mntmHelp);
mntmHelp.setMenu(menuHelp);
MenuItem mntmOpenSourceLicenses = new MenuItem(menuHelp, SWT.NONE);
mntmOpenSourceLicenses.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
new SwtOpenSourceLicenses(shell, SWT.TITLE|SWT.MIN|SWT.MAX|SWT.CLOSE).open();
}
});
mntmOpenSourceLicenses.setText(Messages.mntmOpenSourceLicenses_text);
compositeToolBar = new Composite(shell, SWT.NONE);
compositeToolBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
compositeToolBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
compositeToolBar.setFont(SWTResourceManager.getFont("Meiryo UI", 12, SWT.NORMAL));
GridLayout gl_compositeToolBar = new GridLayout(5, false);
gl_compositeToolBar.horizontalSpacing = 0;
gl_compositeToolBar.verticalSpacing = 0;
gl_compositeToolBar.marginWidth = 0;
gl_compositeToolBar.marginHeight = 0;
compositeToolBar.setLayout(gl_compositeToolBar);
formToolkit.adapt(compositeToolBar);
formToolkit.paintBordersFor(compositeToolBar);
Button btnLeft = new Button(compositeToolBar, SWT.NONE);
btnLeft.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-previous.png"));
btnLeft.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onNavigatePreviousSelected(e);
}
});
btnLeft.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(btnLeft, true, true);
Button btnRight = new Button(compositeToolBar, SWT.NONE);
btnRight.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-next.png"));
btnRight.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onNavigateNextSelected(e);
}
});
btnRight.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(btnRight, true, true);
Button btnUp = new Button(compositeToolBar, SWT.NONE);
btnUp.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-up.png"));
btnUp.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onUpperFolderSelected(e);
}
});
formToolkit.adapt(btnUp, true, true);
txtLocation = new Text(compositeToolBar, SWT.BORDER);
txtLocation.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent arg0) {
onLocationModified(arg0);
}
});
GridData gd_txtLocation = new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1);
gd_txtLocation.widthHint = 200;
txtLocation.setLayoutData(gd_txtLocation);
txtLocation.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(txtLocation, true, true);
Button btnRefresh = new Button(compositeToolBar, SWT.NONE);
btnRefresh.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
refresh();
}
});
btnRefresh.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/view-refresh.png"));
formToolkit.adapt(btnRefresh, true, true);
final TableViewer tableViewer = new TableViewer(shell, SWT.MULTI | SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL);
table = tableViewer.getTable();
table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
//table = new Table(scrolledComposite, SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL);
table.setHeaderVisible(true);
table.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onTableSelected(e);
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
formToolkit.adapt(table);
formToolkit.paintBordersFor(table);
final TableColumn tblclmnPath = new TableColumn(table, SWT.LEFT);
tblclmnPath.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnPath);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnPathSelected(tblclmnPath, e);
}
});
tblclmnPath.setWidth(230);
tblclmnPath.setText(Messages.tblclmnPath_text);
setTableSortDirection(tblclmnPath, "path", order);
final TableColumn tblclmnDateLastModified = new TableColumn(table, SWT.LEFT);
tblclmnDateLastModified.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDateLastModified);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDateLastModifiedSelected(tblclmnDateLastModified, e);
}
});
tblclmnDateLastModified.setWidth(136);
tblclmnDateLastModified.setText(Messages.tblclmnDateLastModified_text);
setTableSortDirection(tblclmnDateLastModified, "datelastmodified", order);
final TableColumn tblclmnSize = new TableColumn(table, SWT.RIGHT);
tblclmnSize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnSize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnSizeSelected(tblclmnSize, e);
}
});
tblclmnSize.setWidth(110);
tblclmnSize.setText(Messages.tblclmnSize_text);
setTableSortDirection(tblclmnSize, "size", order);
final TableColumn tblclmnCompressedsize = new TableColumn(table, SWT.RIGHT);
tblclmnCompressedsize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnCompressedsize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnCompressedsizeSelected(tblclmnCompressedsize, e);
}
});
tblclmnCompressedsize.setWidth(110);
tblclmnCompressedsize.setText(Messages.tblclmnCompressedesize_text);
setTableSortDirection(tblclmnCompressedsize, "compressedsize", order);
final TableColumn tblclmnDuplicate = new TableColumn(table, SWT.NONE);
tblclmnDuplicate.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDuplicate);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDuplicateSelected(tblclmnDuplicate, e);
}
});
tblclmnDuplicate.setWidth(35);
tblclmnDuplicate.setText(Messages.tblclmnDuplicate_text);
setTableSortDirection(tblclmnDuplicate, "duplicate", order);
final TableColumn tblclmnDedupablesize = new TableColumn(table, SWT.RIGHT);
tblclmnDedupablesize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDedupablesize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDedupablesizeSelected(tblclmnDedupablesize, e);
}
});
tblclmnDedupablesize.setWidth(110);
tblclmnDedupablesize.setText(Messages.tblclmnDedupablesize_text);
setTableSortDirection(tblclmnDedupablesize, "dedupablesize", order);
Menu popupMenu = new Menu(table);
table.setMenu(popupMenu);
MenuItem mntmRun = new MenuItem(popupMenu, SWT.NONE);
mntmRun.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onRunSelected();
}
});
mntmRun.setText(Messages.mntmRun_text);
MenuItem mntmOpen = new MenuItem(popupMenu, SWT.NONE);
mntmOpen.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
mntmOpen.setText(Messages.mntmOpen_text);
MenuItem mntmOpenInNew = new MenuItem(popupMenu, SWT.NONE);
mntmOpenInNew.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenInNewWindowSelected(e);
}
});
mntmOpenInNew.setText(Messages.mntmOpenInNewWindow_text);
MenuItem mntmOpenDuplicateDetails = new MenuItem(popupMenu, SWT.NONE);
mntmOpenDuplicateDetails.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenDuplicateDetailsSelected(e);
}
});
mntmOpenDuplicateDetails.setText(Messages.mntmOpenDuplicateDetails_text);
MenuItem mntmCopyAsString = new MenuItem(popupMenu, SWT.NONE);
mntmCopyAsString.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyAsStringSelected();
}
});
mntmCopyAsString.setText(Messages.mntmCopyAsString_text);
MenuItem mntmCopyTo = new MenuItem(popupMenu, SWT.NONE);
mntmCopyTo.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo.setText(Messages.mntmCopyTo_text);
MenuItem menuItem = new MenuItem(popupMenu, SWT.SEPARATOR);
menuItem.setText("Visibility");
final MenuItem mntmFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmFoldersVisible_1.setSelection(true);
mntmFoldersVisible_1.setText(Messages.mntmFoldersVisible_text);
mntmFoldersVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFoldersVisible.setSelection(mntmFoldersVisible_1.getSelection());
onFoldersVisibleChecked(mntmFoldersVisible.getSelection());
}
});
final MenuItem mntmFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmFilesVisible_1.setSelection(true);
mntmFilesVisible_1.setText(Messages.mntmFilesVisible_text);
mntmFilesVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFilesVisible.setSelection(mntmFilesVisible_1.getSelection());
onFilesVisibleChecked(mntmFilesVisible.getSelection());
}
});
final MenuItem mntmCompressedFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmCompressedFoldersVisible_1.setSelection(true);
mntmCompressedFoldersVisible_1.setText(Messages.mntmCompressedFoldersVisible_text);
mntmCompressedFoldersVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFoldersVisible.setSelection(mntmCompressedFoldersVisible_1.getSelection());
onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection());
}
});
final MenuItem mntmCompressedFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmCompressedFilesVisible_1.setSelection(true);
mntmCompressedFilesVisible_1.setText(Messages.mntmCompressedFilesVisible_text);
mntmCompressedFilesVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFilesVisible.setSelection(mntmCompressedFilesVisible_1.getSelection());
onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection());
}
});
mntmFoldersVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFoldersVisible_1.setSelection(mntmFoldersVisible.getSelection());
onFoldersVisibleChecked(mntmFoldersVisible.getSelection());
}
});
mntmFilesVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFilesVisible_1.setSelection(mntmFilesVisible.getSelection());
onFilesVisibleChecked(mntmFilesVisible.getSelection());
}
});
mntmCompressedFoldersVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFoldersVisible_1.setSelection(mntmCompressedFoldersVisible.getSelection());
onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection());
}
});
mntmCompressedFilesVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFilesVisible_1.setSelection(mntmCompressedFilesVisible.getSelection());
onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection());
}
});
compositeStatusBar = new Composite(shell, SWT.NONE);
compositeStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
compositeStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
GridLayout gl_compositeStatusBar = new GridLayout(2, false);
gl_compositeStatusBar.marginWidth = 0;
gl_compositeStatusBar.marginHeight = 0;
compositeStatusBar.setLayout(gl_compositeStatusBar);
formToolkit.adapt(compositeStatusBar);
formToolkit.paintBordersFor(compositeStatusBar);
lblStatusBar = new Label(compositeStatusBar, SWT.NONE);
lblStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
lblStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
formToolkit.adapt(lblStatusBar, true, true);
lblStatusBar.setText("");
progressBar = new ProgressBar(compositeStatusBar, SWT.NONE);
formToolkit.adapt(progressBar, true, true);
m_bindingContext = initDataBindings();
}
/*
* event handlers
*/
/**
 * Copies the paths of all selected entries to the system clipboard,
 * one path per line.
 */
protected void onCopyAsStringSelected() {
    StringBuilder joined = new StringBuilder();
    for (PathEntry entry: getSelectedPathEntries()) {
        if (joined.length() > 0) {
            joined.append("\n");
        }
        joined.append(entry.getPath());
    }
    StringSelection selection = new StringSelection(joined.toString());
    Toolkit.getDefaultToolkit().getSystemClipboard().setContents(selection, selection);
}
/**
 * Opens the entry currently selected in the table; does nothing when
 * there is no selection.
 *
 * @param e the selection event (unused)
 */
protected void onOpenSelected(SelectionEvent e) {
    DBPathEntry selected = getSelectedPathEntry();
    if (selected == null) {
        return;
    }
    setLocationAndRefresh(selected);
}
/**
 * Opens a new file/folder window at the selected entry, falling back to the
 * current location's entry, path string, search string, or path id — in that
 * order of preference.
 *
 * @param e the selection event (unused)
 */
protected void onOpenInNewWindowSelected(SelectionEvent e) {
    DBPathEntry target = getSelectedPathEntry();
    if (target == null) {
        // Nothing selected: fall back to the entry of the current location.
        target = location.get().getPathEntry();
    }
    if (target != null) {
        new SwtFileFolderMenu().setLocationAndRefresh(target);
    } else if (location.get().getPathString() != null) {
        new SwtFileFolderMenu().setLocationAndRefresh(location.get().getPathString());
    } else if (location.get().getSearchString() != null) {
        new SwtFileFolderMenu().setLocationAndRefresh(location.get().getSearchString());
    } else if (location.get().getPathId() != 0L) {
        new SwtFileFolderMenu().setLocationAndRefresh(location.get().getPathId());
    }
}
/**
 * Opens a duplicate-details window at the selected entry, falling back to the
 * current location's entry, path string, search string, or path id — in that
 * order of preference. Mirrors onOpenInNewWindowSelected but targets
 * SwtDuplicateMenu.
 *
 * @param e the selection event (unused)
 */
protected void onOpenDuplicateDetailsSelected(SelectionEvent e) {
    DBPathEntry target = getSelectedPathEntry();
    if (target == null) {
        // Nothing selected: fall back to the entry of the current location.
        target = location.get().getPathEntry();
    }
    if (target != null) {
        new SwtDuplicateMenu().setLocationAndRefresh(target);
    } else if (location.get().getPathString() != null) {
        new SwtDuplicateMenu().setLocationAndRefresh(location.get().getPathString());
    } else if (location.get().getSearchString() != null) {
        new SwtDuplicateMenu().setLocationAndRefresh(location.get().getSearchString());
    } else if (location.get().getPathId() != 0L) {
        new SwtDuplicateMenu().setLocationAndRefresh(location.get().getPathId());
    }
}
/**
 * Steps back in the navigation history and shows the resulting location.
 *
 * @param e the selection event (unused)
 */
protected void onNavigatePreviousSelected(SelectionEvent e) {
    location.navigatePrevious();
    setLocationAndRefresh(location.get());
}
/**
 * Steps forward in the navigation history and shows the resulting location.
 *
 * @param e the selection event (unused)
 */
protected void onNavigateNextSelected(SelectionEvent e) {
    location.navigateNext();
    setLocationAndRefresh(location.get());
}
/**
 * Navigates to the parent of the current entry. If the current location has no
 * resolved entry yet (or the entry has no parent), only a status-bar hint is
 * shown.
 *
 * @param e the selection event (unused)
 */
protected void onUpperFolderSelected(SelectionEvent e) {
    DBPathEntry current = location.get().getPathEntry();
    if (current == null || current.getParentId() == 0L) {
        writeStatusBar("Not ready for going up operation; be patient.");
        return;
    }
    setLocationAndRefresh(current.getParentId());
}
/**
 * Invoked whenever the location text box changes (typed by the user or set
 * programmatically via setLocationAndRefresh(String)). Synchronizes the
 * navigation history with the new text and triggers a refresh.
 *
 * @param arg0 the modify event (unused; the text is read from txtLocation)
 */
protected void onLocationModified(ModifyEvent arg0) {
    String newstring = txtLocation.getText();
    Assertion.assertNullPointerException(newstring != null);
    writeStatusBar(String.format("New path string is: %s", newstring));
    shell.setText(newstring);
    Location oldloc = location.get();
    if (newstring.equals(oldloc.getPathString())) {
        // noop: text already matches the current path string
    } else if (newstring.equals(oldloc.getSearchString())) {
        // Text matches the current search string: drop the stale path fields so
        // the location is treated purely as a search.
        oldloc.setPathEntry(null);
        oldloc.setPathId(0L);
        oldloc.setPathString(null);
    } else {
        // Genuinely new text: push a fresh location onto the history.
        Location newloc = new Location();
        newloc.setPathString(newstring);
        location.add(newloc);
    }
    refresh();
}
protected void onTableSelected(SelectionEvent e) {}
// Current ORDER BY clause for the table query; persisted via PreferenceRW across sessions.
private String order = PreferenceRW.getSwtFileFolderMenuSortOrder();
// Visibility filter flags toggled by the "Visibility" menu items; each one enables
// a directory "type" value in the query (0=folder, 1=file, 2=compressed folder,
// 3=compressed file) — see Scenario.run().
private boolean isFolderChecked = true;
private boolean isFileChecked = true;
private boolean isCompressedFolderChecked = true;
private boolean isCompressedFileChecked = true;
/**
 * Persists the new sort order (ascending or descending by path, depending on
 * the table's current sort direction) and re-runs the query.
 */
protected void onTblclmnPathSelected(TableColumn tblclmnPath, SelectionEvent e) {
    order = table.getSortDirection() == SWT.UP ? "path" : "path DESC";
    PreferenceRW.setSwtFileFolderMenuSortOrder(order);
    refresh();
}
/**
 * Persists the new sort order (ascending or descending by last-modified date)
 * and re-runs the query.
 */
protected void onTblclmnDateLastModifiedSelected(TableColumn tblclmnDateLastModified, SelectionEvent e) {
    order = table.getSortDirection() == SWT.UP ? "datelastmodified" : "datelastmodified DESC";
    PreferenceRW.setSwtFileFolderMenuSortOrder(order);
    refresh();
}
/**
 * Persists the new sort order (ascending or descending by size) and re-runs
 * the query.
 */
protected void onTblclmnSizeSelected(TableColumn tblclmnSize, SelectionEvent e) {
    order = table.getSortDirection() == SWT.UP ? "size" : "size DESC";
    PreferenceRW.setSwtFileFolderMenuSortOrder(order);
    refresh();
}
/**
 * Persists the new sort order (ascending or descending by compressed size) and
 * re-runs the query.
 */
protected void onTblclmnCompressedsizeSelected(TableColumn tblclmnCompressedesize, SelectionEvent e) {
    order = table.getSortDirection() == SWT.UP ? "compressedsize" : "compressedsize DESC";
    PreferenceRW.setSwtFileFolderMenuSortOrder(order);
    refresh();
}
/**
 * Persists the new sort order (ascending or descending by duplicate count) and
 * re-runs the query.
 */
protected void onTblclmnDuplicateSelected(TableColumn tblclmnDuplicate, SelectionEvent e) {
    order = table.getSortDirection() == SWT.UP ? "duplicate" : "duplicate DESC";
    PreferenceRW.setSwtFileFolderMenuSortOrder(order);
    refresh();
}
/**
 * Persists the new sort order (ascending or descending by dedupable size) and
 * re-runs the query.
 */
protected void onTblclmnDedupablesizeSelected(TableColumn tblclmnDedupablesize, SelectionEvent e) {
    order = table.getSortDirection() == SWT.UP ? "dedupablesize" : "dedupablesize DESC";
    PreferenceRW.setSwtFileFolderMenuSortOrder(order);
    refresh();
}
/**
 * Applies the "folders visible" checkbox state and re-runs the query.
 *
 * @param checked whether plain folders (type=0) should be listed
 */
protected void onFoldersVisibleChecked(boolean checked) {
    isFolderChecked = checked;
    refresh();
}
/**
 * Applies the "files visible" checkbox state and re-runs the query.
 *
 * @param checked whether plain files (type=1) should be listed
 */
protected void onFilesVisibleChecked(boolean checked) {
    isFileChecked = checked;
    refresh();
}
/**
 * Applies the "compressed folders visible" checkbox state and re-runs the query.
 *
 * @param checked whether compressed folders (type=2) should be listed
 */
protected void onCompressedFoldersVisibleChecked(boolean checked) {
    isCompressedFolderChecked = checked;
    refresh();
}
/**
 * Applies the "compressed files visible" checkbox state and re-runs the query.
 *
 * @param checked whether compressed files (type=3) should be listed
 */
protected void onCompressedFilesVisibleSelected(boolean checked) {
    isCompressedFileChecked = checked;
    refresh();
}
/**
 * Sets the location text box on the UI thread. The ModifyListener installed on
 * txtLocation then fires onLocationModified(), which updates the history and
 * refreshes the view.
 *
 * @param text the new location text (a path, search string, or "")
 */
public void setLocationAndRefresh(final String text) {
    Display.getDefault().asyncExec(new Runnable() {
        public void run() {
            txtLocation.setText(text); // onLocationModified() is automatically called here.
        }
    });
}
/*
* setLocationAndRefresh and related
*/
/**
 * Navigates to the given database entry: reconciles the navigation history with
 * the entry, then pushes its path into the location text box (which triggers
 * the actual refresh via the modify listener).
 *
 * @param entry the entry to display; must not be null
 */
public void setLocationAndRefresh(final DBPathEntry entry) {
    Assertion.assertNullPointerException(entry != null);
    Assertion.assertNullPointerException(location != null);
    Location oldloc = location.get();
    if (oldloc.getPathEntry() != null && oldloc.getPathEntry().getPathId() == entry.getPathId()) {
        // noop: this entry is already the current location
    } else if (oldloc.getPathString() != null && oldloc.getPathString().equals(entry.getPath())) {
        // Same path already current; just attach the resolved entry/id to it.
        oldloc.setPathEntry(entry);
        oldloc.setPathId(entry.getPathId());
    } else {
        // New destination: push a fully populated location onto the history.
        Location newloc = new Location();
        newloc.setPathEntry(entry);
        newloc.setPathId(entry.getPathId());
        newloc.setPathString(entry.getPath());
        location.add(newloc);
    }
    setLocationAndRefresh(entry.getPath());
}
/**
 * Navigates to a path by database id. Pushes the id onto the history if it is
 * not already current, then resolves it to a DBPathEntry in the background and
 * re-enters setLocationAndRefresh(String) with the resolved path.
 *
 * @param id the pathid to display
 */
public void setLocationAndRefresh(long id) {
    writeStatusBar(String.format("Starting query; new ID is: %d", id));
    Location oldloc = location.get();
    if (oldloc.getPathId() == id) {
        // noop: already at this id
    } else {
        Location newloc = new Location();
        newloc.setPathId(id);
        location.add(newloc);
    }
    refresh(new LazyRunnable() {
        @Override
        public void run() throws SQLException, InterruptedException {
            Debug.writelog("-- SwtFileFolderMenu SetLocationAndRefresh LOCAL PATTERN (id based) --");
            Location loc = location.get();
            DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
            if (p != null) {
                // Fill in the resolved entry and propagate its path string to the UI.
                loc.setPathEntry(p);
                loc.setPathString(p.getPath());
                loc.setSearchString(null);
                setLocationAndRefresh(loc.getPathString());
            }
        }
    });
}
/**
 * Shows the given location by delegating to setLocationAndRefresh(String),
 * preferring its path string, then its entry's path, then its search string,
 * and finally the empty string.
 *
 * @param loc the location to display
 */
public void setLocationAndRefresh(final Location loc) {
    final String text;
    if (loc.getPathString() != null) {
        text = loc.getPathString();
    } else if (loc.getPathEntry() != null) {
        text = loc.getPathEntry().getPath();
    } else if (loc.getSearchString() != null) {
        text = loc.getSearchString();
    } else {
        text = "";
    }
    setLocationAndRefresh(text);
}
/*
* normal refresh
*/
// The single reusable refresh scenario for this window.
private Scenario scenario = new Scenario();

/** Re-runs the standard query/populate scenario for the current location. */
protected synchronized void refresh() {
    refresh(scenario);
}
class Scenario extends SwtCommonFileFolderMenu.Scenario {
@Override
public void run() throws SQLException, InterruptedException {
writeProgress(10);
Location loc = location.get();
if (loc.getPathEntry() == null && loc.getSearchString() == null &&
(loc.getPathEntry() != null || loc.getPathId() != 0L
|| (loc.getPathString() != null && !"".equals(loc.getPathString())))) {
writeProgress(50);
if (loc.getPathString() != null) {
DBPathEntry p = getDB().getDBPathEntryByPath(loc.getPathString());
if (p != null) {
loc.setPathEntry(p);
loc.setPathId(p.getPathId());
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 1 (path based entry detection) --");
} else {
loc.setSearchString(loc.getPathString());
loc.setPathString(null);
loc.setPathId(0L);
loc.setPathEntry(null);
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 2 (searchstring=" + loc.getSearchString() + ") --");
}
} else if (loc.getPathId() != 0L) {
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 3 (id based) --");
DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
assert(p != null);
setLocationAndRefresh(p);
return;
} else {
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 4 (show all paths) --");
}
}
try {
threadWait();
cleanupTable();
ArrayList<String> typelist = new ArrayList<String> ();
if (isFolderChecked) {
typelist.add("type=0");
}
if (isFileChecked) {
typelist.add("type=1");
}
if (isCompressedFolderChecked) {
typelist.add("type=2");
}
if (isCompressedFileChecked) {
typelist.add("type=3");
}
String typeWhere = typelist.size() == 0 ? "" : String.join(" OR ", typelist);
threadWait();
writeStatusBar("Querying...");
writeProgress(70);
String searchSubSQL;
ArrayList<String> searchStringElement = new ArrayList<String> ();
if (loc.getSearchString() == null || "".equals(loc.getSearchString())) {
searchSubSQL = "";
} else {
ArrayList<String> p = new ArrayList<String> ();
for (String s: loc.getSearchString().split(" ")) {
if (! "".equals(s)) {
p.add("path LIKE ?");
searchStringElement.add(s);
}
}
searchSubSQL = " AND (" + String.join(" AND ", p) + ")";
}
threadWait();
DBPathEntry locationPathEntry = null;
PreparedStatement ps;
if (loc.getPathString() == null || "".equals(loc.getPathString())) {
String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
+ " ORDER BY " + order;
Debug.writelog(sql);
ps = getDB().prepareStatement(sql);
int c = 1;
for (String s: searchStringElement) {
ps.setString(c, "%" + s + "%");
Debug.writelog(c + " %" + s + "%");
c++;
}
} else if ((locationPathEntry = loc.getPathEntry()) != null) {
String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
+ " AND (pathid=? OR EXISTS (SELECT * FROM upperlower WHERE upper=? AND lower=pathid))"
+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
+ " ORDER BY " + order;
Debug.writelog(sql);
ps = getDB().prepareStatement(sql);
int c = 1;
for (String s: searchStringElement) {
ps.setString(c, "%" + s + "%");
Debug.writelog(c + " %" + s + "%");
c++;
}
ps.setLong(c++, locationPathEntry.getPathId());
ps.setLong(c++, locationPathEntry.getPathId());
Debug.writelog(locationPathEntry.getPath());
} else {
String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
+ " AND path LIKE ?"
+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
+ " ORDER BY " + order;
Debug.writelog(sql);
ps = getDB().prepareStatement(sql);
int c = 1;
for (String s: searchStringElement) {
ps.setString(c, "%" + s + "%");
Debug.writelog(c + " %" + s + "%");
c++;
}
ps.setString(c++, loc.getPathString() + "%");
Debug.writelog(loc.getPathString());
}
try {
LazyUpdater.Dispatcher disp = getDB().getDispatcher();
disp.setList(Dispatcher.NONE);
disp.setCsum(Dispatcher.NONE);
ResultSet rs = ps.executeQuery();
try {
threadWait();
Debug.writelog("QUERY FINISHED");
writeStatusBar("Listing...");
writeProgress(90);
int count = 0;
while (rs.next()) {
threadWait();
DBPathEntry p1 = getDB().rsToPathEntry(rs);
Assertion.assertAssertionError(p1 != null);
Assertion.assertAssertionError(p1.getPath() != null);
if (locationPathEntry != null) {
Assertion.assertAssertionError(locationPathEntry.getPath() != null);
Assertion.assertAssertionError(p1.getPath().startsWith(locationPathEntry.getPath()),
p1.getPath() + " does not start with " + locationPathEntry.getPath()
);
}
PathEntry p2;
try {
p2 = disp.dispatch(p1);
} catch (IOException e) {
p2 = null;
}
if (p2 == null) {
addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), true);
getDB().unsetClean(p1.getParentId());
} else {
Assertion.assertAssertionError(p1.getPath().equals(p2.getPath()),
"!! " + p1.getPath() + " != " + p2.getPath());
if (!PathEntry.dscMatch(p1, p2)) {
p1.setDateLastModified(p2.getDateLastModified());
p1.setSize(p2.getSize());
p1.setCompressedSize(p2.getCompressedSize());
p1.clearCsum();
getDB().unsetClean(p1.getParentId());
}
addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), false);
}
count ++;
}
writeStatusBar(String.format("%d items", count));
} finally {
rs.close();
}
} finally {
ps.close();
}
writeProgress(0);
} catch (WindowDisposedException e) {}
}
protected void cleanupTable() throws WindowDisposedException {
if (table.isDisposed()) {
throw new WindowDisposedException("!! Window disposed at cleanupTable");
}
Display.getDefault().asyncExec(new Runnable() {
public void run() {
pathentrylist.clear();
table.removeAll();;
}
});
}
protected void addRow(final DBPathEntry entry, final int duplicate,
final long dedupablesize, final boolean grayout) throws WindowDisposedException {
if (table.isDisposed()) {
throw new WindowDisposedException("!! Window disposed at addRow");
}
Display.getDefault().asyncExec(new Runnable() {
public void run() {
pathentrylist.add(entry);
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
final NumberFormat numf = NumberFormat.getNumberInstance();
Date d = new Date(entry.getDateLastModified());
String[] row = {
entry.getPath(),
sdf.format(d),
numf.format(entry.getSize()),
numf.format(entry.getCompressedSize()),
(duplicate > 0 ? numf.format(duplicate) : null),
(dedupablesize > 0 ? numf.format(dedupablesize) : null),
};
final Display display = Display.getDefault();
final Color blue = new Color(display, 0, 0, 255);
final Color red = new Color(display, 255, 0, 0);
final Color black = new Color(display, 0, 0, 0);
final Color gray = new Color(display, 127, 127, 127);
try {
TableItem tableItem = new TableItem(table, SWT.NONE);
tableItem.setText(row);
if (grayout) {
tableItem.setForeground(gray);
} else if (entry.isNoAccess()) {
tableItem.setForeground(red);
} else if (entry.isFile() && entry.getSize() != entry.getCompressedSize()) {
tableItem.setForeground(blue);
} else {
tableItem.setForeground(black);
}
} catch (Exception e) {
if (!table.isDisposed()) {
e.printStackTrace();
}
}
}
});
}
}
/**
 * Wires JFace data bindings that keep the tool bar, status label, and status
 * bar backgrounds in sync with the shell's background color.
 *
 * @return the context owning the created bindings
 */
protected DataBindingContext initDataBindings() {
    DataBindingContext bindingContext = new DataBindingContext();
    //
    IObservableValue toolBarBackground = WidgetProperties.background().observe(compositeToolBar);
    IObservableValue shellBackground = PojoProperties.value("background").observe(shell);
    bindingContext.bindValue(toolBarBackground, shellBackground, null, null);
    //
    IObservableValue statusLabelBackground = WidgetProperties.background().observe(lblStatusBar);
    bindingContext.bindValue(statusLabelBackground, shellBackground, null, null);
    //
    IObservableValue statusBarBackground = WidgetProperties.background().observe(compositeStatusBar);
    bindingContext.bindValue(statusBarBackground, shellBackground, null, null);
    //
    return bindingContext;
}
}
| n-i-e/deepfolderview | src/main/java/com/github/n_i_e/deepfolderview/SwtFileFolderMenu.java | Java | apache-2.0 | 41,340 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.medialive.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* Settings for the action to deactivate the image in a specific layer.
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/StaticImageDeactivateScheduleActionSettings"
* target="_top">AWS API Documentation</a>
*/
/**
 * Settings for the action to deactivate the image in a specific layer.
 *
 * @see <a
 *      href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/StaticImageDeactivateScheduleActionSettings"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StaticImageDeactivateScheduleActionSettings implements Serializable, Cloneable, StructuredPojo {

    /** The time in milliseconds for the image to fade out. Default is 0 (no fade-out). */
    private Integer fadeOut;
    /** The image overlay layer to deactivate, 0 to 7. Default is 0. */
    private Integer layer;

    /**
     * Sets the fade-out time in milliseconds. Default is 0 (no fade-out).
     *
     * @param fadeOut
     *        The time in milliseconds for the image to fade out.
     */
    public void setFadeOut(Integer fadeOut) {
        this.fadeOut = fadeOut;
    }

    /**
     * Returns the fade-out time in milliseconds. Default is 0 (no fade-out).
     *
     * @return The time in milliseconds for the image to fade out.
     */
    public Integer getFadeOut() {
        return this.fadeOut;
    }

    /**
     * Fluent variant of {@link #setFadeOut(Integer)}.
     *
     * @param fadeOut
     *        The time in milliseconds for the image to fade out.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StaticImageDeactivateScheduleActionSettings withFadeOut(Integer fadeOut) {
        setFadeOut(fadeOut);
        return this;
    }

    /**
     * Sets the image overlay layer to deactivate, 0 to 7. Default is 0.
     *
     * @param layer
     *        The image overlay layer to deactivate.
     */
    public void setLayer(Integer layer) {
        this.layer = layer;
    }

    /**
     * Returns the image overlay layer to deactivate, 0 to 7. Default is 0.
     *
     * @return The image overlay layer to deactivate.
     */
    public Integer getLayer() {
        return this.layer;
    }

    /**
     * Fluent variant of {@link #setLayer(Integer)}.
     *
     * @param layer
     *        The image overlay layer to deactivate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StaticImageDeactivateScheduleActionSettings withLayer(Integer layer) {
        setLayer(layer);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging; only non-null fields are rendered.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getFadeOut() != null) {
            sb.append("FadeOut: ").append(getFadeOut()).append(",");
        }
        if (getLayer() != null) {
            sb.append("Layer: ").append(getLayer());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof StaticImageDeactivateScheduleActionSettings)) {
            return false;
        }
        StaticImageDeactivateScheduleActionSettings other = (StaticImageDeactivateScheduleActionSettings) obj;
        return java.util.Objects.equals(other.getFadeOut(), this.getFadeOut())
                && java.util.Objects.equals(other.getLayer(), this.getLayer());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        // Same recurrence as the generated form: h = 31*(31*1 + h(fadeOut)) + h(layer).
        int hashCode = prime + ((getFadeOut() == null) ? 0 : getFadeOut().hashCode());
        hashCode = prime * hashCode + ((getLayer() == null) ? 0 : getLayer().hashCode());
        return hashCode;
    }

    @Override
    public StaticImageDeactivateScheduleActionSettings clone() {
        try {
            return (StaticImageDeactivateScheduleActionSettings) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.medialive.model.transform.StaticImageDeactivateScheduleActionSettingsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-medialive/src/main/java/com/amazonaws/services/medialive/model/StaticImageDeactivateScheduleActionSettings.java | Java | apache-2.0 | 5,899 |
/**
* Copyright (C) 2010-2013 Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.broker.client;
import io.netty.channel.Channel;
import java.util.List;
/**
* @author shijia.wxr<vintage.wang@gmail.com>
* @since 2013-6-24
*/
public interface ConsumerIdsChangeListener {
    /**
     * Invoked when the set of client connections registered under a consumer
     * group changes.
     *
     * @param group    name of the consumer group whose membership changed
     * @param channels current client channels belonging to the group
     */
    void consumerIdsChanged(final String group, final List<Channel> channels);
}
| dingjun84/mq-backup | rocketmq-broker/src/main/java/com/alibaba/rocketmq/broker/client/ConsumerIdsChangeListener.java | Java | apache-2.0 | 963 |
// copyright (c) 2017 vmware, inc. all rights reserved.
//
// licensed under the apache license, version 2.0 (the "license");
// you may not use this file except in compliance with the license.
// you may obtain a copy of the license at
//
// http://www.apache.org/licenses/license-2.0
//
// unless required by applicable law or agreed to in writing, software
// distributed under the license is distributed on an "as is" basis,
// without warranties or conditions of any kind, either express or implied.
// see the license for the specific language governing permissions and
// limitations under the license.
package dao
import (
"github.com/astaxie/beego/orm"
"github.com/vmware/harbor/src/common/models"
"fmt"
"time"
)
// AddScanJob persists the given scan job and returns the new row id.
// Jobs start out as models.JobPending unless the caller chose a status.
func AddScanJob(job models.ScanJob) (int64, error) {
	o := GetOrmer()
	if job.Status == "" {
		job.Status = models.JobPending
	}
	return o.Insert(&job)
}
// GetScanJob loads the scan job with the given id.
// It returns (nil, nil) when no such job exists and propagates any other
// database error instead of silently returning a partially-read struct.
func GetScanJob(id int64) (*models.ScanJob, error) {
	o := GetOrmer()
	j := models.ScanJob{ID: id}
	err := o.Read(&j)
	if err == orm.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		// Previously any read failure other than ErrNoRows was swallowed and
		// an empty/partial record was returned with a nil error.
		return nil, err
	}
	return &j, nil
}
// GetScanJobsByImage returns scan jobs for the given repository/tag pair,
// newest first. An optional single limit caps the number of rows.
func GetScanJobsByImage(repository, tag string, limit ...int) ([]*models.ScanJob, error) {
	var jobs []*models.ScanJob
	qs := scanJobQs(limit...).
		Filter("repository", repository).
		Filter("tag", tag).
		OrderBy("-id")
	_, err := qs.All(&jobs)
	return jobs, err
}
// GetScanJobsByDigest returns scan jobs recorded for the given image digest,
// newest first. An optional single limit caps the number of rows.
func GetScanJobsByDigest(digest string, limit ...int) ([]*models.ScanJob, error) {
	var jobs []*models.ScanJob
	qs := scanJobQs(limit...).Filter("digest", digest).OrderBy("-id")
	_, err := qs.All(&jobs)
	return jobs, err
}
// UpdateScanJobStatus sets the status (and update time) of the scan job with
// the given id. It returns the underlying database error if the update fails,
// or an explicit error when no row matched the id.
func UpdateScanJobStatus(id int64, status string) error {
	o := GetOrmer()
	sj := models.ScanJob{
		ID:         id,
		Status:     status,
		UpdateTime: time.Now(),
	}
	n, err := o.Update(&sj, "Status", "UpdateTime")
	if err != nil {
		// Surface the real database error directly; the old code folded it
		// into a row-count message and returned it only when n == 0.
		return err
	}
	if n == 0 {
		return fmt.Errorf("no scan job with id: %d", id)
	}
	return nil
}
// scanJobQs builds a query seter over the scan-job table. A single optional
// argument limits the row count; -1 (the default) means unlimited.
func scanJobQs(limit ...int) orm.QuerySeter {
	o := GetOrmer()
	max := -1
	if len(limit) == 1 {
		max = limit[0]
	}
	return o.QueryTable(models.ScanJobTable).Limit(max)
}
| wknet123/harbor | src/common/dao/scan_job.go | GO | apache-2.0 | 2,269 |
package com.bjorktech.cayman.idea.designpattern.structure.proxy;
public class TargetClass implements TargetInterface {
    /** Adds the two operands, printing the sum before returning it. */
    @Override
    public long add(long a, long b) {
        final long result = a + b;
        System.out.println(result);
        return result;
    }

    /** Subtracts {@code b} from {@code a}, printing the difference before returning it. */
    @Override
    public long sub(long a, long b) {
        final long result = a - b;
        System.out.println(result);
        return result;
    }
}
| wanliwang/cayman | cm-idea/src/main/java/com/bjorktech/cayman/idea/designpattern/structure/proxy/TargetClass.java | Java | apache-2.0 | 351 |
package com.badlogic.gdx.ingenuity.scene2d;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.ingenuity.GdxData;
import com.badlogic.gdx.ingenuity.helper.PixmapHelper;
import com.badlogic.gdx.ingenuity.utils.GdxUtilities;
import com.badlogic.gdx.scenes.scene2d.Group;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.actions.Actions;
import com.badlogic.gdx.scenes.scene2d.ui.Image;
import com.badlogic.gdx.utils.Align;
import com.badlogic.gdx.utils.Disposable;
/**
* @ไฝ่
Mitkey
* @ๆถ้ด 2017ๅนด3ๆ24ๆฅ ไธๅ3:09:56
* @็ฑป่ฏดๆ:
* @็ๆฌ xx
*/
public class Loading implements Disposable {
	// Root group holding the dimmed backdrop and both spinner images; it is
	// attached to / detached from the stage as a single unit.
	private Group root = new Group();
	// Outer yellow square, spun counter-clockwise while shown.
	private Image imgOut;
	// Inner red circle, spun clockwise (half the speed) while shown.
	private Image imgInner;
	/**
	 * Builds the overlay: a screen-sized translucent backdrop with a square
	 * and a circle centered on top. Nothing is displayed until
	 * {@link #show(Stage)} is called.
	 */
	public Loading() {
		root.setSize(GdxData.WIDTH, GdxData.HEIGHT);
		Image imgBg = new Image(PixmapHelper.getInstance().newTranslucentDrawable(5, 5));
		imgBg.setFillParent(true);
		root.addActor(imgBg);
		imgOut = new Image(PixmapHelper.getInstance().newRectangleDrawable(Color.YELLOW, 40, 40));
		imgOut.setOrigin(Align.center);
		imgInner = new Image(PixmapHelper.getInstance().newCircleDrawable(Color.RED, 18));
		imgInner.setOrigin(Align.center);
		GdxUtilities.center(imgOut);
		GdxUtilities.center(imgInner);
		// Added after the backdrop so the spinners render on top of it.
		root.addActor(imgOut);
		root.addActor(imgInner);
	}
	/**
	 * Attaches the overlay to the given stage, brings it to the front and
	 * (re)starts both spin animations.
	 *
	 * @param stage stage to display the loading overlay on
	 */
	public void show(Stage stage) {
		stage.addActor(root);
		root.toFront();
		imgOut.clearActions();
		imgOut.addAction(Actions.forever(Actions.rotateBy(-360, 1f)));
		imgInner.clearActions();
		imgInner.addAction(Actions.forever(Actions.rotateBy(360, 2f)));
	}
	/** Detaches the overlay from its stage; pending actions stop being acted. */
	public void hide() {
		root.remove();
	}
	// NOTE(review): dispose() only hides the overlay; the pixmap-backed
	// drawables are presumably owned by PixmapHelper — confirm nothing leaks.
	@Override
	public void dispose() {
		hide();
	}
}
| mitkey/libgdx-ingenuity | depot/src/com/badlogic/gdx/ingenuity/scene2d/Loading.java | Java | apache-2.0 | 1,675 |
<?php
/**
* amadeus-ws-client
*
* Copyright 2020 Amadeus Benelux NV
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @package Amadeus
* @license https://opensource.org/licenses/Apache-2.0 Apache 2.0
*/
namespace Amadeus\Client\RequestCreator\Converter\Fare;
use Amadeus\Client\RequestCreator\Converter\BaseConverter;
use Amadeus\Client\RequestOptions\FarePriceUpsellWithoutPnrOptions;
use Amadeus\Client\Struct;
/**
 * Fare_PriceUpsellWithoutPNR Request converter
 *
 * @package Amadeus\Client\RequestCreator\Converter\Fare
 * @author Valerii Nezhurov <valeriy.nezhuriov@gmail.com>
 */
class PriceUpsellWithoutPNRConv extends BaseConverter
{
    /**
     * Build the Fare_PriceUpsellWithoutPNR request message structure.
     *
     * @param FarePriceUpsellWithoutPnrOptions $requestOptions
     * @param int|string $version message version (not used by this converter)
     * @return Struct\Fare\PriceUpsellWithoutPNR
     */
    public function convert($requestOptions, $version)
    {
        $message = new Struct\Fare\PriceUpsellWithoutPNR($requestOptions);

        return $message;
    }
}
| amabnl/amadeus-ws-client | src/Amadeus/Client/RequestCreator/Converter/Fare/PriceUpsellWithoutPNRConv.php | PHP | apache-2.0 | 1,463 |
package com.concavenp.nanodegree.shared;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | concaveNP/GoUbiquitous | shared/src/test/java/com/concavenp/nanodegree/shared/ExampleUnitTest.java | Java | apache-2.0 | 324 |
/*
* Copyright (c) 2013-2015 Josef Hardi <josef.hardi@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.obidea.semantika.datatype;
import com.obidea.semantika.datatype.exception.InvalidLexicalFormException;
import com.obidea.semantika.datatype.primitive.XsdDecimal;
public abstract class AbstractDerivedDecimalType extends AbstractXmlType<Number>
{
   protected AbstractDerivedDecimalType(String name)
   {
      super(name);
   }

   /**
    * Parse and validate a lexical form of the literal.
    *
    * @param lexicalForm
    *           the typing form of the literal.
    * @return A <code>Number</code> representation of the literal
    * @throws InvalidLexicalFormException
    *            if the literal form is invalid or the value is out of range
    */
   protected abstract Number parseLexicalForm(String lexicalForm) throws InvalidLexicalFormException;

   @Override
   public Number getValue(String lexicalForm)
   {
      // Subclasses supply the concrete parsing/validation rules.
      return parseLexicalForm(lexicalForm);
   }

   @Override
   public IDatatype<?> getPrimitiveDatatype()
   {
      // Every decimal-derived type bottoms out at xsd:decimal.
      return XsdDecimal.getInstance();
   }

   @Override
   public boolean isPrimitive()
   {
      return false;
   }
}
| obidea/semantika | src/main/java/com/obidea/semantika/datatype/AbstractDerivedDecimalType.java | Java | apache-2.0 | 1,709 |
๏ปฟusing System;
namespace EtoTest.Model
{
    /// <summary>
    /// Version bookkeeping for a locally synced data file: tracks local edits
    /// versus the copy on Google Drive, plus crash-recovery state.
    /// </summary>
    public class DataFileVersion
    {
        /// <summary>
        /// The name of this station - written to the name of conflict files when resyncing.
        /// </summary>
        public String StationName { get; set; }
        /// <summary>
        /// If this ID is different from the current one on Google Drive then updates have occurred on Google.
        /// </summary>
        public int FromVersionId { get; set; }
        /// <summary>
        /// If we have made some of our own updates when offline, then this value is incremented from 0.
        /// </summary>
        public int? CurrentVersionId { get; set; }
        /// <summary>
        /// A description of the operation that we are about to attempt. If this value is set, then we crashed before the last operation completed.
        /// Set to null immediately after completing an operation.
        /// </summary>
        public String BeforeOperation { get; set; }
    }
/*
Copyright (C) 2013-2020 Expedia Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.hotels.styx.support.matchers;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import java.util.Objects;
import java.util.Optional;
/**
 * Hamcrest matchers over {@link Optional}: absence, presence, an exact value,
 * or a value satisfying a nested matcher.
 *
 * @param <T> the optional's value type
 * @author john.butler
 * @see Optional
 */
public final class IsOptional<T> extends TypeSafeMatcher<Optional<? extends T>> {

    private final boolean someExpected;
    private final Optional<T> expected;
    private final Optional<Matcher<T>> matcher;

    /** Matches only an empty {@link Optional}. */
    public static IsOptional<Object> isAbsent() {
        return new IsOptional<>(false);
    }

    /** Matches any present {@link Optional}, regardless of its value. */
    public static IsOptional<Object> isPresent() {
        return new IsOptional<>(true);
    }

    /** Matches a present {@link Optional} whose value equals {@code value}. */
    public static <T> IsOptional<T> isValue(T value) {
        return new IsOptional<>(value);
    }

    /** Matches a present {@link Optional} whose value satisfies {@code matcher}. */
    public static <T> IsOptional<T> matches(Matcher<T> matcher) {
        return new IsOptional<>(matcher);
    }

    /** Matches a present {@link Optional} iterable whose value satisfies {@code matcher}. */
    public static <T extends Iterable> IsOptional<T> isIterable(Matcher<? extends Iterable> matcher) {
        return new IsOptional<>((Matcher) matcher);
    }

    private IsOptional(boolean someExpected) {
        this.someExpected = someExpected;
        this.expected = Optional.empty();
        this.matcher = Optional.empty();
    }

    private IsOptional(T value) {
        this.someExpected = true;
        this.expected = Optional.of(value);
        this.matcher = Optional.empty();
    }

    private IsOptional(Matcher<T> matcher) {
        this.someExpected = true;
        this.expected = Optional.empty();
        this.matcher = Optional.of(matcher);
    }

    @Override
    public void describeTo(Description description) {
        // Mirrors matchesSafely: absent, exact value, nested matcher, bare presence.
        if (!someExpected) {
            description.appendText("<Absent>");
            return;
        }
        if (expected.isPresent()) {
            description.appendValue(expected);
            return;
        }
        if (matcher.isPresent()) {
            description.appendText("a present value matching ");
            matcher.get().describeTo(description);
            return;
        }
        description.appendText("<Present>");
    }

    @Override
    public boolean matchesSafely(Optional<? extends T> item) {
        if (!someExpected) {
            return !item.isPresent();
        }
        if (expected.isPresent()) {
            return item.isPresent() && Objects.equals(item.get(), expected.get());
        }
        if (matcher.isPresent()) {
            return item.isPresent() && matcher.get().matches(item.get());
        }
        return item.isPresent();
    }
}
| mikkokar/styx | support/testsupport/src/main/java/com/hotels/styx/support/matchers/IsOptional.java | Java | apache-2.0 | 3,272 |
<?php
namespace Deliveryboy\V1\Rest\Orderproducts;
class OrderproductsEntity {
    public $order_product_id;
    public $order_id;
    public $order_product_name;
    public $order_item_id;
    public $order_offer_id;
    public $order_type;
    public $package_id;
    public $unit_price;
    public $total_price;
    public $quantity;

    /**
     * Snapshot of all entity fields, keyed by property name in declaration order.
     *
     * @return array
     */
    public function getArrayCopy() {
        // All properties are public, so this yields exactly the field map.
        return get_object_vars($this);
    }

    /**
     * Hydrate the entity from an array keyed by property name.
     * Like the original field-by-field version, every key is expected to be
     * present in $array.
     *
     * @param array $array
     */
    public function exchangeArray(array $array) {
        foreach (array_keys(get_object_vars($this)) as $property) {
            $this->$property = $array[$property];
        }
    }
}
| ankuradhey/laundry | api/module/Deliveryboy/src/Deliveryboy/V1/Rest/Orderproducts/OrderproductsEntity.php | PHP | apache-2.0 | 1,519 |
/*
* Copyright (C) 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.googlecode.android_scripting.language;
import com.googlecode.android_scripting.rpc.ParameterDescriptor;
/**
 * Represents the BeanShell programming language.
 *
 * @author igor.v.karp@gmail.com (Igor Karp)
 */
public class BeanShellLanguage extends Language {

  @Override
  protected String getImportStatement() {
    // FIXME(igor.v.karp): this is interpreter specific
    return "source(\"/sdcard/com.googlecode.bshforandroid/extras/bsh/android.bsh\");\n";
  }

  @Override
  protected String getRpcReceiverDeclaration(String rpcReceiver) {
    return rpcReceiver + " = Android();\n";
  }

  /**
   * Renders a call of the form {@code receiver.call("method", arg1, arg2)};
   * with no arguments the method name is simply closed with a quote.
   */
  @Override
  protected String getMethodCallText(String receiver, String method,
      ParameterDescriptor[] parameters) {
    StringBuilder call = new StringBuilder();
    call.append(getApplyReceiverText(receiver)).append(getApplyOperatorText()).append(method);
    call.append(parameters.length > 0 ? getLeftParametersText() : getQuote());
    for (int i = 0; i < parameters.length; i++) {
      if (i > 0) {
        call.append(getParameterSeparator());
      }
      call.append(getValueText(parameters[i]));
    }
    call.append(getRightParametersText());
    return call.toString();
  }

  @Override
  protected String getApplyOperatorText() {
    return ".call(\"";
  }

  @Override
  protected String getLeftParametersText() {
    return "\", ";
  }

  @Override
  protected String getRightParametersText() {
    return ")";
  }
}
| kuri65536/sl4a | android/Common/src/com/googlecode/android_scripting/language/BeanShellLanguage.java | Java | apache-2.0 | 2,132 |
/*
* Copyright [2017]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netpet.spools.book.insidethejavavirtualmachine.chapter18;
/**
* @Desc javap -verbose / javap -c Hello.class ๆฅ็ๅญ่็ ๆไปถ
* Created by woncz on 2017/8/18.
*/
public class Hello {
}
| WindsorWang/Spools | spools-book/src/main/java/com/netpet/spools/book/insidethejavavirtualmachine/chapter18/Hello.java | Java | apache-2.0 | 789 |
<?php
/*******************************************************************************
* Copyright 2009-2014 Amazon Services. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
*
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*******************************************************************************
* PHP Version 5
* @category Amazon
* @package Marketplace Web Service Sellers
* @version 2011-07-01
* Library Version: 2014-10-20
* Generated: Fri Oct 17 18:34:06 GMT 2014
*/
class MarketplaceWebServiceSellers_Mock implements MarketplaceWebServiceSellers_Interface
{
    // Public API ------------------------------------------------------------//

    /**
     * Get Service Status
     * Returns the service status of a particular MWS API section. The operation
     * takes no input.
     * All API sections within the API are required to implement this operation.
     *
     * @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_GetServiceStatus request or MarketplaceWebServiceSellers_Model_GetServiceStatus object itself
     * @see MarketplaceWebServiceSellers_Model_GetServiceStatus
     * @return MarketplaceWebServiceSellers_Model_GetServiceStatusResponse
     *
     * @throws MarketplaceWebServiceSellers_Exception
     */
    public function getServiceStatus($request)
    {
        return MarketplaceWebServiceSellers_Model_GetServiceStatusResponse::fromXML($this->_invoke('GetServiceStatus'));
    }

    /**
     * List Marketplace Participations
     * Returns a list of marketplaces that the seller submitting the request can sell in,
     * and a list of participations that include seller-specific information in that marketplace.
     *
     * @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations request or MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations object itself
     * @see MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations
     * @return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsResponse
     *
     * @throws MarketplaceWebServiceSellers_Exception
     */
    public function listMarketplaceParticipations($request)
    {
        return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsResponse::fromXML($this->_invoke('ListMarketplaceParticipations'));
    }

    /**
     * List Marketplace Participations By Next Token
     * Returns the next page of marketplaces and participations using the NextToken value
     * that was returned by your previous request to either ListMarketplaceParticipations or
     * ListMarketplaceParticipationsByNextToken.
     *
     * @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken request or MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken object itself
     * @see MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken
     * @return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextTokenResponse
     *
     * @throws MarketplaceWebServiceSellers_Exception
     */
    public function listMarketplaceParticipationsByNextToken($request)
    {
        return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextTokenResponse::fromXML($this->_invoke('ListMarketplaceParticipationsByNextToken'));
    }

    // Private API ------------------------------------------------------------//

    /**
     * Loads the canned XML response for the given action from the local
     * Mock/ directory, searching the include path as a fallback.
     *
     * @param string $actionName operation name, e.g. 'GetServiceStatus'
     * @return string raw XML response body
     */
    private function _invoke($actionName)
    {
        // Removed a pointless `$xml =` assignment that immediately went out of
        // scope; the value is returned directly.
        return file_get_contents(dirname(__FILE__) . '/Mock/' . $actionName . 'Response.xml',
            /** search include path */
            true);
    }
}
| brickfox/mws | src/MarketplaceWebServiceSellers/Mock.php | PHP | apache-2.0 | 4,108 |
๏ปฟusing System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Trifolia.DB
{
    /// <summary>
    /// RFC 2119-style conformance verbs used by template constraints.
    /// NOTE(review): the explicit numeric values look like they are persisted
    /// elsewhere — confirm before renumbering or reordering members.
    /// </summary>
    public enum Conformance
    {
        SHALL = 1,
        SHALL_NOT = 2,
        SHOULD = 3,
        SHOULD_NOT = 4,
        MAY = 5,
        MAY_NOT = 6,
        // Sentinel for unrecognized or unspecified conformance values.
        UNKNOWN = 999
    }
| lantanagroup/trifolia | Trifolia.DB/Conformance.cs | C# | apache-2.0 | 302 |
package com.coolweather.android.util;
import okhttp3.OkHttpClient;
import okhttp3.Request;
/**
 * Small helper for firing asynchronous HTTP GET requests with OkHttp.
 *
 * Created by fengj on 2017/1/27.
 */
public class HttpUtil {
    // A single shared client: each OkHttpClient owns its own connection pool
    // and dispatcher threads, so allocating a new one per request leaks
    // resources. OkHttp recommends reusing one instance.
    private static final OkHttpClient CLIENT = new OkHttpClient();

    /**
     * Sends an asynchronous GET request to {@code address}; the result (or
     * failure) is delivered to {@code callback} on an OkHttp worker thread.
     *
     * @param address  absolute URL to request
     * @param callback receiver for the response or failure
     */
    public static void sendOkHttpRequest(String address, okhttp3.Callback callback) {
        Request request = new Request.Builder().url(address).build();
        CLIENT.newCall(request).enqueue(callback);
    }
}
| cabbagemaoyi/coolweather | app/src/main/java/com/coolweather/android/util/HttpUtil.java | Java | apache-2.0 | 419 |
# Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for network related helpers."""
import socket
def get_ip():
  """Get primary IP (the one with a default route) of local machine.

  This works on both Linux and Windows platforms, and doesn't require a
  working internet connection: connecting a UDP socket sends no packets,
  it only asks the OS to choose a source address/route.

  Returns:
    The primary IPv4 address as a string, or '127.0.0.1' when no route
    could be determined.
  """
  s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    # The destination doesn't even have to be reachable.
    s.connect(('10.255.255.255', 1))
    return s.getsockname()[0]
  except Exception:
    # Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.
    return '127.0.0.1'
  finally:
    s.close()
| google/flight-lab | controller/common/net.py | Python | apache-2.0 | 1,062 |
/*
* Copyright 2013
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.openntf.domino.iterators;
import java.util.Iterator;
import org.openntf.domino.Base;
import org.openntf.domino.Database;
import org.openntf.domino.DocumentCollection;
import org.openntf.domino.Session;
import org.openntf.domino.View;
import org.openntf.domino.ViewEntryCollection;
import org.openntf.domino.utils.DominoUtils;
import org.openntf.domino.utils.Factory;
/**
 * Base class for iterators over Domino collections. Lazily resolves and caches
 * the owning {@link Session} and {@link Database} so subclasses can re-acquire
 * handles from the server name / file path when needed.
 *
 * @param <T>
 *            the element type produced by the iterator
 */
public abstract class AbstractDominoIterator<T> implements Iterator<T> {
	// Server hosting the backing database; kept so the database can be re-opened.
	private String serverName_;
	// File path of the backing database on that server.
	private String filePath_;
	// The collection being iterated (DocumentCollection or ViewEntryCollection).
	private Base<?> collection_;
	// Cached session handle; transient because Domino handles are not serializable.
	private transient Session session_;
	// Cached database handle; transient for the same reason.
	private transient Database database_;

	/**
	 * Instantiates a new iterator over the given collection and derives the
	 * session/database context from it.
	 *
	 * @param collection
	 *            the collection to iterate
	 */
	protected AbstractDominoIterator(final Base<?> collection) {
		setCollection(collection);
	}
	/**
	 * Returns the cached session, acquiring one from the {@link Factory} on
	 * first use. Returns {@code null} if acquisition fails (the exception is
	 * routed through DominoUtils.handleException).
	 *
	 * @return the session, or {@code null} on failure
	 */
	protected Session getSession() {
		if (session_ == null) {
			try {
				session_ = Factory.getSession();
			} catch (Throwable e) {
				DominoUtils.handleException(e);
				return null;
			}
		}
		return session_;
	}
	/**
	 * Returns the cached database, re-opening it from the stored server name
	 * and file path on first use. Returns {@code null} if opening fails.
	 *
	 * @return the database, or {@code null} on failure
	 */
	protected Database getDatabase() {
		if (database_ == null) {
			Session session = getSession();
			try {
				database_ = session.getDatabase(getServerName(), getFilePath());
			} catch (Throwable e) {
				DominoUtils.handleException(e);
				return null;
			}
		}
		return database_;
	}
	/**
	 * Gets the backing database's file path.
	 *
	 * @return the file path
	 */
	protected String getFilePath() {
		return filePath_;
	}
	/**
	 * Gets the backing database's server name.
	 *
	 * @return the server name
	 */
	protected String getServerName() {
		return serverName_;
	}
	/**
	 * Records the server name and file path of the given database so it can be
	 * re-opened later; failures are routed through DominoUtils.handleException.
	 *
	 * @param database
	 *            the database to record
	 */
	protected void setDatabase(final Database database) {
		if (database != null) {
			try {
				setFilePath(database.getFilePath());
				setServerName(database.getServer());
			} catch (Throwable e) {
				DominoUtils.handleException(e);
			}
		}
	}
	/**
	 * Sets the stored file path.
	 *
	 * @param filePath
	 *            the new file path
	 */
	protected void setFilePath(final String filePath) {
		filePath_ = filePath;
	}
	/**
	 * Sets the stored server name.
	 *
	 * @param serverName
	 *            the new server name
	 */
	protected void setServerName(final String serverName) {
		serverName_ = serverName;
	}
	/**
	 * Gets the collection being iterated.
	 *
	 * @return the collection
	 */
	public Base<?> getCollection() {
		return collection_;
	}
	/**
	 * Sets the collection to iterate and derives session/database context from
	 * it: a DocumentCollection yields both via its parent database, while a
	 * ViewEntryCollection goes through its parent view.
	 *
	 * @param collection
	 *            the new collection
	 */
	public void setCollection(final Base<?> collection) {
		if (collection != null) {
			if (collection instanceof DocumentCollection) {
				org.openntf.domino.Database parent = ((org.openntf.domino.DocumentCollection) collection).getParent();
				session_ = Factory.fromLotus(parent.getParent(), Session.SCHEMA, null); // FIXME NTF - this is suboptimal,
				database_ = Factory.fromLotus(parent, Database.SCHEMA, session_);
				// but we still need to
				// sort out the parent/child pattern
			} else if (collection instanceof ViewEntryCollection) {
				View vw = ((ViewEntryCollection) collection).getParent();
				database_ = vw.getParent();
				session_ = Factory.getSession(database_);
			}
			if (database_ != null) {
				// Capture server/file path so the handle can be re-opened later.
				setDatabase(database_);
			}
		}
		collection_ = collection;
	}
}
| mariusj/org.openntf.domino | domino/core/archive/AbstractDominoIterator.java | Java | apache-2.0 | 4,458 |
'use strict';
// Protractor end-to-end configuration: wires chai-as-promised into the global
// `expect` used by the spec files, then exports the runner settings.
var chai = require('chai');
var promised = require('chai-as-promised');
chai.use(promised);
global.expect = chai.expect;
exports.config = {
  // The timeout for each script run on the browser. This should be longer
  // than the maximum time your application needs to stabilize between tasks.
  allScriptsTimeout: 15000,
  // Capabilities to be passed to the webdriver instance.
  capabilities: {
    'browserName': 'chrome',
    'loggingPrefs': {
      'browser': 'ALL'
    }
  },
  // ----- What tests to run -----
  //
  // Spec patterns are relative to the location of the spec file. They may
  // include glob patterns.
  // NOTE(review): 'process-stariables-spec.js' and 'task-claiminart-spec.js'
  // look like typos compared with the other spec names — confirm these files
  // actually exist on disk before relying on this suite's coverage.
  specs: [
    'admin/specs/admin-user-spec.js',
    'admin/specs/users-spec.js',
    'admin/specs/groups-spec.js',
    'admin/specs/system-spec.js',
    'admin/specs/authorizations-spec.js',
    'cockpit/specs/dashboard-spec.js',
    'cockpit/specs/process-definition-spec.js',
    'cockpit/specs/decision-definition-spec.js',
    'cockpit/specs/process-instance-spec.js',
    'cockpit/specs/process-definition-filter-spec.js',
    'cockpit/specs/variable-spec.js',
    'cockpit/specs/suspension-spec.js',
    'tasklist/specs/filter-basic-spec.js',
    'tasklist/specs/filter-permissions-spec.js',
    'tasklist/specs/filter-criteria-spec.js',
    'tasklist/specs/filter-vg-spec.js',
    'tasklist/specs/process-stariables-spec.js',
    'tasklist/specs/task-claiminart-spec.js',
    'tasklist/specs/tasklist-sorting-spec.js',
    'tasklist/specs/tasklist-search-spec.js',
    'tasklist/specs/task-detail-view-spec.js',
    'tasklist/specs/task-dates-spec.js'
  ],
  // A base URL for your application under test. Calls to protractor.get()
  // with relative paths will be prepended with this.
  baseUrl: 'http://localhost:8080',
  // ----- The test framework -----
  //
  // Jasmine is fully supported as a test and assertion framework.
  // Mocha has limited beta support. You will need to include your own
  // assertion framework if working with mocha.
  framework: 'mocha',
  // ----- Options to be passed to minijasminenode -----
  //
  // Options to be passed to Mocha-node.
  // See the full list at https://github.com/juliemr/minijasminenode
  mochaOpts: {
    timeout: 15000,
    colors: false,
    reporter: 'xunit-file',
    slow: 3000
  }
};
| jangalinski/camunda-bpm-webapp | webapp/src/test/js/e2e/ci.conf.js | JavaScript | apache-2.0 | 2,310 |
/*
* Copyright (c) 2015 IRCCloud, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.irccloud.android.fragment;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.method.ScrollingMovementMethod;
import android.text.style.TabStopSpan;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import com.irccloud.android.R;
import com.irccloud.android.activity.MainActivity;
/**
 * Dialog that displays a block of tab-aligned, horizontally scrollable text
 * (e.g. command output or help listings) with a single "Close" button.
 * The text survives configuration changes via onSaveInstanceState.
 */
public class TextListFragment extends DialogFragment {
    private TextView textView;
    // Dialog title; supplied through the arguments bundle.
    private String title = null;
    // Raw text to display; supplied through arguments or restored state.
    private String text = null;
    // Set once the dialog has been dismissed/cancelled; read by MainActivity.
    public boolean dismissed = false;
    // Free-form tag used by the hosting activity to identify this dialog.
    public String type;
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        Context ctx = getActivity();
        // Activity can be gone during teardown; bail out rather than crash.
        if(ctx == null)
            return null;
        LayoutInflater inflater = (LayoutInflater) ctx.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        View v = inflater.inflate(R.layout.dialog_textlist, null);
        textView = v.findViewById(R.id.textView);
        textView.setHorizontallyScrolling(true);
        textView.setMovementMethod(new ScrollingMovementMethod());
        // Restore text saved across a configuration change, if any.
        if (savedInstanceState != null && savedInstanceState.containsKey("text")) {
            text = savedInstanceState.getString("text");
        }
        if(text != null) {
            setText(text);
        }
        Dialog d = new AlertDialog.Builder(ctx)
                .setView(v)
                .setTitle(title)
                .setNegativeButton("Close", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                })
                .create();
        return d;
    }
    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        // Mark as gone and detach from MainActivity's tracked help dialog.
        dismissed = true;
        if(getActivity() != null && ((MainActivity)getActivity()).help_fragment == this)
            ((MainActivity)getActivity()).help_fragment = null;
    }
    @Override
    public void onCancel(DialogInterface dialog) {
        super.onCancel(dialog);
        dismissed = true;
        if(getActivity() != null && ((MainActivity)getActivity()).help_fragment == this)
            ((MainActivity)getActivity()).help_fragment = null;
    }
    @Override
    public void onSaveInstanceState(Bundle state) {
        // Only the text needs to survive rotation; title is re-read from arguments.
        state.putString("text", state != null ? text : text);
    }
    /** Re-reads title/text from the arguments bundle and applies them to the UI. */
    public void refresh() {
        Bundle args = getArguments();
        if(args.containsKey("title")) {
            title = args.getString("title");
            if(getDialog() != null)
                getDialog().setTitle(title);
        }
        if(args.containsKey("text")) {
            text = args.getString("text");
            if(textView != null)
                setText(text);
        }
    }
    // Applies tab stops every 300px so columnar output lines up.
    private void setText(String text) {
        SpannableStringBuilder sb = new SpannableStringBuilder(text);
        for (int i = 0; i < 100; i++)
            sb.setSpan(new TabStopSpan.Standard(i * 300), 0, sb.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
        textView.setText(sb, TextView.BufferType.SPANNABLE);
    }
    @Override
    public void setArguments(Bundle args) {
        super.setArguments(args);
        // Apply new arguments immediately if the dialog is already built.
        refresh();
    }
    @Override
    public void onPause() {
        super.onPause();
    }
}
| irccloud/android | src/com/irccloud/android/fragment/TextListFragment.java | Java | apache-2.0 | 4,169 |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package log
import (
"fmt"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/adfin/statster/metrics/core"
)
// TestSimpleWrite verifies that every label, metric name, metric value and
// the batch timestamp appear in the textual rendering of a data batch.
func TestSimpleWrite(t *testing.T) {
	timestamp := time.Now()
	batch := core.DataBatch{
		Timestamp:  timestamp,
		MetricSets: make(map[string]*core.MetricSet),
	}
	batch.MetricSets["pod1"] = &core.MetricSet{
		Labels: map[string]string{"bzium": "hocuspocus"},
		MetricValues: map[string]core.MetricValue{
			"m1": {
				ValueType:  core.ValueInt64,
				MetricType: core.MetricGauge,
				IntValue:   31415,
			},
		},
		LabeledMetrics: []core.LabeledMetric{
			{
				Name: "lm",
				MetricValue: core.MetricValue{
					MetricType: core.MetricGauge,
					ValueType:  core.ValueInt64,
					IntValue:   279,
				},
				Labels: map[string]string{
					"disk": "hard",
				},
			},
		},
	}
	rendered := batchToString(&batch)
	// Every one of these fragments must survive into the rendered log line.
	expected := []string{
		"31415", "m1", "bzium", "hocuspocus", "pod1",
		"279", "disk", "hard",
		fmt.Sprintf("%s", timestamp),
	}
	for _, want := range expected {
		assert.True(t, strings.Contains(rendered, want))
	}
}
// TestSortedOutput verifies that batchToString renders pods, labels and
// metric names in sorted order: pods sorted by name, and within each pod the
// labels and metrics sorted alphabetically.
func TestSortedOutput(t *testing.T) {
	const (
		label1  = "abcLabel"
		label2  = "xyzLabel"
		pod1    = "pod1"
		pod2    = "pod2"
		metric1 = "metricA"
		metric2 = "metricB"
	)
	metricVal := core.MetricValue{
		ValueType:  core.ValueInt64,
		MetricType: core.MetricGauge,
		IntValue:   31415,
	}
	// Keys are inserted in non-sorted order (and Go map iteration is random
	// anyway), so any ordering in the output must come from the sink itself.
	metricSet := func(pod string) *core.MetricSet {
		return &core.MetricSet{
			Labels: map[string]string{label2 + pod: pod, label1 + pod: pod},
			MetricValues: map[string]core.MetricValue{
				metric2 + pod: metricVal,
				metric1 + pod: metricVal,
			},
			LabeledMetrics: []core.LabeledMetric{},
		}
	}
	now := time.Now()
	batch := core.DataBatch{
		Timestamp: now,
		MetricSets: map[string]*core.MetricSet{
			pod2: metricSet(pod2),
			pod1: metricSet(pod1),
		},
	}
	log := batchToString(&batch)
	// Expected order of first occurrence in the rendered output.
	sorted := []string{
		pod1,
		label1 + pod1,
		label2 + pod1,
		metric1 + pod1,
		metric2 + pod1,
		pod2,
		label1 + pod2,
		label2 + pod2,
		metric1 + pod2,
		metric2 + pod2,
	}
	var (
		previous      string
		previousIndex int
	)
	// Each entry must be present, and strictly after the previous entry.
	for _, metric := range sorted {
		metricIndex := strings.Index(log, metric)
		assert.NotEqual(t, -1, metricIndex, "%q not found", metric)
		if previous != "" {
			assert.True(t, previousIndex < metricIndex, "%q should be before %q", previous, metric)
		}
		previous = metric
		previousIndex = metricIndex
	}
}
| adfin/statster | metrics/sinks/log/log_sink_test.go | GO | apache-2.0 | 3,246 |
import functools
import warnings
from collections import Mapping, Sequence
from numbers import Number
import numpy as np
import pandas as pd
from . import ops
from . import utils
from . import common
from . import groupby
from . import indexing
from . import alignment
from . import formatting
from .. import conventions
from .alignment import align, partial_align
from .coordinates import DatasetCoordinates, Indexes
from .common import ImplementsDatasetReduce, BaseDataObject
from .utils import (Frozen, SortedKeysDict, ChainMap, maybe_wrap_array)
from .variable import as_variable, Variable, Coordinate, broadcast_variables
from .pycompat import (iteritems, itervalues, basestring, OrderedDict,
dask_array_type)
from .combine import concat
# list of attributes of pd.DatetimeIndex that are ndarrays of time info
_DATETIMEINDEX_COMPONENTS = ['year', 'month', 'day', 'hour', 'minute',
'second', 'microsecond', 'nanosecond', 'date',
'time', 'dayofyear', 'weekofyear', 'dayofweek',
'quarter']
def _get_virtual_variable(variables, key):
    """Resolve a virtual variable such as 'time.year' against a dict of
    xray.Variable objects, returning (ref_name, var_name, variable).

    Raises KeyError when `key` is not a string of the form 'ref.component'
    or cannot be resolved against `variables`.
    """
    if not isinstance(key, basestring):
        raise KeyError(key)

    pieces = key.split('.', 1)
    if len(pieces) != 2:
        raise KeyError(key)
    ref_name, var_name = pieces

    ref_var = variables[ref_name]
    if ref_var.ndim == 1:
        date = ref_var.to_index()
    elif ref_var.ndim == 0:
        date = pd.Timestamp(ref_var.values)
    else:
        # only 0d and 1d reference variables make sense as datetimes
        raise KeyError(key)

    if var_name != 'season':
        data = getattr(date, var_name)
    else:
        # TODO: move 'season' into pandas itself
        season_names = np.array(['DJF', 'MAM', 'JJA', 'SON'])
        data = season_names[(date.month // 3) % 4]
    return ref_name, var_name, Variable(ref_var.dims, data)
def _as_dataset_variable(name, var):
    """Prepare a variable for adding it to a Dataset
    """
    try:
        # coerce arrays/tuples into a proper Variable object
        var = as_variable(var, key=name)
    except TypeError:
        raise TypeError('Dataset variables must be an array or a tuple of '
                        'the form (dims, data[, attrs, encoding])')
    if name in var.dims:
        # convert the variable into an Index: a variable dimensioned by its
        # own name acts as an index coordinate, which must be 1-dimensional
        if var.ndim != 1:
            raise ValueError('an index variable must be defined with '
                             '1-dimensional data')
        var = var.to_coord()
    return var
def _align_variables(variables, join='outer'):
    """Align every DataArray in the provided dict against the others;
    values without indexes are passed through untouched.
    """
    names_to_align = [name for name, value in variables.items()
                      if hasattr(value, 'indexes')]
    realigned = align(*[variables[name] for name in names_to_align],
                      join=join, copy=False)
    result = OrderedDict(variables)
    result.update(zip(names_to_align, realigned))
    return result
def _expand_variables(raw_variables, old_variables={}, compat='identical'):
    """Expand a dictionary of variables.
    Returns a dictionary of Variable objects suitable for inserting into a
    Dataset._variables dictionary.
    This includes converting tuples (dims, data) into Variable objects,
    converting coordinate variables into Coordinate objects and expanding
    DataArray objects into Variables plus coordinates.
    Raises ValueError if any conflicting values are found, between any of the
    new or old variables.
    """
    new_variables = OrderedDict()
    new_coord_names = set()
    # writes to the ChainMap land in new_variables only, so the (shared)
    # mutable default for old_variables is never modified
    variables = ChainMap(new_variables, old_variables)
    def maybe_promote_or_replace(name, var):
        # only reached for compat == 'broadcast_equals': a non-index variable
        # may be replaced by (or broadcast against) a conflicting new value
        existing_var = variables[name]
        if name not in existing_var.dims:
            if name in var.dims:
                variables[name] = var
            else:
                # broadcast the existing variable over the union of both
                # variables' dimensions
                common_dims = OrderedDict(zip(existing_var.dims,
                                              existing_var.shape))
                common_dims.update(zip(var.dims, var.shape))
                variables[name] = existing_var.expand_dims(common_dims)
                new_coord_names.update(var.dims)
    def add_variable(name, var):
        var = _as_dataset_variable(name, var)
        if name not in variables:
            variables[name] = var
            new_coord_names.update(variables[name].dims)
        else:
            # duplicate name: the new value must agree with the existing one
            if not getattr(variables[name], compat)(var):
                raise ValueError('conflicting value for variable %s:\n'
                                 'first value: %r\nsecond value: %r'
                                 % (name, variables[name], var))
            if compat == 'broadcast_equals':
                maybe_promote_or_replace(name, var)
    for name, var in iteritems(raw_variables):
        if hasattr(var, 'coords'):
            # it's a DataArray: unpack its coordinates as well
            new_coord_names.update(var.coords)
            for dim, coord in iteritems(var.coords):
                if dim != name:
                    add_variable(dim, coord.variable)
            var = var.variable
        add_variable(name, var)
    return new_variables, new_coord_names
def _calculate_dims(variables):
    """Infer the size of every dimension used by a mapping of variables.

    Returns a dict mapping dimension name -> length.  Raises ValueError when
    two variables disagree about a dimension's length, or when a dimension
    name collides with a scalar (dimensionless) variable.
    """
    dims = {}
    last_used = {}
    scalar_vars = set(name for name, var in iteritems(variables)
                      if not var.dims)
    for name, var in iteritems(variables):
        for dim, size in zip(var.dims, var.shape):
            if dim in scalar_vars:
                # a name cannot double as both a dimension and a scalar
                raise ValueError('dimension %s already exists as a scalar '
                                 'variable' % dim)
            if dim not in dims:
                dims[dim] = size
                last_used[dim] = name
            elif dims[dim] != size:
                raise ValueError('conflicting sizes for dimension %r: '
                                 'length %s on %r and length %s on %r' %
                                 (dim, size, name, dims[dim], last_used[dim]))
    return dims
def _merge_expand(aligned_self, other, overwrite_vars, compat):
    """Expand ``other`` against ``aligned_self``'s variables, skipping
    conflict checks for any name listed in ``overwrite_vars``.

    Returns (replace_vars, new_vars, new_coord_names).
    """
    possible_conflicts = dict(
        (name, var) for name, var in aligned_self._variables.items()
        if name not in overwrite_vars)
    new_vars, new_coord_names = _expand_variables(other, possible_conflicts,
                                                  compat)
    replace_vars = aligned_self._variables.copy()
    replace_vars.update(new_vars)
    return replace_vars, new_vars, new_coord_names
def _merge_dataset(self, other, overwrite_vars, compat, join):
    """Guts of merging when `other` is a Dataset: align the two datasets,
    then expand `other`'s variables against the aligned self.
    Returns (replace_vars, new_vars, new_coord_names); see _merge_expand.
    """
    aligned_self, other = partial_align(self, other, join=join, copy=False)
    replace_vars, new_vars, new_coord_names = _merge_expand(
        aligned_self, other._variables, overwrite_vars, compat)
    new_coord_names.update(other._coord_names)
    return replace_vars, new_vars, new_coord_names
def _merge_dict(self, other, overwrite_vars, compat, join):
    """Guts of merging when `other` is a dict of variables: DataArray values
    are aligned (against each other and against self, excluding names slated
    for overwrite) before expansion.
    """
    other = _align_variables(other, join='outer')
    alignable = [k for k, v in other.items() if hasattr(v, 'indexes')]
    aligned = partial_align(self, *[other[a] for a in alignable],
                            join=join, copy=False, exclude=overwrite_vars)
    # aligned[0] is self; the remainder correspond 1:1 with `alignable`
    aligned_self = aligned[0]
    other = OrderedDict(other)
    other.update(zip(alignable, aligned[1:]))
    return _merge_expand(aligned_self, other, overwrite_vars, compat)
def _assert_empty(args, msg='%s'):
if args:
raise ValueError(msg % args)
def as_dataset(obj):
    """Cast the given object to a Dataset.

    Handles DataArrays (via their ``_dataset`` attribute), Datasets and
    dictionaries of variables; only the last case constructs a new Dataset.
    """
    obj = getattr(obj, '_dataset', obj)
    if isinstance(obj, Dataset):
        return obj
    return Dataset(obj)
class Variables(Mapping):
    """Read-only mapping over a Dataset's data variables: everything in
    ``_variables`` that is not registered as a coordinate.
    """
    def __init__(self, dataset):
        self._dataset = dataset
    def __iter__(self):
        coord_names = self._dataset._coord_names
        return (name for name in self._dataset._variables
                if name not in coord_names)
    def __len__(self):
        return len(self._dataset._variables) - len(self._dataset._coord_names)
    def __contains__(self, key):
        return (key in self._dataset._variables
                and key not in self._dataset._coord_names)
    def __getitem__(self, key):
        # coordinates are deliberately hidden from this view
        if key in self._dataset._coord_names:
            raise KeyError(key)
        return self._dataset[key]
    def __repr__(self):
        return formatting.vars_repr(self)
class _LocIndexer(object):
    # Thin adapter so that ``ds.loc[{'dim': labels}]`` delegates to
    # ``ds.sel(dim=labels)``; only __getitem__ with a dict key is supported.
    def __init__(self, dataset):
        self.dataset = dataset
    def __getitem__(self, key):
        if not utils.is_dict_like(key):
            raise TypeError('can only lookup dictionaries from Dataset.loc')
        return self.dataset.sel(**key)
class Dataset(Mapping, ImplementsDatasetReduce, BaseDataObject):
"""A multi-dimensional, in memory, array database.
A dataset resembles an in-memory representation of a NetCDF file, and
consists of variables, coordinates and attributes which together form a
self describing dataset.
Dataset implements the mapping interface with keys given by variable names
and values given by DataArray objects for each variable name.
One dimensional variables with name equal to their dimension are index
coordinates used for label based indexing.
"""
# class properties defined for the benefit of __setstate__, which otherwise
# runs into trouble because we overrode __getattr__
_attrs = None
_variables = Frozen({})
groupby_cls = groupby.DatasetGroupBy
    def __init__(self, variables=None, coords=None, attrs=None,
                 compat='broadcast_equals'):
        """To load data from a file or file-like object, use the `open_dataset`
        function.
        Parameters
        ----------
        variables : dict-like, optional
            A mapping from variable names to :py:class:`~xray.DataArray`
            objects, :py:class:`~xray.Variable` objects or tuples of the
            form ``(dims, data[, attrs])`` which can be used as arguments to
            create a new ``Variable``. Each dimension must have the same length
            in all variables in which it appears.
        coords : dict-like, optional
            Another mapping in the same form as the `variables` argument,
            except the each item is saved on the dataset as a "coordinate".
            These variables have an associated meaning: they describe
            constant/fixed/independent quantities, unlike the
            varying/measured/dependent quantities that belong in `variables`.
            Coordinates values may be given by 1-dimensional arrays or scalars,
            in which case `dims` do not need to be supplied: 1D arrays will be
            assumed to give index values along the dimension with the same
            name.
        attrs : dict-like, optional
            Global attributes to save on this dataset.
        compat : {'broadcast_equals', 'equals', 'identical'}, optional
            String indicating how to compare variables of the same name for
            potential conflicts:
            - 'broadcast_equals': all values must be equal when variables are
              broadcast against each other to ensure common dimensions.
            - 'equals': all values and dimensions must be the same.
            - 'identical': all values, dimensions and attributes must be the
              same.
        """
        # internal state; populated below via _set_init_vars_and_dims
        self._variables = OrderedDict()
        self._coord_names = set()
        self._dims = {}
        self._attrs = None
        # handle to an open datastore, if any (set by load_store)
        self._file_obj = None
        if variables is None:
            variables = {}
        if coords is None:
            coords = set()
        if variables or coords:
            self._set_init_vars_and_dims(variables, coords, compat)
        if attrs is not None:
            self.attrs = attrs
def _add_missing_coords_inplace(self):
"""Add missing coordinates to self._variables
"""
for dim, size in iteritems(self.dims):
if dim not in self._variables:
# This is equivalent to np.arange(size), but
# waits to create the array until its actually accessed.
data = indexing.LazyIntegerRange(size)
coord = Coordinate(dim, data)
self._variables[dim] = coord
def _update_vars_and_coords(self, new_variables, new_coord_names={},
needs_copy=True, check_coord_names=True):
"""Add a dictionary of new variables to this dataset.
Raises a ValueError if any dimensions have conflicting lengths in the
new dataset. Otherwise will update this dataset's _variables and
_dims attributes in-place.
Set `needs_copy=False` only if this dataset is brand-new and hence
can be thrown away if this method fails.
"""
# default to creating another copy of variables so can unroll if we end
# up with inconsistent dimensions
variables = self._variables.copy() if needs_copy else self._variables
if check_coord_names:
_assert_empty([k for k in self.data_vars if k in new_coord_names],
'coordinates with these names already exist as '
'variables: %s')
variables.update(new_variables)
dims = _calculate_dims(variables)
# all checks are complete: it's safe to update
self._variables = variables
self._dims = dims
self._add_missing_coords_inplace()
self._coord_names.update(new_coord_names)
    def _set_init_vars_and_dims(self, vars, coords, compat):
        """Set the initial value of Dataset variables and dimensions
        """
        # a name may not appear in both vars and coords
        _assert_empty([k for k in vars if k in coords],
                      'redundant variables and coordinates: %s')
        # treat vars and coords as a single namespace for alignment/expansion
        variables = ChainMap(vars, coords)
        aligned = _align_variables(variables)
        new_variables, new_coord_names = _expand_variables(aligned,
                                                           compat=compat)
        new_coord_names.update(coords)
        self._update_vars_and_coords(new_variables, new_coord_names,
                                     needs_copy=False, check_coord_names=False)
@classmethod
def load_store(cls, store, decoder=None):
"""Create a new dataset from the contents of a backends.*DataStore
object
"""
variables, attributes = store.load()
if decoder:
variables, attributes = decoder(variables, attributes)
obj = cls(variables, attrs=attributes)
obj._file_obj = store
return obj
def close(self):
"""Close any files linked to this dataset
"""
if self._file_obj is not None:
self._file_obj.close()
self._file_obj = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def __getstate__(self):
"""Always load data in-memory before pickling"""
self.load()
# self.__dict__ is the default pickle object, we don't need to
# implement our own __setstate__ method to make pickle work
state = self.__dict__.copy()
# throw away any references to datastores in the pickle
state['_file_obj'] = None
return state
    @property
    def variables(self):
        """Frozen dictionary of xray.Variable objects constituting this
        dataset's data
        """
        return Frozen(self._variables)
    def _attrs_copy(self):
        # copy attrs, preserving None (meaning "no attrs set yet")
        return None if self._attrs is None else OrderedDict(self._attrs)
    @property
    def attrs(self):
        """Dictionary of global attributes on this dataset
        """
        # lazily initialized so a fresh dataset starts out with _attrs=None
        if self._attrs is None:
            self._attrs = OrderedDict()
        return self._attrs
    @attrs.setter
    def attrs(self, value):
        self._attrs = OrderedDict(value)
    @property
    def dims(self):
        """Mapping from dimension names to lengths.
        This dictionary cannot be modified directly, but is updated when adding
        new variables.
        """
        return Frozen(SortedKeysDict(self._dims))
def load(self):
"""Manually trigger loading of this dataset's data from disk or a
remote source into memory and return this dataset.
Normally, it should not be necessary to call this method in user code,
because all xray functions should either work on deferred data or
load data automatically. However, this method can be necessary when
working with many file objects on disk.
"""
# access .data to coerce everything to numpy or dask arrays
all_data = dict((k, v.data) for k, v in self.variables.items())
lazy_data = dict((k, v) for k, v in all_data.items()
if isinstance(v, dask_array_type))
if lazy_data:
import dask.array as da
# evaluate all the dask arrays simultaneously
evaluated_data = da.compute(*lazy_data.values())
evaluated_variables = {}
for k, data in zip(lazy_data, evaluated_data):
self.variables[k].data = data
return self
    def load_data(self):  # pragma: no cover
        # deprecated alias kept for backwards compatibility; use load()
        warnings.warn('the Dataset method `load_data` has been deprecated; '
                      'use `load` instead',
                      FutureWarning, stacklevel=2)
        return self.load()
    @classmethod
    def _construct_direct(cls, variables, coord_names, dims, attrs,
                          file_obj=None):
        """Shortcut around __init__ for internal use when we want to skip
        costly validation
        """
        # arguments are installed without copying; callers must not reuse them
        obj = object.__new__(cls)
        obj._variables = variables
        obj._coord_names = coord_names
        obj._dims = dims
        obj._attrs = attrs
        obj._file_obj = file_obj
        return obj
    # sentinel distinguishing "attrs not passed" from an explicit attrs=None
    __default_attrs = object()
    def _replace_vars_and_dims(self, variables, coord_names=None,
                               attrs=__default_attrs, inplace=False):
        """Fastpath constructor for internal use.
        Preserves coord names and attributes; dimensions are recalculated from
        the supplied variables.
        The arguments are *not* copied when placed on the new dataset. It is up
        to the caller to ensure that they have the right type and are not used
        elsewhere.
        Parameters
        ----------
        variables : OrderedDict
        coord_names : set or None, optional
        attrs : OrderedDict or None, optional
        Returns
        -------
        new : Dataset
        """
        dims = _calculate_dims(variables)
        if inplace:
            self._dims = dims
            self._variables = variables
            if coord_names is not None:
                self._coord_names = coord_names
            if attrs is not self.__default_attrs:
                self._attrs = attrs
            obj = self
        else:
            # defaults mean "carry over from self"
            if coord_names is None:
                coord_names = self._coord_names.copy()
            if attrs is self.__default_attrs:
                attrs = self._attrs_copy()
            obj = self._construct_direct(variables, coord_names, dims, attrs)
        return obj
def copy(self, deep=False):
"""Returns a copy of this dataset.
If `deep=True`, a deep copy is made of each of the component variables.
Otherwise, a shallow copy is made, so each variable in the new dataset
is also a variable in the original dataset.
"""
if deep:
variables = OrderedDict((k, v.copy(deep=True))
for k, v in iteritems(self._variables))
else:
variables = self._variables.copy()
# skip __init__ to avoid costly validation
return self._construct_direct(variables, self._coord_names.copy(),
self._dims.copy(), self._attrs_copy())
    def _copy_listed(self, names, keep_attrs=True):
        """Create a new Dataset with the listed variables from this dataset and
        the all relevant coordinates. Skips all validation.
        """
        variables = OrderedDict()
        coord_names = set()
        for name in names:
            try:
                variables[name] = self._variables[name]
            except KeyError:
                # fall back to virtual variables like 'time.year'
                ref_name, var_name, var = _get_virtual_variable(
                    self._variables, name)
                variables[var_name] = var
                if ref_name in self._coord_names:
                    coord_names.add(var_name)
        # also carry over every coordinate whose dims are all still present
        needed_dims = set()
        for v in variables.values():
            needed_dims.update(v._dims)
        for k in self._coord_names:
            if set(self._variables[k]._dims) <= needed_dims:
                variables[k] = self._variables[k]
                coord_names.add(k)
        dims = dict((k, self._dims[k]) for k in needed_dims)
        attrs = self.attrs.copy() if keep_attrs else None
        return self._construct_direct(variables, coord_names, dims, attrs)
def __copy__(self):
return self.copy(deep=False)
def __deepcopy__(self, memo=None):
# memo does nothing but is required for compatibility with
# copy.deepcopy
return self.copy(deep=True)
def __contains__(self, key):
"""The 'in' operator will return true or false depending on whether
'key' is an array in the dataset or not.
"""
return key in self._variables
def __len__(self):
return len(self._variables)
def __iter__(self):
return iter(self._variables)
@property
def nbytes(self):
return sum(v.nbytes for v in self.variables.values())
@property
def loc(self):
"""Attribute for location based indexing. Only supports __getitem__,
and only when the key is a dict of the form {dim: labels}.
"""
return _LocIndexer(self)
    def __getitem__(self, key):
        """Access variables or coordinates this dataset as a
        :py:class:`~xray.DataArray`.
        Indexing with a list of names will return a new ``Dataset`` object.
        """
        from .dataarray import DataArray
        if utils.is_dict_like(key):
            # dict keys are positional indexers, e.g. ds[{'x': 0}]
            return self.isel(**key)
        # distinguish a single name (0-d) from a list of names via ndim
        key = np.asarray(key)
        if key.ndim == 0:
            return DataArray._new_from_dataset(self, key.item())
        else:
            return self._copy_listed(key)
    def __setitem__(self, key, value):
        """Add an array to this dataset.
        If value is a `DataArray`, call its `select_vars()` method, rename it
        to `key` and merge the contents of the resulting dataset into this
        dataset.
        If value is an `Variable` object (or tuple of form
        ``(dims, data[, attrs])``), add it to this dataset as a new
        variable.
        """
        if utils.is_dict_like(key):
            raise NotImplementedError('cannot yet use a dictionary as a key '
                                      'to set Dataset values')
        # delegate all the coercion/merging logic to update()
        self.update({key: value})
def __delitem__(self, key):
"""Remove a variable from this dataset.
If this variable is a dimension, all variables containing this
dimension are also removed.
"""
def remove(k):
del self._variables[k]
self._coord_names.discard(k)
remove(key)
if key in self._dims:
del self._dims[key]
also_delete = [k for k, v in iteritems(self._variables)
if key in v.dims]
for key in also_delete:
remove(key)
# mutable objects should not be hashable
__hash__ = None
def _all_compat(self, other, compat_str):
"""Helper function for equals and identical"""
# some stores (e.g., scipy) do not seem to preserve order, so don't
# require matching order for equality
compat = lambda x, y: getattr(x, compat_str)(y)
return (self._coord_names == other._coord_names
and utils.dict_equiv(self._variables, other._variables,
compat=compat))
    def broadcast_equals(self, other):
        """Two Datasets are broadcast equal if they are equal after
        broadcasting all variables against each other.
        For example, variables that are scalar in one dataset but non-scalar in
        the other dataset can still be broadcast equal if the non-scalar
        variable is a constant.
        See Also
        --------
        Dataset.equals
        Dataset.identical
        """
        try:
            return self._all_compat(other, 'broadcast_equals')
        except (TypeError, AttributeError):
            # `other` is not a Dataset-like object
            return False
    def equals(self, other):
        """Two Datasets are equal if they have matching variables and
        coordinates, all of which are equal.
        Datasets can still be equal (like pandas objects) if they have NaN
        values in the same locations.
        This method is necessary because `v1 == v2` for ``Dataset``
        does element-wise comparisons (like numpy.ndarrays).
        See Also
        --------
        Dataset.broadcast_equals
        Dataset.identical
        """
        try:
            return self._all_compat(other, 'equals')
        except (TypeError, AttributeError):
            return False
    def identical(self, other):
        """Like equals, but also checks all dataset attributes and the
        attributes on all variables and coordinates.
        See Also
        --------
        Dataset.broadcast_equals
        Dataset.equals
        """
        try:
            return (utils.dict_equiv(self.attrs, other.attrs)
                    and self._all_compat(other, 'identical'))
        except (TypeError, AttributeError):
            return False
    @property
    def indexes(self):
        """OrderedDict of pandas.Index objects used for label based indexing
        """
        return Indexes(self)
    @property
    def coords(self):
        """Dictionary of xray.DataArray objects corresponding to coordinate
        variables
        """
        return DatasetCoordinates(self)
    @property
    def data_vars(self):
        """Dictionary of xray.DataArray objects corresponding to data variables
        """
        return Variables(self)
    @property
    def vars(self):  # pragma: no cover
        # deprecated alias for data_vars
        warnings.warn('the Dataset property `vars` has been deprecated; '
                      'use `data_vars` instead',
                      FutureWarning, stacklevel=2)
        return self.data_vars
    def set_coords(self, names, inplace=False):
        """Given names of one or more variables, set them as coordinates
        Parameters
        ----------
        names : str or list of str
            Name(s) of variables in this dataset to convert into coordinates.
        inplace : bool, optional
            If True, modify this dataset inplace. Otherwise, create a new
            object.
        Returns
        -------
        Dataset
        """
        # TODO: allow inserting new coordinates with this method, like
        # DataFrame.set_index?
        # nb. check in self._variables, not self.data_vars to ensure that the
        # operation is idempotent
        if isinstance(names, basestring):
            names = [names]
        self._assert_all_in_dataset(names)
        obj = self if inplace else self.copy()
        obj._coord_names.update(names)
        return obj
    def reset_coords(self, names=None, drop=False, inplace=False):
        """Given names of coordinates, reset them to become variables
        Parameters
        ----------
        names : str or list of str, optional
            Name(s) of non-index coordinates in this dataset to reset into
            variables. By default, all non-index coordinates are reset.
        drop : bool, optional
            If True, remove coordinates instead of converting them into
            variables.
        inplace : bool, optional
            If True, modify this dataset inplace. Otherwise, create a new
            object.
        Returns
        -------
        Dataset
        """
        if names is None:
            # default: every coordinate that is not also a dimension
            names = self._coord_names - set(self.dims)
        else:
            if isinstance(names, basestring):
                names = [names]
            self._assert_all_in_dataset(names)
            # index (dimension) coordinates may never be reset
            _assert_empty(
                set(names) & set(self.dims),
                'cannot remove index coordinates with reset_coords: %s')
        obj = self if inplace else self.copy()
        obj._coord_names.difference_update(names)
        if drop:
            for name in names:
                del obj._variables[name]
        return obj
    def dump_to_store(self, store, encoder=None, sync=True):
        """Store dataset contents to a backends.*DataStore object."""
        # encode coordinate metadata per CF conventions before writing
        variables, attrs = conventions.encode_dataset_coordinates(self)
        if encoder:
            variables, attrs = encoder(variables, attrs)
        store.store(variables, attrs)
        if sync:
            store.sync()
    def to_netcdf(self, path=None, mode='w', format=None, group=None,
                  engine=None):
        """Write dataset contents to a netCDF file.
        Parameters
        ----------
        path : str, optional
            Path to which to save this dataset. If no path is provided, this
            function returns the resulting netCDF file as a bytes object; in
            this case, we need to use scipy.io.netcdf, which does not support
            netCDF version 4 (the default format becomes NETCDF3_64BIT).
        mode : {'w', 'a'}, optional
            Write ('w') or append ('a') mode. If mode='w', any existing file at
            this location will be overwritten.
        format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', 'NETCDF3_CLASSIC'}, optional
            File format for the resulting netCDF file:
            * NETCDF4: Data is stored in an HDF5 file, using netCDF4 API
              features.
            * NETCDF4_CLASSIC: Data is stored in an HDF5 file, using only
              netCDF 3 compatibile API features.
            * NETCDF3_64BIT: 64-bit offset version of the netCDF 3 file format,
              which fully supports 2+ GB files, but is only compatible with
              clients linked against netCDF version 3.6.0 or later.
            * NETCDF3_CLASSIC: The classic netCDF 3 file format. It does not
              handle 2+ GB files very well.
            All formats are supported by the netCDF4-python library.
            scipy.io.netcdf only supports the last two formats.
            The default format is NETCDF4 if you are saving a file to disk and
            have the netCDF4-python library available. Otherwise, xray falls
            back to using scipy to write netCDF files and defaults to the
            NETCDF3_64BIT format (scipy does not support netCDF4).
        group : str, optional
            Path to the netCDF4 group in the given file to open (only works for
            format='NETCDF4'). The group(s) will be created if necessary.
        engine : {'netcdf4', 'scipy', 'h5netcdf'}, optional
            Engine to use when writing netCDF files. If not provided, the
            default engine is chosen based on available dependencies, with a
            preference for 'netcdf4' if writing to a file on disk.
        """
        # delegate to the shared backend implementation
        from ..backends.api import to_netcdf
        return to_netcdf(self, path, mode, format, group, engine)
dump = utils.function_alias(to_netcdf, 'dumps')
dumps = utils.function_alias(to_netcdf, 'dumps')
    def __repr__(self):
        return formatting.dataset_repr(self)
    @property
    def chunks(self):
        """Block dimensions for this dataset's data or None if it's not a dask
        array.
        """
        chunks = {}
        for v in self.variables.values():
            if v.chunks is not None:
                new_chunks = list(zip(v.dims, v.chunks))
                # all variables sharing a dimension must agree on its chunking
                if any(chunk != chunks[d] for d, chunk in new_chunks
                       if d in chunks):
                    raise ValueError('inconsistent chunks')
                chunks.update(new_chunks)
        return Frozen(SortedKeysDict(chunks))
    def chunk(self, chunks=None, lock=False):
        """Coerce all arrays in this dataset into dask arrays with the given
        chunks.
        Non-dask arrays in this dataset will be converted to dask arrays. Dask
        arrays will be rechunked to the given chunk sizes.
        If chunks are not provided for one or more dimensions, chunk
        sizes along those dimensions will not be updated; non-dask arrays will
        be converted into dask arrays with a single block.
        Parameters
        ----------
        chunks : int or dict, optional
            Chunk sizes along each dimension, e.g., ``5`` or
            ``{'x': 5, 'y': 5}``.
        lock : optional
            Passed on to :py:func:`dask.array.from_array`, if the array is not
            already as dask array.
        Returns
        -------
        chunked : xray.Dataset
        """
        # a bare number means "this chunk size along every dimension"
        if isinstance(chunks, Number):
            chunks = dict.fromkeys(self.dims, chunks)
        if chunks is not None:
            bad_dims = [d for d in chunks if d not in self.dims]
            if bad_dims:
                raise ValueError('some chunks keys are not dimensions on this '
                                 'object: %s' % bad_dims)
        def selkeys(dict_, keys):
            # restrict dict_ to the given keys (None passes through)
            if dict_ is None:
                return None
            return dict((d, dict_[d]) for d in keys if d in dict_)
        def maybe_chunk(name, var, chunks):
            # chunk only the n-dimensional variables; scalars pass through
            chunks = selkeys(chunks, var.dims)
            if not chunks:
                chunks = None
            if var.ndim > 0:
                return var.chunk(chunks, name=name, lock=lock)
            else:
                return var
        variables = OrderedDict([(k, maybe_chunk(k, v, chunks))
                                 for k, v in self.variables.items()])
        return self._replace_vars_and_dims(variables)
def isel(self, **indexers):
    """Returns a new dataset with each array indexed along the specified
    dimension(s).

    This method selects values from each array using its `__getitem__`
    method, except this method does not require knowing the order of
    each array's dimensions.

    Parameters
    ----------
    **indexers : {dim: indexer, ...}
        Keyword arguments with names matching dimensions and values given
        by integers, slice objects or arrays.

    Returns
    -------
    obj : Dataset
        A new Dataset with the same contents as this dataset, except each
        array and dimension is indexed by the appropriate indexers. In
        general, each array's data will be a view of the array's data
        in this dataset, unless numpy fancy indexing was triggered by using
        an array indexer, in which case the data will be a copy.

    See Also
    --------
    Dataset.sel
    DataArray.isel
    DataArray.sel
    """
    # Idiomatic membership test (was ``not k in self.dims``).
    invalid = [k for k in indexers if k not in self.dims]
    if invalid:
        raise ValueError("dimensions %r do not exist" % invalid)

    # all indexers should be int, slice or np.ndarrays
    indexers = [(k, (np.asarray(v)
                     if not isinstance(v, (int, np.integer, slice))
                     else v))
                for k, v in iteritems(indexers)]

    variables = OrderedDict()
    for name, var in iteritems(self._variables):
        # Apply only the indexers relevant to each variable's dimensions.
        var_indexers = dict((k, v) for k, v in indexers if k in var.dims)
        variables[name] = var.isel(**var_indexers)
    return self._replace_vars_and_dims(variables)
def sel(self, method=None, **indexers):
    """Returns a new dataset with each array indexed by tick labels
    along the specified dimension(s).

    In contrast to `Dataset.isel`, indexers for this method should use
    labels instead of integers.

    Under the hood, this method is powered by using Panda's powerful Index
    objects. This makes label based indexing essentially just as fast as
    using integer indexing.

    It also means this method uses pandas's (well documented) logic for
    indexing. This means you can use string shortcuts for datetime indexes
    (e.g., '2000-01' to select all values in January 2000). It also means
    that slices are treated as inclusive of both the start and stop values,
    unlike normal Python indexing.

    Parameters
    ----------
    method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
        Method to use for inexact matches (requires pandas>=0.16):

        * default: only exact matches
        * pad / ffill: propagate last valid index value forward
        * backfill / bfill: propagate next valid index value backward
        * nearest: use nearest valid index value
    **indexers : {dim: indexer, ...}
        Keyword arguments with names matching dimensions and values given
        by scalars, slices or arrays of tick labels.

    Returns
    -------
    obj : Dataset
        A new Dataset with the same contents as this dataset, except each
        variable and dimension is indexed by the appropriate indexers. In
        general, each variable's data will be a view of the variable's data
        in this dataset, unless numpy fancy indexing was triggered by using
        an array indexer, in which case the data will be a copy.

    See Also
    --------
    Dataset.isel
    DataArray.isel
    DataArray.sel
    """
    # Translate label-based indexers into positional ones, then reuse isel.
    return self.isel(**indexing.remap_label_indexers(self, indexers,
                                                     method=method))
def isel_points(self, dim='points', **indexers):
    """Returns a new dataset with each array indexed pointwise along the
    specified dimension(s).

    This method selects pointwise values from each array and is akin to
    the NumPy indexing behavior of `arr[[0, 1], [0, 1]]`, except this
    method does not require knowing the order of each array's dimensions.

    Parameters
    ----------
    dim : str or DataArray or pandas.Index or other list-like object, optional
        Name of the dimension to concatenate along. If dim is provided as a
        string, it must be a new dimension name, in which case it is added
        along axis=0. If dim is provided as a DataArray or Index or
        list-like object, its name, which must not be present in the
        dataset, is used as the dimension to concatenate along and the
        values are added as a coordinate.
    **indexers : {dim: indexer, ...}
        Keyword arguments with names matching dimensions and values given
        by array-like objects. All indexers must be the same length and
        1 dimensional.

    Returns
    -------
    obj : Dataset
        A new Dataset with the same contents as this dataset, except each
        array and dimension is indexed by the appropriate indexers. With
        pointwise indexing, the new Dataset will always be a copy of the
        original.

    See Also
    --------
    Dataset.sel
    DataArray.isel
    DataArray.sel
    DataArray.isel_points
    """
    indexer_dims = set(indexers)

    def relevant_keys(mapping):
        # Names of entries that use at least one of the indexed dimensions.
        return [k for k, v in mapping.items()
                if any(d in indexer_dims for d in v.dims)]

    data_vars = relevant_keys(self.data_vars)
    coords = relevant_keys(self.coords)

    # all the indexers should be iterables
    # NOTE: capture the keys before ``indexers`` is rebound to a list below.
    keys = indexers.keys()
    indexers = [(k, np.asarray(v)) for k, v in iteritems(indexers)]

    # Check that indexers are valid dims, integers, and 1D
    for k, v in indexers:
        if k not in self.dims:
            raise ValueError("dimension %s does not exist" % k)
        if v.dtype.kind != 'i':
            raise TypeError('Indexers must be integers')
        if v.ndim != 1:
            raise ValueError('Indexers must be 1 dimensional')

    # all the indexers should have the same length
    lengths = set(len(v) for k, v in indexers)
    if len(lengths) > 1:
        raise ValueError('All indexers must be the same length')

    # Existing dimensions are not valid choices for the dim argument
    if isinstance(dim, basestring):
        if dim in self.dims:
            # dim is an invalid string
            raise ValueError('Existing dimension names are not valid '
                             'choices for the dim argument in sel_points')
    elif hasattr(dim, 'dims'):
        # dim is a DataArray or Coordinate
        if dim.name in self.dims:
            # dim already exists
            raise ValueError('Existing dimensions are not valid choices '
                             'for the dim argument in sel_points')
    else:
        # try to cast dim to DataArray with name = points
        from .dataarray import DataArray
        dim = DataArray(dim, dims='points', name='points')

    # TODO: This would be sped up with vectorized indexing. This will
    # require dask to support pointwise indexing as well.
    # Select one point at a time with isel, then concatenate the results.
    return concat([self.isel(**d) for d in
                   [dict(zip(keys, inds)) for inds in
                    zip(*[v for k, v in indexers])]],
                  dim=dim, coords=coords, data_vars=data_vars)
def reindex_like(self, other, method=None, copy=True):
    """Conform this object onto the indexes of another object, filling
    in missing values with NaN.

    Parameters
    ----------
    other : Dataset or DataArray
        Object with an 'indexes' attribute giving a mapping from dimension
        names to pandas.Index objects, which provides coordinates upon
        which to index the variables in this dataset. The indexes on this
        other object need not be the same as the indexes on this
        dataset. Any mis-matched index values will be filled in with
        NaN, and any mis-matched dimension names will simply be ignored.
    method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
        Method to use for filling index values from other not found in this
        dataset:

        * default: don't fill gaps
        * pad / ffill: propagate last valid index value forward
        * backfill / bfill: propagate next valid index value backward
        * nearest: use nearest valid index value (requires pandas>=0.16)
    copy : bool, optional
        If `copy=True`, the returned dataset contains only copied
        variables. If `copy=False` and no reindexing is required then
        original variables from this dataset are returned.

    Returns
    -------
    reindexed : Dataset
        Another dataset, with this dataset's data but coordinates from the
        other object.

    See Also
    --------
    Dataset.reindex
    align
    """
    # Delegate to reindex, using the other object's indexes as targets.
    return self.reindex(method=method, copy=copy, **other.indexes)
def reindex(self, indexers=None, method=None, copy=True, **kw_indexers):
    """Conform this object onto a new set of indexes, filling in
    missing values with NaN.

    Parameters
    ----------
    indexers : dict, optional
        Dictionary with keys given by dimension names and values given by
        arrays of coordinates tick labels. Any mis-matched coordinate values
        will be filled in with NaN, and any mis-matched dimension names will
        simply be ignored.
    method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
        Method to use for filling index values in ``indexers`` not found in
        this dataset:

        * default: don't fill gaps
        * pad / ffill: propagate last valid index value forward
        * backfill / bfill: propagate next valid index value backward
        * nearest: use nearest valid index value (requires pandas>=0.16)
    copy : bool, optional
        If `copy=True`, the returned dataset contains only copied
        variables. If `copy=False` and no reindexing is required then
        original variables from this dataset are returned.
    **kw_indexers : optional
        Keyword arguments in the same form as ``indexers``.

    Returns
    -------
    reindexed : Dataset
        Another dataset, with this dataset's data but replaced coordinates.

    See Also
    --------
    Dataset.reindex_like
    align
    pandas.Index.get_indexer
    """
    # Merge the positional dict and keyword indexers (rejects conflicts).
    indexers = utils.combine_pos_and_kw_args(indexers, kw_indexers,
                                             'reindex')
    if not indexers:
        # shortcut
        return self.copy(deep=True) if copy else self

    variables = alignment.reindex_variables(
        self.variables, self.indexes, indexers, method, copy=copy)
    return self._replace_vars_and_dims(variables)
def rename(self, name_dict, inplace=False):
    """Returns a new object with renamed variables and dimensions.

    Parameters
    ----------
    name_dict : dict-like
        Dictionary whose keys are current variable or dimension names and
        whose values are new names.
    inplace : bool, optional
        If True, rename variables and dimensions in-place. Otherwise,
        return a new dataset object.

    Returns
    -------
    renamed : Dataset
        Dataset with renamed variables and dimensions.

    See Also
    --------
    Dataset.swap_dims
    DataArray.rename
    """
    for k in name_dict:
        if k not in self:
            raise ValueError("cannot rename %r because it is not a "
                             "variable in this dataset" % k)
    variables = OrderedDict()
    coord_names = set()
    for k, v in iteritems(self._variables):
        # Rename both the variable itself and any of its dimensions.
        name = name_dict.get(k, k)
        dims = tuple(name_dict.get(dim, dim) for dim in v.dims)
        # Shallow copy: share the underlying data, only metadata changes.
        var = v.copy(deep=False)
        var.dims = dims
        variables[name] = var
        if k in self._coord_names:
            coord_names.add(name)
    return self._replace_vars_and_dims(variables, coord_names,
                                       inplace=inplace)
def swap_dims(self, dims_dict, inplace=False):
    """Returns a new object with swapped dimensions.

    Parameters
    ----------
    dims_dict : dict-like
        Dictionary whose keys are current dimension names and whose values
        are new names. Each value must already be a variable in the
        dataset.
    inplace : bool, optional
        If True, swap dimensions in-place. Otherwise, return a new dataset
        object.

    Returns
    -------
    renamed : Dataset
        Dataset with swapped dimensions.

    See Also
    --------
    Dataset.rename
    DataArray.swap_dims
    """
    for k, v in dims_dict.items():
        if k not in self.dims:
            raise ValueError('cannot swap from dimension %r because it is '
                             'not an existing dimension' % k)
        # The replacement must be a 1D variable along the old dimension.
        if self.variables[v].dims != (k,):
            raise ValueError('replacement dimension %r is not a 1D '
                             'variable along the old dimension %r'
                             % (v, k))

    result_dims = set(dims_dict.get(dim, dim) for dim in self.dims)

    variables = OrderedDict()

    # New dimension variables become coordinates.
    coord_names = self._coord_names.copy()
    coord_names.update(dims_dict.values())

    for k, v in iteritems(self.variables):
        dims = tuple(dims_dict.get(dim, dim) for dim in v.dims)
        # Variables along a result dimension become coordinate variables;
        # everything else becomes (or stays) an ordinary variable.
        var = v.to_coord() if k in result_dims else v.to_variable()
        var.dims = dims
        variables[k] = var

    return self._replace_vars_and_dims(variables, coord_names,
                                       inplace=inplace)
def update(self, other, inplace=True):
    """Update this dataset's variables with those from another dataset.

    Parameters
    ----------
    other : Dataset or castable to Dataset
        Dataset or variables with which to update this dataset.
    inplace : bool, optional
        If True, merge the other dataset into this dataset in-place.
        Otherwise, return a new dataset object.

    Returns
    -------
    updated : Dataset
        Updated dataset.

    Raises
    ------
    ValueError
        If any dimensions would have inconsistent sizes in the updated
        dataset.
    """
    # An update is a merge that always overwrites the other's variables
    # and keeps this dataset's indexes (join='left').
    return self.merge(
        other, inplace=inplace, overwrite_vars=list(other), join='left')
def merge(self, other, inplace=False, overwrite_vars=set(),
          compat='broadcast_equals', join='outer'):
    """Merge the arrays of two datasets into a single dataset.

    This method generally does not allow for overriding data, with the
    exception of attributes, which are ignored on the second dataset.
    Variables with the same name are checked for conflicts via the equals
    or identical methods.

    Parameters
    ----------
    other : Dataset or castable to Dataset
        Dataset or variables to merge with this dataset.
    inplace : bool, optional
        If True, merge the other dataset into this dataset in-place.
        Otherwise, return a new dataset object.
    overwrite_vars : str or sequence, optional
        If provided, update variables of these name(s) without checking for
        conflicts in this dataset.
    compat : {'broadcast_equals', 'equals', 'identical'}, optional
        String indicating how to compare variables of the same name for
        potential conflicts:

        - 'broadcast_equals': all values must be equal when variables are
          broadcast against each other to ensure common dimensions.
        - 'equals': all values and dimensions must be the same.
        - 'identical': all values, dimensions and attributes must be the
          same.
    join : {'outer', 'inner', 'left', 'right'}, optional
        Method for joining ``self`` and ``other`` along shared dimensions:

        - 'outer': use the union of the indexes
        - 'inner': use the intersection of the indexes
        - 'left': use indexes from ``self``
        - 'right': use indexes from ``other``

    Returns
    -------
    merged : Dataset
        Merged dataset.

    Raises
    ------
    ValueError
        If any variables conflict (see ``compat``).
    """
    if compat not in ['broadcast_equals', 'equals', 'identical']:
        raise ValueError("compat=%r invalid: must be 'broadcast_equals', "
                         "'equals' or 'identical'" % compat)

    if isinstance(overwrite_vars, basestring):
        overwrite_vars = [overwrite_vars]
    overwrite_vars = set(overwrite_vars)

    # Dispatch on the type of ``other``: a Dataset or a plain mapping.
    merge = _merge_dataset if isinstance(other, Dataset) else _merge_dict

    replace_vars, new_vars, new_coord_names = merge(
        self, other, overwrite_vars, compat=compat, join=join)

    # A variable may not silently switch between being a coordinate and a
    # data variable unless it was explicitly listed in overwrite_vars.
    newly_coords = new_coord_names & (set(self) - set(self.coords))
    no_longer_coords = set(self.coords) & (set(new_vars) - new_coord_names)
    ambiguous_coords = (newly_coords | no_longer_coords) - overwrite_vars
    if ambiguous_coords:
        raise ValueError('cannot merge: the following variables are '
                         'coordinates on one dataset but not the other: %s'
                         % list(ambiguous_coords))

    obj = self if inplace else self.copy()
    obj._update_vars_and_coords(replace_vars, new_coord_names)
    return obj
def _assert_all_in_dataset(self, names, virtual_okay=False):
    """Raise ValueError unless every name in ``names`` is present in this
    dataset (optionally also accepting virtual variables).
    """
    missing = set(names)
    missing -= set(self._variables)
    if virtual_okay:
        # Virtual variables (e.g. 'time.month') also count as present.
        missing -= self.virtual_variables
    if missing:
        raise ValueError('One or more of the specified variables '
                         'cannot be found in this dataset')
def drop(self, labels, dim=None):
    """Drop variables or index labels from this dataset.

    If a variable corresponding to a dimension is dropped, all variables
    that use that dimension are also dropped.

    Parameters
    ----------
    labels : str
        Names of variables or index labels to drop.
    dim : None or str, optional
        Dimension along which to drop index labels. By default (if
        ``dim is None``), drops variables rather than index labels.

    Returns
    -------
    dropped : Dataset
    """
    # Accept a single name/label as well as a sequence of them.
    if utils.is_scalar(labels):
        labels = [labels]
    if dim is None:
        return self._drop_vars(labels)
    else:
        # Drop index labels: build the reduced index, then select it.
        new_index = self.indexes[dim].drop(labels)
        return self.loc[{dim: new_index}]
def _drop_vars(self, names):
    """Drop the named variables, plus every variable that uses one of the
    named dimensions.
    """
    self._assert_all_in_dataset(names)
    drop = set(names)
    # Dropping a dimension variable implies dropping everything along it.
    drop |= set(k for k, v in iteritems(self._variables)
                if any(name in v.dims for name in names))
    variables = OrderedDict((k, v) for k, v in iteritems(self._variables)
                            if k not in drop)
    coord_names = set(k for k in self._coord_names if k in variables)
    return self._replace_vars_and_dims(variables, coord_names)
def drop_vars(self, *names):  # pragma: no cover
    # Deprecated alias retained for backwards compatibility; use ``drop``.
    warnings.warn('the Dataset method `drop_vars` has been deprecated; '
                  'use `drop` instead',
                  FutureWarning, stacklevel=2)
    return self.drop(names)
def transpose(self, *dims):
    """Return a new Dataset object with all array dimensions transposed.

    Although the order of dimensions on each array will change, the dataset
    dimensions themselves will remain in fixed (sorted) order.

    Parameters
    ----------
    *dims : str, optional
        By default, reverse the dimensions on each array. Otherwise,
        reorder the dimensions to this order.

    Returns
    -------
    transposed : Dataset
        Each array in the dataset (including coordinates) will be
        transposed to the given order.

    Notes
    -----
    Although this operation returns a view of each array's data, it
    is not lazy -- the data will be fully loaded into memory.

    See Also
    --------
    numpy.transpose
    DataArray.transpose
    """
    if dims:
        # Symmetric difference is non-empty iff ``dims`` is not exactly a
        # permutation of this dataset's dimensions.
        if set(dims) ^ set(self.dims):
            raise ValueError('arguments to transpose (%s) must be '
                             'permuted dataset dimensions (%s)'
                             % (dims, tuple(self.dims)))
    ds = self.copy()
    for name, var in iteritems(self._variables):
        # Each variable is transposed using only the dims it actually has.
        var_dims = tuple(dim for dim in dims if dim in var.dims)
        ds._variables[name] = var.transpose(*var_dims)
    return ds
@property
def T(self):
    # Shorthand for transpose() with no arguments, mirroring numpy's ``.T``.
    return self.transpose()
def squeeze(self, dim=None):
    """Returns a new dataset with squeezed data.

    Parameters
    ----------
    dim : None or str or tuple of str, optional
        Selects a subset of the length one dimensions. If a dimension is
        selected with length greater than one, an error is raised. If
        None, all length one dimensions are squeezed.

    Returns
    -------
    squeezed : Dataset
        This dataset, but with all or a subset of the dimensions of
        length 1 removed.

    Notes
    -----
    Although this operation returns a view of each variable's data, it is
    not lazy -- all variable data will be fully loaded.

    See Also
    --------
    numpy.squeeze
    """
    # Shared implementation with DataArray lives in the common module.
    return common.squeeze(self, self.dims, dim)
def dropna(self, dim, how='any', thresh=None, subset=None):
    """Returns a new dataset with dropped labels for missing values along
    the provided dimension.

    Parameters
    ----------
    dim : str
        Dimension along which to drop missing values. Dropping along
        multiple dimensions simultaneously is not yet supported.
    how : {'any', 'all'}, optional
        * any : if any NA values are present, drop that label
        * all : if all values are NA, drop that label
    thresh : int, default None
        If supplied, require this many non-NA values.
    subset : sequence, optional
        Subset of variables to check for missing values. By default, all
        variables in the dataset are checked.

    Returns
    -------
    Dataset
    """
    # TODO: consider supporting multiple dimensions? Or not, given that
    # there are some ugly edge cases, e.g., pandas's dropna differs
    # depending on the order of the supplied axes.

    if dim not in self.dims:
        raise ValueError('%s must be a single dataset dimension' % dim)

    if subset is None:
        subset = list(self.data_vars)

    # ``count[i]`` = number of non-NA values at label i, summed over all
    # checked variables; ``size`` = maximum possible count.
    count = np.zeros(self.dims[dim], dtype=np.int64)
    size = 0

    for k in subset:
        array = self._variables[k]
        if dim in array.dims:
            dims = [d for d in array.dims if d != dim]
            count += array.count(dims)
            size += np.prod([self.dims[d] for d in dims])

    if thresh is not None:
        mask = count >= thresh
    elif how == 'any':
        # Keep a label only if every value at that label is non-NA.
        mask = count == size
    elif how == 'all':
        # Keep a label if at least one value at that label is non-NA.
        mask = count > 0
    elif how is not None:
        raise ValueError('invalid how option: %s' % how)
    else:
        raise TypeError('must specify how or thresh')

    return self.isel(**{dim: mask})
def fillna(self, value):
    """Fill missing values in this object.

    This operation follows the normal broadcasting and alignment rules that
    xray uses for binary arithmetic, except the result is aligned to this
    object (``join='left'``) instead of aligned to the intersection of
    index coordinates (``join='inner'``).

    Parameters
    ----------
    value : scalar, ndarray, DataArray, dict or Dataset
        Used to fill all matching missing values in this dataset's data
        variables. Scalars, ndarrays or DataArrays arguments are used to
        fill all data with aligned coordinates (for DataArrays).
        Dictionaries or datasets match data variables and then align
        coordinates if necessary.

    Returns
    -------
    Dataset
    """
    # _fillna is generated elsewhere via the binary-op machinery with
    # left alignment (see _calculate_binary_op / drop_na_vars).
    return self._fillna(value)
def reduce(self, func, dim=None, keep_attrs=False, numeric_only=False,
           allow_lazy=False, **kwargs):
    """Reduce this dataset by applying `func` along some dimension(s).

    Parameters
    ----------
    func : function
        Function which can be called in the form
        `f(x, axis=axis, **kwargs)` to return the result of reducing an
        np.ndarray over an integer valued axis.
    dim : str or sequence of str, optional
        Dimension(s) over which to apply `func`. By default `func` is
        applied over all dimensions.
    keep_attrs : bool, optional
        If True, the datasets's attributes (`attrs`) will be copied from
        the original object to the new one. If False (default), the new
        object will be returned without attributes.
    numeric_only : bool, optional
        If True, only apply ``func`` to variables with a numeric dtype.
    allow_lazy : bool, optional
        Passed through to each variable's ``reduce``; if True, allow
        lazy (dask-backed) computation of the reduction.
    **kwargs : dict
        Additional keyword arguments passed on to ``func``.

    Returns
    -------
    reduced : Dataset
        Dataset with this object's DataArrays replaced with new DataArrays
        of summarized data and the indicated dimension(s) removed.
    """
    # Normalize ``dim`` to a set of dimension names.
    if isinstance(dim, basestring):
        dims = set([dim])
    elif dim is None:
        dims = set(self.dims)
    else:
        dims = set(dim)

    _assert_empty([dim for dim in dims if dim not in self.dims],
                  'Dataset does not contain the dimensions: %s')

    variables = OrderedDict()
    for name, var in iteritems(self._variables):
        reduce_dims = [dim for dim in var.dims if dim in dims]
        if reduce_dims or not var.dims:
            # Coordinates are dropped rather than reduced.
            if name not in self.coords:
                if (not numeric_only
                        or np.issubdtype(var.dtype, np.number)
                        or var.dtype == np.bool_):
                    if len(reduce_dims) == 1:
                        # unpack dimensions for the benefit of functions
                        # like np.argmin which can't handle tuple arguments
                        reduce_dims, = reduce_dims
                    elif len(reduce_dims) == var.ndim:
                        # prefer to aggregate over axis=None rather than
                        # axis=(0, 1) if they will be equivalent, because
                        # the former is often more efficient
                        reduce_dims = None
                    variables[name] = var.reduce(func, dim=reduce_dims,
                                                 keep_attrs=keep_attrs,
                                                 allow_lazy=allow_lazy,
                                                 **kwargs)
        else:
            # Variables untouched by the reduced dims are kept as-is.
            variables[name] = var

    coord_names = set(k for k in self.coords if k in variables)
    attrs = self.attrs if keep_attrs else None
    return self._replace_vars_and_dims(variables, coord_names, attrs)
def apply(self, func, keep_attrs=False, args=(), **kwargs):
    """Apply a function over the data variables in this dataset.

    Parameters
    ----------
    func : function
        Function which can be called in the form `f(x, **kwargs)` to
        transform each DataArray `x` in this dataset into another
        DataArray.
    keep_attrs : bool, optional
        If True, the dataset's attributes (`attrs`) will be copied from
        the original object to the new one. If False, the new object will
        be returned without attributes.
    args : tuple, optional
        Positional arguments passed on to `func`.
    **kwargs : dict
        Keyword arguments passed on to `func`.

    Returns
    -------
    applied : Dataset
        Resulting dataset from applying ``func`` over each data variable.
    """
    # maybe_wrap_array re-wraps bare ndarray results as DataArrays when
    # their shape still matches the input.
    variables = OrderedDict(
        (k, maybe_wrap_array(v, func(v, *args, **kwargs)))
        for k, v in iteritems(self.data_vars))
    attrs = self.attrs if keep_attrs else None
    return type(self)(variables, attrs=attrs)
def assign(self, **kwargs):
    """Assign new data variables to a Dataset, returning a new object
    with all the original variables in addition to the new ones.

    Parameters
    ----------
    kwargs : keyword, value pairs
        keywords are the variables names. If the values are callable, they
        are computed on the Dataset and assigned to new data variables. If
        the values are not callable, (e.g. a DataArray, scalar, or array),
        they are simply assigned.

    Returns
    -------
    ds : Dataset
        A new Dataset with the new variables in addition to all the
        existing variables.

    Notes
    -----
    Since ``kwargs`` is a dictionary, the order of your arguments may not
    be preserved, and so the order of the new variables is not well
    defined. Assigning multiple variables within the same ``assign`` is
    possible, but you cannot reference other variables created within the
    same ``assign`` call.

    See Also
    --------
    pandas.DataFrame.assign
    """
    data = self.copy()
    # do all calculations first...
    results = data._calc_assign_results(kwargs)
    # ... and then assign
    data.update(results)
    return data
def to_array(self, dim='variable', name=None):
    """Convert this dataset into an xray.DataArray

    The data variables of this dataset will be broadcast against each other
    and stacked along the first axis of the new array. All coordinates of
    this dataset will remain coordinates.

    Parameters
    ----------
    dim : str, optional
        Name of the new dimension.
    name : str, optional
        Name of the new data array.

    Returns
    -------
    array : xray.DataArray
    """
    from .dataarray import DataArray

    # Broadcast all data variables to a common shape, then stack them
    # along a new leading axis named ``dim``.
    data_vars = [self.variables[k] for k in self.data_vars]
    broadcast_vars = broadcast_variables(*data_vars)
    data = ops.stack([b.data for b in broadcast_vars], axis=0)

    # The new dimension's coordinate holds the variable names.
    coords = dict(self.coords)
    coords[dim] = list(self.data_vars)

    dims = (dim,) + broadcast_vars[0].dims

    return DataArray(data, coords, dims, attrs=self.attrs, name=name)
def _to_dataframe(self, ordered_dims):
    """Build a DataFrame whose index is the product of ``ordered_dims`` and
    whose columns are the non-dimension variables, each broadcast to the
    full dimension product and flattened.
    """
    columns = [k for k in self if k not in self.dims]
    data = [self._variables[k].expand_dims(ordered_dims).values.reshape(-1)
            for k in columns]
    index = self.coords.to_index(ordered_dims)
    return pd.DataFrame(OrderedDict(zip(columns, data)), index=index)
def to_dataframe(self):
    """Convert this dataset into a pandas.DataFrame.

    Non-index variables in this dataset form the columns of the
    DataFrame. The DataFrame is indexed by the Cartesian product of
    this dataset's indices.
    """
    return self._to_dataframe(self.dims)
@classmethod
def from_dataframe(cls, dataframe):
    """Convert a pandas.DataFrame into an xray.Dataset

    Each column will be converted into an independent variable in the
    Dataset. If the dataframe's index is a MultiIndex, it will be expanded
    into a tensor product of one-dimensional indices (filling in missing
    values with NaN). This method will produce a Dataset very similar to
    that on which the 'to_dataframe' method was called, except with
    possibly redundant dimensions (since all dataset variables will have
    the same dimensionality).
    """
    # TODO: Add an option to remove dimensions along which the variables
    # are constant, to enable consistent serialization to/from a dataframe,
    # even if some variables have different dimensionality.
    idx = dataframe.index
    obj = cls()

    if hasattr(idx, 'levels'):
        # it's a multi-index
        # expand the DataFrame to include the product of all levels
        full_idx = pd.MultiIndex.from_product(idx.levels, names=idx.names)
        dataframe = dataframe.reindex(full_idx)
        dims = [name if name is not None else 'level_%i' % n
                for n, name in enumerate(idx.names)]
        # Each index level becomes a dimension coordinate.
        for dim, lev in zip(dims, idx.levels):
            obj[dim] = (dim, lev)
        shape = [lev.size for lev in idx.levels]
    else:
        if idx.size:
            dims = (idx.name if idx.name is not None else 'index',)
            obj[dims[0]] = (dims, idx)
        else:
            dims = []
        # -1 lets reshape infer the (flat) length from each series.
        shape = -1

    for name, series in iteritems(dataframe):
        # Reshape the flat column values back to the N-D dimension grid.
        data = series.values.reshape(shape)
        obj[name] = (dims, data)
    return obj
@staticmethod
def _unary_op(f):
    """Wrap a unary function ``f`` as a Dataset method that applies it to
    every data variable, leaving coordinates untouched.
    """
    @functools.wraps(f)
    def func(self, *args, **kwargs):
        # Start from the coordinates only, then fill in transformed vars.
        ds = self.coords.to_dataset()
        for k in self.data_vars:
            ds._variables[k] = f(self._variables[k], *args, **kwargs)
        return ds
    return func
@staticmethod
def _binary_op(f, reflexive=False, join='inner', drop_na_vars=True):
    """Wrap a binary function ``f`` as a Dataset method, handling
    alignment, reflexive (right-hand) operations and GroupBy deferral.
    """
    @functools.wraps(f)
    def func(self, other):
        # Let GroupBy objects implement the operation instead.
        if isinstance(other, groupby.GroupBy):
            return NotImplemented
        if hasattr(other, 'indexes'):
            # Align both operands before operating.
            self, other = align(self, other, join=join, copy=False)
            empty_indexes = [d for d, s in self.dims.items() if s == 0]
            if empty_indexes:
                raise ValueError('no overlapping labels for some '
                                 'dimensions: %s' % empty_indexes)
        # For reflexive ops (e.g. __radd__), swap the operand order.
        g = f if not reflexive else lambda x, y: f(y, x)
        ds = self._calculate_binary_op(g, other, drop_na_vars=drop_na_vars)
        return ds
    return func
@staticmethod
def _inplace_binary_op(f):
    """Wrap an in-place binary function ``f`` (e.g. __iadd__) as a Dataset
    method that replaces this dataset's variables with the result.
    """
    @functools.wraps(f)
    def func(self, other):
        if isinstance(other, groupby.GroupBy):
            raise TypeError('in-place operations between a Dataset and '
                            'a grouped object are not permitted')
        if hasattr(other, 'indexes'):
            other = other.reindex_like(self, copy=False)
        # we don't want to actually modify arrays in-place
        g = ops.inplace_to_noninplace_op(f)
        ds = self._calculate_binary_op(g, other, inplace=True)
        # Swap in the computed variables/coords/attrs on ``self``.
        self._replace_vars_and_dims(ds._variables, ds._coord_names,
                                    ds._attrs, inplace=True)
        return self
    return func
def _calculate_binary_op(self, f, other, inplace=False, drop_na_vars=True):
    """Core implementation for binary arithmetic between this dataset and
    ``other`` (a Dataset, DataArray, mapping or scalar).
    """

    def apply_over_both(lhs_data_vars, rhs_data_vars, lhs_vars, rhs_vars):
        # Apply ``f`` to variables present on both sides; how missing
        # variables are treated depends on inplace/drop_na_vars.
        dest_vars = OrderedDict()
        performed_op = False
        for k in lhs_data_vars:
            if k in rhs_data_vars:
                dest_vars[k] = f(lhs_vars[k], rhs_vars[k])
                performed_op = True
            elif inplace:
                raise ValueError(
                    'datasets must have the same data variables '
                    'for in-place arithmetic operations: %s, %s'
                    % (list(lhs_data_vars), list(rhs_data_vars)))
            elif not drop_na_vars:
                # this shortcuts left alignment of variables for fillna
                dest_vars[k] = lhs_vars[k]
        if not performed_op:
            raise ValueError(
                'datasets have no overlapping data variables: %s, %s'
                % (list(lhs_data_vars), list(rhs_data_vars)))
        return dest_vars

    if utils.is_dict_like(other) and not isinstance(other, Dataset):
        # can't use our shortcut of doing the binary operation with
        # Variable objects, so apply over our data vars instead.
        new_data_vars = apply_over_both(self.data_vars, other,
                                        self.data_vars, other)
        return Dataset(new_data_vars)

    # Merge coordinates from both operands (if ``other`` has any).
    other_coords = getattr(other, 'coords', None)
    ds = self.coords.merge(other_coords)

    if isinstance(other, Dataset):
        new_vars = apply_over_both(self.data_vars, other.data_vars,
                                   self.variables, other.variables)
    else:
        # Scalar or array-like: apply it to every data variable.
        other_variable = getattr(other, 'variable', other)
        new_vars = OrderedDict((k, f(self.variables[k], other_variable))
                               for k in self.data_vars)
    ds._variables.update(new_vars)
    return ds
# Attach the generated arithmetic operators and reduce methods (sum, mean,
# etc.) to the Dataset class defined above.
ops.inject_all_ops_and_reduce_methods(Dataset, array_only=False)
| kjordahl/xray | xray/core/dataset.py | Python | apache-2.0 | 72,806 |
<?php
App::uses('AppModel', 'Model');
/**
* ProdCodeDivn Model
*
* @property ProdCodeSection $ProdCodeSection
* @property ProdCodeGroup $ProdCodeGroup
*/
class ProdCodeDivn extends AppModel {

/**
 * Display field
 *
 * Field shown when this model is listed (e.g. in find('list') results).
 *
 * @var string
 */
	public $displayField = 'divn_desc_eng';

/**
 * Validation rules
 *
 * A division must belong to a section, and its code must be numeric.
 *
 * @var array
 */
	public $validate = array(
		'prod_code_section_id' => array(
			'notempty' => array(
				'rule' => array('notempty'),
				//'message' => 'Your custom message here',
				//'allowEmpty' => false,
				//'required' => false,
				//'last' => false, // Stop validation after this rule
				//'on' => 'create', // Limit validation to 'create' or 'update' operations
			),
		),
		'divn_code' => array(
			'numeric' => array(
				'rule' => array('numeric'),
				//'message' => 'Your custom message here',
				//'allowEmpty' => false,
				//'required' => false,
				//'last' => false, // Stop validation after this rule
				//'on' => 'create', // Limit validation to 'create' or 'update' operations
			),
		),
	);

	//The Associations below have been created with all possible keys, those that are not needed can be removed

/**
 * belongsTo associations
 *
 * Each division belongs to exactly one ProdCodeSection.
 *
 * @var array
 */
	public $belongsTo = array(
		'ProdCodeSection' => array(
			'className' => 'ProdCodeSection',
			'foreignKey' => 'prod_code_section_id',
			'conditions' => '',
			'fields' => '',
			'order' => ''
		)
	);

/**
 * hasMany associations
 *
 * NOTE: the ProdCodeGroup association is intentionally commented out;
 * uncomment to enable division -> groups traversal.
 *
 * @var array
 */
	/*
	public $hasMany = array(
		'ProdCodeGroup' => array(
			'className' => 'ProdCodeGroup',
			'foreignKey' => 'prod_code_divn_id',
			'dependent' => false,
			'conditions' => '',
			'fields' => '',
			'order' => '',
			'limit' => '',
			'offset' => '',
			'exclusive' => '',
			'finderQuery' => '',
			'counterQuery' => ''
		)
	);*/
}
| hasanmbstu13/Project | Cakephp/ec2014/app/Model/ProdCodeDivn.php | PHP | apache-2.0 | 1,803 |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2010 Oracle. All rights reserved.
*
* $Id: ReplicaSyncupReader.java,v 1.4 2010/01/11 20:00:48 linda Exp $
*/
package com.sleepycat.je.rep.stream;
import static com.sleepycat.je.utilint.DbLsn.NULL_LSN;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.log.LogEntryType;
import com.sleepycat.je.log.entry.LogEntry;
import com.sleepycat.je.recovery.CheckpointEnd;
import com.sleepycat.je.rep.impl.node.NameIdPair;
import com.sleepycat.je.rep.vlsn.VLSNIndex;
import com.sleepycat.je.rep.vlsn.VLSNRange;
import com.sleepycat.je.txn.TxnCommit;
import com.sleepycat.je.utilint.LoggerUtils;
import com.sleepycat.je.utilint.VLSN;
/**
* The ReplicaSyncupReader scans the log backwards for requested log entries.
* The reader must track whether it has passed a checkpoint, and therefore
* can not used the vlsn index to skip over entries.
*
* The ReplicaSyncupReader is not thread safe, and can only be used
* serially. It will stop at the finishLsn, which should be set using the
* GlobalCBVLSN.
*/
public class ReplicaSyncupReader extends VLSNReader {

    /* True if this particular record retrieval is for a syncable record. */
    private boolean syncableSearch;

    /* Reusable log entry used to decipher checkpoint-end records. */
    private final LogEntry ckptEndLogEntry =
        LogEntryType.LOG_CKPT_END.getNewLogEntry();

    /* Reusable log entry used to decipher txn commit records. */
    private final LogEntry commitLogEntry =
        LogEntryType.LOG_TXN_COMMIT.getNewLogEntry();

    /*
     * SearchResults retains the information as to whether the found
     * matchpoint is valid.
     */
    private final MatchpointSearchResults searchResults;

    private final Logger logger;

    /**
     * Creates a backward-scanning reader positioned at the very end of the
     * log.
     *
     * @param envImpl the owning environment
     * @param vlsnIndex maps VLSNs to LSNs; used only for range checks, not
     * for skipping entries (see class comment)
     * @param endOfLogLsn the LSN at which the backward scan begins
     * @param readBufferSize size of the read window buffer
     * @param nameIdPair identifies this node, for logging
     * @param startVLSN first VLSN to look for; must not be NULL_VLSN
     * @param finishLsn the scan stops at this LSN (typically derived from
     * the GlobalCBVLSN)
     * @param searchResults accumulates passed commits/checkpoints so the
     * matchpoint's validity can be judged later
     */
    public ReplicaSyncupReader(EnvironmentImpl envImpl,
                               VLSNIndex vlsnIndex,
                               long endOfLogLsn,
                               int readBufferSize,
                               NameIdPair nameIdPair,
                               VLSN startVLSN,
                               long finishLsn,
                               MatchpointSearchResults searchResults)
        throws IOException, DatabaseException {

        /*
         * If we go backwards, endOfFileLsn and startLsn must not be null.
         * Make them the same, so we always start at the same very end.
         */
        super(envImpl,
              vlsnIndex,
              false,           // forward
              endOfLogLsn,
              readBufferSize,
              nameIdPair,
              finishLsn);

        initScan(startVLSN, endOfLogLsn);
        this.searchResults = searchResults;
        logger = LoggerUtils.getLogger(getClass());
    }

    /**
     * Set up the ReplicaSyncupReader to start scanning from this VLSN.
     * @throws IOException
     */
    private void initScan(VLSN startVLSN, long endOfLogLsn)
        throws IOException {

        if (startVLSN.equals(VLSN.NULL_VLSN)) {
            throw EnvironmentFailureException.unexpectedState
                ("ReplicaSyncupReader start can't be NULL_VLSN");
        }

        startLsn = endOfLogLsn;
        assert startLsn != NULL_LSN;

        /*
         * Flush the log so that syncup can assume that all log entries that
         * are represented in the VLSNIndex are safely out of the log buffers
         * and on disk. Simplifies this reader, so it can use the regular
         * ReadWindow, which only works on a file.
         */
        envImpl.getLogManager().flush();

        window.initAtFileStart(startLsn);
        // Position both offsets at the window end; the backward reader walks
        // from there toward the front of the file.
        currentEntryPrevOffset = window.getEndOffset();
        currentEntryOffset = window.getEndOffset();
        currentVLSN = startVLSN;
    }

    /**
     * Backward scanning for the replica's part in syncup.
     *
     * @return the record for the requested VLSN, or null if the VLSN is
     * below the contiguous VLSN range or cannot be found.
     */
    public OutputWireRecord scanBackwards(VLSN vlsn)
        throws DatabaseException {

        syncableSearch = false;
        VLSNRange range = vlsnIndex.getRange();
        if (vlsn.compareTo(range.getFirst()) < 0) {
            /*
             * The requested VLSN is before the start of our range, we don't
             * have this record.
             */
            return null;
        }

        currentVLSN = vlsn;

        if (readNextEntry()) {
            return currentFeedRecord;
        }
        return null;
    }

    /**
     * Backward scanning for finding an earlier candidate syncup matchpoint.
     *
     * @return the closest earlier sync-able record, or null if the scan
     * fell off the front of the contiguous VLSN range or the log.
     */
    public OutputWireRecord findPrevSyncEntry()
        throws DatabaseException {

        currentFeedRecord = null;
        syncableSearch = true;

        /* Start by looking at the entry before the current record. */
        currentVLSN = currentVLSN.getPrev();

        VLSNRange range = vlsnIndex.getRange();
        if (currentVLSN.compareTo(range.getFirst()) < 0) {
            /*
             * We've walked off the end of the contiguous VLSN range.
             */
            return null;
        }

        if (readNextEntry() == false) {
            /*
             * We scanned all the way to the front of the log, no
             * other sync-able entry found.
             */
            return null;
        }

        assert LogEntryType.isSyncPoint(currentFeedRecord.getEntryType()) :
            "Unexpected log type= " + currentFeedRecord;

        return currentFeedRecord;
    }

    /**
     * Throws an EnvironmentFailureException if we were scanning for a
     * particular VLSN and we have passed it by.
     */
    private void checkForPassingTarget(int compareResult) {

        if (compareResult < 0) {
            /* Hey, we passed the VLSN we wanted. */
            throw EnvironmentFailureException.unexpectedState
                ("want to read " + currentVLSN + " but reader at " +
                 currentEntryHeader.getVLSN());
        }
    }

    /**
     * Return true for ckpt entries, for syncable entries, and if we're in
     * specific vlsn scan mode, any replicated entry. There is an additional
     * level of filtering in processEntry.
     */
    @Override
    protected boolean isTargetEntry()
        throws DatabaseException {

        if (logger.isLoggable(Level.FINEST)) {
            LoggerUtils.finest(logger, envImpl,
                               " isTargetEntry " + currentEntryHeader);
        }

        nScanned++;

        /* Skip invisible entries. */
        if (currentEntryHeader.isInvisible()) {
            return false;
        }

        byte currentType = currentEntryHeader.getType();

        /*
         * Return true if this entry is replicated. All entries need to be
         * perused by processEntry, when we are doing a vlsn based search,
         * even if they are not a sync point, because:
         * (a) If this is a vlsn-based search, it's possible that the replica
         * and feeder are mismatched. The feeder will only propose a sync type
         * entry as a matchpoint but it might be that the replica has a non-
         * sync entry at that vlsn.
         * (b) We need to note passed commits in processEntry.
         */
        if (entryIsReplicated()) {
            if (syncableSearch) {
                if (LogEntryType.isSyncPoint(currentType)) {
                    return true;
                }
                /* Not a sync point; keep walking backwards by VLSN. */
                currentVLSN = currentEntryHeader.getVLSN().getPrev();
            } else {
                return true;
            }
        }

        /*
         * We'll also need to read checkpoint end records to record their
         * presence.
         */
        if (LogEntryType.LOG_CKPT_END.equalsType(currentType)) {
            return true;
        }

        return false;
    }

    /**
     * ProcessEntry does additional filtering before deciding whether to
     * return an entry as a candidate for matching.
     *
     * If this is a record we are submitting as a matchpoint candidate,
     * instantiate a WireRecord to house this log entry. If this is a
     * non-replicated entry or a txn end that follows the candidate matchpoint,
     * record whatever status we need to, but don't use it for comparisons.
     *
     * For example, suppose the log is like this:
     *
     * VLSN entry
     * 10     LN
     * 11     commit
     * 12     LN
     *  --   ckpt end
     * 13     commit
     * 14     abort
     *
     * And that the master only has VLSNs 1-12. The replica will suggest vlsn
     * 14 as the first matchpoint. The feeder will counter with a suggestion
     * of vlsn 11, since it does not have vlsn 14.
     *
     * At that point, the ReplicaSyncupReader will scan backwards in the log,
     * looking for vlsn 11. Although the reader should only return an entry
     * when it gets to vlsn 11. the reader must process commits and ckpts that
     * follow 11, so that they can be recorded in the searchResults, so the
     * number of rolled back commits can be accurately reported.
     */
    @Override
    protected boolean processEntry(ByteBuffer entryBuffer) {

        if (logger.isLoggable(Level.FINEST)) {
            LoggerUtils.finest(logger, envImpl,
                               " syncup reader saw " + currentEntryHeader);
        }
        byte currentType = currentEntryHeader.getType();

        /*
         * CheckpointEnd entries are tracked in order to see if a rollback
         * must be done, but are not returned as possible matchpoints.
         */
        if (LogEntryType.LOG_CKPT_END.equalsType(currentType)) {

            /*
             * Read the entry, which both lets us decipher its contents and
             * also advances the file reader position.
             */
            ckptEndLogEntry.readEntry(currentEntryHeader, entryBuffer,
                                      true /*readFullItem*/);

            if (logger.isLoggable(Level.FINEST)) {
                LoggerUtils.finest(logger, envImpl,
                                   " syncup reader read " +
                                   currentEntryHeader + ckptEndLogEntry);
            }

            if (((CheckpointEnd) ckptEndLogEntry.getMainItem()).
                getCleanedFilesToDelete()) {
                searchResults.notePassedCheckpointEnd();
            }

            return false;
        }

        /*
         * Setup the log entry as a wire record so we can compare it to
         * the entry from the feeder as we look for a matchpoint. Do this
         * before we change positions on the entry buffer by reading it.
         */
        ByteBuffer buffer = entryBuffer.slice();
        buffer.limit(currentEntryHeader.getItemSize());
        currentFeedRecord = new OutputWireRecord(currentEntryHeader, buffer);

        /*
         * All commit records must be tracked to figure out if we've exceeded
         * the txn rollback limit. For reporting reasons, we'll need to
         * unmarshal the log entry, so we can read the timestamp in the commit
         * record.
         */
        if (LogEntryType.LOG_TXN_COMMIT.equalsType(currentType)) {

            commitLogEntry.readEntry(currentEntryHeader, entryBuffer,
                                     true /*readFullItem*/);
            TxnCommit commit = (TxnCommit) commitLogEntry.getMainItem();
            searchResults.notePassedCommits(commit.getTime(),
                                            commit.getId(),
                                            currentEntryHeader.getVLSN(),
                                            getLastLsn());

            if (logger.isLoggable(Level.FINEST)) {
                LoggerUtils.finest(logger, envImpl,
                                   "syncup reader read " +
                                   currentEntryHeader + commitLogEntry);
            }
        } else {
            /* Not a commit; just skip past the item bytes. */
            entryBuffer.position(entryBuffer.position() +
                                 currentEntryHeader.getItemSize());
        }

        if (syncableSearch) {
            return true;
        }

        /* We're looking for a particular VLSN. */
        int compareResult = currentEntryHeader.getVLSN().compareTo(currentVLSN);
        checkForPassingTarget(compareResult);

        /* return true if this is the entry we want. */
        return (compareResult == 0);
    }
}
| bjorndm/prebake | code/third_party/bdb/src/com/sleepycat/je/rep/stream/ReplicaSyncupReader.java | Java | apache-2.0 | 12,355 |
using De.Osthus.Ambeth.Bytecode.Visitor;
using De.Osthus.Ambeth.Ioc.Annotation;
using De.Osthus.Ambeth.Log;
using De.Osthus.Ambeth.Merge;
using De.Osthus.Ambeth.Merge.Model;
using De.Osthus.Ambeth.Proxy;
using System;
using System.Collections.Generic;
namespace De.Osthus.Ambeth.Bytecode.Behavior
{
public class EnhancedTypeBehavior : AbstractBehavior
{
[LogInstance]
public ILogger Log { private get; set; }
[Autowired]
public IEntityMetaDataProvider EntityMetaDataProvider { protected get; set; }
public override Type[] GetEnhancements()
{
return new Type[] { typeof(IEnhancedType), typeof(IEntityMetaDataHolder) };
}
public override IClassVisitor Extend(IClassVisitor visitor, IBytecodeBehaviorState state, IList<IBytecodeBehavior> remainingPendingBehaviors,
IList<IBytecodeBehavior> cascadePendingBehaviors)
{
if ((state.GetContext<EntityEnhancementHint>() == null && state.GetContext<EmbeddedEnhancementHint>() == null))
{
return visitor;
}
if (state.GetContext<EntityEnhancementHint>() != null)
{
IEntityMetaData metaData = EntityMetaDataProvider.GetMetaData(state.OriginalType);
visitor = new InterfaceAdder(visitor, typeof(IEntityMetaDataHolder));
visitor = new EntityMetaDataHolderVisitor(visitor, metaData);
}
visitor = new InterfaceAdder(visitor, typeof(IEnhancedType));
visitor = new GetBaseTypeMethodCreator(visitor);
return visitor;
}
}
} | Dennis-Koch/ambeth | ambeth/Ambeth.Cache.Bytecode/ambeth/bytecode/behavior/EnhancedTypeBehavior.cs | C# | apache-2.0 | 1,613 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jsecurity.authz.aop;
import org.jsecurity.aop.AnnotationMethodInterceptor;
import org.jsecurity.aop.MethodInvocation;
import org.jsecurity.authz.AuthorizationException;
import java.lang.annotation.Annotation;
/**
* An <tt>AnnotationMethodInterceptor</tt> that asserts the calling code is authorized to execute the method
* before allowing the invocation to continue.
*
* @author Les Hazlewood
* @since 0.1
*/
public abstract class AuthorizingAnnotationMethodInterceptor extends AnnotationMethodInterceptor {

    /**
     * Constructs an interceptor that triggers on methods carrying the given
     * annotation type.
     *
     * @param annotationClass the annotation that marks methods to authorize
     */
    public AuthorizingAnnotationMethodInterceptor(Class<? extends Annotation> annotationClass) {
        super(annotationClass);
    }

    /**
     * Asserts that the caller is authorized, then proceeds with the
     * underlying method invocation.
     *
     * @param invocation the intercepted method invocation
     * @return whatever the underlying method returns
     * @throws Throwable if authorization fails or the method itself throws
     */
    public Object invoke(MethodInvocation invocation) throws Throwable {
        assertAuthorized(invocation);
        return invocation.proceed();
    }

    /**
     * Ensures the calling code is authorized to execute the given invocation.
     *
     * @param mi the invocation to check
     * @throws AuthorizationException if the caller is not authorized
     */
    public abstract void assertAuthorized(MethodInvocation mi) throws AuthorizationException;
}
| apache/jsecurity | src/org/jsecurity/authz/aop/AuthorizingAnnotationMethodInterceptor.java | Java | apache-2.0 | 1,745 |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.workflow.soap;
public interface TaskListSoapInterface {
  /**
   * Returns the per-filter task counts.
   *
   * @param ignoreZero if true, presumably filters with a zero count are
   *     omitted — TODO confirm against the implementation
   * @return the counts, serialized as a string (format defined by the
   *     implementation)
   */
  String getTaskFilterCounts(boolean ignoreZero);

  /** @return the names of the available task filters */
  String[] getTaskFilterNames();

  /**
   * Returns a page of tasks for the given filter.
   *
   * @param filterName one of the names from {@link #getTaskFilterNames()}
   * @param start zero-based index of the first result — TODO confirm
   * @param numResults maximum number of results to return
   * @return the task list, serialized as a string
   */
  String getTaskList(String filterName, int start, int numResults) throws Exception;
}
| equella/Equella | Source/Plugins/Core/com.equella.core/src/com/tle/web/workflow/soap/TaskListSoapInterface.java | Java | apache-2.0 | 1,054 |
package com.soulkey.calltalent.db;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import com.soulkey.calltalent.db.model.SettingModel;
import com.soulkey.calltalent.db.populator.SettingPopulator;
public final class DbOpenHelper extends SQLiteOpenHelper {
    public static final String DB_NAME = "calltalent.db";
    private static final int DB_VERSION = 1;

    private static DbOpenHelper instance;

    /**
     * Returns the process-wide singleton helper, creating it on first use.
     * <p>
     * Synchronized so that two threads racing on the first call cannot each
     * create (and leak) a helper instance.
     *
     * @param context any context; only its application context is retained
     * @return the shared helper
     */
    public static synchronized DbOpenHelper getInstance(Context context) {
        if (null == instance) {
            // Hold the application context, never an Activity/Service context,
            // so the long-lived singleton does not leak a short-lived component.
            instance = new DbOpenHelper(context.getApplicationContext());
        }
        return instance;
    }

    private DbOpenHelper(Context context) {
        super(context, DB_NAME, null, DB_VERSION);
    }

    /** Creates the schema and seeds it with initial data on first open. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(SettingModel.CREATE_TABLE);
        populateDb(db);
    }

    /** No migrations yet: DB_VERSION is still 1. Add upgrade steps here. */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
    }

    /** Seeds the freshly created database with default settings. */
    private void populateDb(SQLiteDatabase db) {
        SettingPopulator.populate(db);
    }
}
| wpcfan/calltalent | app/src/main/java/com/soulkey/calltalent/db/DbOpenHelper.java | Java | apache-2.0 | 1,109 |
package com.splinter.graphing;
import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
/**
 * Unit tests for the SLog family of Splinter log builders. Each test pins the
 * exact serialized form ("$SPG$" prefix, ';'-separated key/value fields).
 */
public class SplinterLogTest {
    /** Disabling logging must yield an empty string, regardless of fields set. */
    @Test
    public void testDisableLogs() {
        try {
            SLog.setEnabled(false);
            String expected = "";
            Assert.assertEquals(expected, new SLogStop("Coffee Time", "coffeeComplete")
                    .withOperationAlias("ensureCapacity")
                    .withComponentOverride("WaterReservoir")
                    .withUserData("size", "large")
                    .withInstrumentationOverride(0, null)
                    .toString());
        } finally {
            // Re-enable so later tests are unaffected by this one.
            SLog.setEnabled(true);
        }
    }

    /** Varargs user data: null/odd-length key-value pairs degrade gracefully. */
    @Test
    public void testStaticUtilsVarArgs() {
        String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null));

        // A null key is replaced with a generated _MISSING_KEY_n placeholder.
        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=null;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, null));

        // A key with no value is dropped entirely.
        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=null;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", null));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, "large"));

        // Trailing unpaired key after a null key is ignored.
        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, "large", "newkey"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", "large"));
    }

    /**
     * Static convenience loggers: call (+M=S), start (+M=A), stop (+M=F) and
     * the broadcast variants (+MC count, +OA operation alias).
     */
    @Test
    public void testStaticUtils() {
        String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
        Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=A;size=large;";
        Assert.assertEquals(expected, SLogStart.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=A;";
        Assert.assertEquals(expected, SLogStart.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=F;size=large;";
        Assert.assertEquals(expected, SLogStop.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=F;";
        Assert.assertEquals(expected, SLogStop.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;+MC=1;size=large;";
        Assert.assertEquals(expected, SLogBroadcastSend.log("Coffee Time", "selectCupSize", "size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;+MC=1;";
        Assert.assertEquals(expected, SLogBroadcastSend.log("Coffee Time", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=A;+OA=selectCupSize;size=large;";
        Assert.assertEquals(expected, SLogBroadcastStart.log("Coffee Time", "bcastId", "selectCupSize","size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=A;+OA=selectCupSize;";
        Assert.assertEquals(expected, SLogBroadcastStart.log("Coffee Time", "bcastId", "selectCupSize"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=F;+OA=selectCupSize;size=large;";
        Assert.assertEquals(expected, SLogBroadcastStop.log("Coffee Time", "bcastId", "selectCupSize","size", "large"));

        expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=F;+OA=selectCupSize;";
        Assert.assertEquals(expected, SLogBroadcastStop.log("Coffee Time", "bcastId", "selectCupSize"));
    }

    /** Builder API: user data via single pairs, a Map, and repeated calls. */
    @Test
    public void testSunnyDay() {
        String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData("size", "large").toString());

        Map<String, String> userData = new HashMap<String, String>();
        userData.put("size", "large");
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData(userData).toString());

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;size1=large;size2=large;size3=large;size4=large;size5=large;";
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData("size", "large")
                .withUserData("size1", "large")
                .withUserData("size2", "large")
                .withUserData("size3", "large")
                .withUserData("size4", "large")
                .withUserData("size5", "large").toString());
    }

    /** Optional fields: instrumentation override, operation alias, component override. */
    @Test
    public void testOptionalParams() {
        String expected = "$SPG$+T=Coffee Time;+O=pumpWater;+M=A;+I^=100ms;";
        Assert.assertEquals(expected, new SLogStart("Coffee Time", "pumpWater")
                .withInstrumentationOverride(100, SLog.TimeNotation.MILLIS)
                .toString());

        expected = "$SPG$+T=Coffee Time;+O=coffeeComplete;+M=F;+OA=ensureCapacity;+C^=WaterReservoir;";
        Assert.assertEquals(expected, new SLogStop("Coffee Time", "coffeeComplete")
                .withOperationAlias("ensureCapacity")
                .withComponentOverride("WaterReservoir")
                .toString());
    }

    /** Missing task/operation/keys are replaced with _MISSING_* placeholders. */
    @Test
    public void testMissingParams() {
        String expected = "$SPG$+T=_MISSING_TASK_;+O=_MISSING_OPERATION_;+M=S;";
        Assert.assertEquals(expected, new SLog(null, null, null)
                .toString());

        expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
        Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
                .withUserData(null, "large").toString());
    }

    /** SLog.escape backslash-escapes the field delimiters '=', ';' and '\'. */
    @Test
    public void testEscaping() {
        Assert.assertEquals("abcd", SLog.escape("abcd"));
        Assert.assertEquals("ab\\ncd", SLog.escape("ab\ncd"));
        Assert.assertNull(SLog.escape(null));
        Assert.assertEquals("", SLog.escape(""));
        Assert.assertEquals("ab\\=cd", SLog.escape("ab=cd"));
        Assert.assertEquals("ab\\;cd", SLog.escape("ab;cd"));
        Assert.assertEquals("ab\\\\cd", SLog.escape("ab\\cd"));
    }

    /** End-to-end: escaping is applied to every field of a built log line. */
    @Test
    public void testEscapingLog() {
        String expected = "$SPG$+T=file\\; opened;+O=\\\\open;+M=S;+OA=\\=1;r\\=sr=/Users/dimitarz/\\;filename.log;";
        Assert.assertEquals(expected, new SLog(null, null, null)
                .withUserData("r=sr", "/Users/dimitarz/;filename.log")
                .withOperation("\\open")
                .withOperationAlias("=1")
                .withTask("file; opened")
                .toString());
    }
}
| dimitarz/splinter | src/test/java/com/splinter/graphing/SplinterLogTest.java | Java | apache-2.0 | 7,294 |
package io.zrz.graphql.core.decl;
import java.util.List;
import org.eclipse.jdt.annotation.Nullable;
import io.zrz.graphql.core.doc.GQLDirective;
import io.zrz.graphql.core.parser.GQLSourceLocation;
public interface GQLDeclaration {
  /** @return the declaration's description text, or null if none was given */
  @Nullable
  String description();

  /** Dispatches this declaration to the given visitor. */
  <R> R apply(GQLDeclarationVisitor<R> visitor);

  /** @return the directives attached to this declaration */
  List<GQLDirective> directives();

  /** @return where this declaration appeared in the source, or null if unknown */
  @Nullable
  GQLSourceLocation location();

  // The with* methods return modified copies; this interface exposes a
  // wither-style API (presumably immutable implementations — confirm in impls).
  GQLDeclaration withDescription(String value);

  GQLDeclaration withDirectives(GQLDirective... elements);

  GQLDeclaration withDirectives(Iterable<? extends GQLDirective> elements);

  GQLDeclaration withLocation(GQLSourceLocation value);
}
| zourzouvillys/graphql | graphql-core/src/main/java/io/zrz/graphql/core/decl/GQLDeclaration.java | Java | apache-2.0 | 651 |
/*
* @class TableExamplesService
*/
export default class TableExamplesService {
constructor($http) {
this.$http = $http;
}
static getClassName() { return 'TableExamplesService'; }
getClassName() { return TableExamplesService.getClassName(); }
/*
* @func getColumns
* @desc getes a list of columns representing the dataset that
* allows data tables to map the array of data to the table
*/
getColumns() {
return this.$http.get('http://localhost:3001/api/DataTable/Columns/People');
}
/*
* @func addColumn
* @desc adds a col
* allows data tables to map the array of data to the table
*/
addColumn(item) {
return this.$http.post('http://localhost:3001/api/DataTable/Columns/People', item);
}
/*
* @func getData
* @desc gets a list of items from the api
*/
getData() {
return this.$http.get('http://localhost:3001/api/People');
}
/*
* @func addData
* @desc adds an item to the api
* @param item
*/
addData(item) {
return this.$http.post('http://localhost:3001/api/People', item);
}
} | garrettwong/GDashboard | client/app/components/tableExamples/tableExamples.service.js | JavaScript | apache-2.0 | 1,084 |
/*
* Copyright ยฉ 2016 - 2017 Dominik Szalai (emptulik@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cz.muni.fi.editor.support;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.core.annotation.AliasFor;
import org.springframework.security.test.context.support.WithSecurityContext;
/**
* @author Dominik Szalai - emptulik at gmail.com on 10.8.2016.
*/
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Documented
@WithSecurityContext(
    factory = TestSecurityContextFactory.class
)
public @interface WithEditorUser {
    // The test security context is built by TestSecurityContextFactory for the
    // user with this id. The owner is always user with ID 1, hence the default.
    @AliasFor("value")
    long id() default 1L;

    // Alias of id(), so the annotation can be written as @WithEditorUser(2L).
    @AliasFor("id")
    long value() default 1L;

    // Whether the authenticated principal should be mocked — TODO confirm
    // exact semantics in TestSecurityContextFactory.
    boolean mock() default false;
}
| empt-ak/meditor | editor-backend/src/test/java/cz/muni/fi/editor/support/WithEditorUser.java | Java | apache-2.0 | 1,489 |
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.ui.admin.tree.editablePanel;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
@SuppressWarnings("serial")
public class EditablePanel extends Panel {

    /**
     * Builds a panel containing a single text field bound to the given model.
     * An Ajax "onblur" behavior is attached so the field pushes its value
     * into the model whenever it loses focus.
     *
     * @param id the wicket component id
     * @param inputModel the model backing the text field
     */
    public EditablePanel(String id, IModel<String> inputModel) {
        super(id);
        TextField<String> textField = new TextField<String>("textfield", inputModel);
        add(textField);
        textField.add(new AjaxFormComponentUpdatingBehavior("onblur") {
            @Override
            protected void onUpdate(AjaxRequestTarget target) {
                // Intentionally empty: the behavior exists only so the model
                // is updated on blur; no additional UI refresh is needed.
            }
        });
    }
}
| openengsb/openengsb | ui/admin/src/main/java/org/openengsb/ui/admin/tree/editablePanel/EditablePanel.java | Java | apache-2.0 | 1,593 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: common.py
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
import random
import time
import threading
import multiprocessing
import numpy as np
from tqdm import tqdm
from six.moves import queue
from tensorpack import *
from tensorpack.utils.concurrency import *
from tensorpack.utils.stats import *
def play_one_episode(player, func, verbose=False):
    """Play one episode with a mostly-greedy policy and return the mean score.

    Args:
        player: environment wrapper exposing ``get_action_space()`` and
            ``play_one_episode(policy)``.
        func: predictor; ``func([[state]])[0][0]`` yields per-action scores.
        verbose: if True, print each chosen action.
    """
    def choose_action(state):
        action_space = player.get_action_space()
        action = func([[state]])[0][0].argmax()
        # Keep a tiny amount of random exploration even during evaluation.
        if random.random() < 0.001:
            action = action_space.sample()
        if verbose:
            print(action)
        return action

    return np.mean(player.play_one_episode(choose_action))
def play_model(cfg, player):
    # Build a single-process predictor from the config and let it drive the
    # player forever, printing each episode's total score. Runs until killed.
    predfunc = OfflinePredictor(cfg)
    while True:
        score = play_one_episode(player, predfunc)
        print("Total:", score)
def eval_with_funcs(predictors, nr_eval, get_player_fn):
    # Evaluate nr_eval episodes in parallel, one worker thread per predictor.
    # Returns (average_score, max_score), or (0, 0) if nothing was collected.
    class Worker(StoppableThread, ShareSessionThread):
        def __init__(self, func, queue):
            super(Worker, self).__init__()
            self._func = func
            self.q = queue

        def func(self, *args, **kwargs):
            # Wrapper around the predictor that aborts the episode (via
            # RuntimeError) once this worker has been asked to stop.
            if self.stopped():
                raise RuntimeError("stopped!")
            return self._func(*args, **kwargs)

        def run(self):
            with self.default_sess():
                player = get_player_fn(train=False)
                while not self.stopped():
                    try:
                        score = play_one_episode(player, self.func)
                        # print("Score, ", score)
                    except RuntimeError:
                        # Raised by self.func when stop() was requested.
                        return
                    # Blocking put that also honors the stop flag.
                    self.queue_put_stoppable(self.q, score)

    q = queue.Queue()
    threads = [Worker(f, q) for f in predictors]

    for k in threads:
        k.start()
        time.sleep(0.1)  # avoid simulator bugs
    stat = StatCounter()
    try:
        # Collect exactly nr_eval scores, then let the in-flight episodes
        # finish and drain whatever extra scores they produced.
        for _ in tqdm(range(nr_eval), **get_tqdm_kwargs()):
            r = q.get()
            stat.feed(r)
        logger.info("Waiting for all the workers to finish the last run...")
        for k in threads:
            k.stop()
        for k in threads:
            k.join()
        while q.qsize():
            r = q.get()
            stat.feed(r)
    except:
        # NOTE: bare except (includes KeyboardInterrupt); the return in the
        # finally block below swallows the exception after logging it.
        logger.exception("Eval")
    finally:
        if stat.count > 0:
            return (stat.average, stat.max)
        return (0, 0)
def eval_model_multithread(cfg, nr_eval, get_player_fn):
    """Evaluate a model over ``nr_eval`` episodes using half the CPUs (max 8)."""
    predictor = OfflinePredictor(cfg)
    nr_proc = min(multiprocessing.cpu_count() // 2, 8)
    mean_score, max_score = eval_with_funcs(
        [predictor] * nr_proc, nr_eval, get_player_fn)
    logger.info("Average Score: {}; Max Score: {}".format(mean_score, max_score))
class Evaluator(Triggerable):
    # Trainer callback that periodically evaluates the model and publishes
    # mean/max scores as scalar monitors.
    def __init__(self, nr_eval, input_names, output_names, get_player_fn):
        # nr_eval: number of evaluation episodes per trigger (may shrink over
        # time, see _trigger).
        self.eval_episode = nr_eval
        self.input_names = input_names
        self.output_names = output_names
        self.get_player_fn = get_player_fn

    def _setup_graph(self):
        # Build predictors once; the same predictor object is shared by all
        # evaluation workers (list holds NR_PROC references to one predictor).
        NR_PROC = min(multiprocessing.cpu_count() // 2, 20)
        self.pred_funcs = [self.trainer.get_predictor(
            self.input_names, self.output_names)] * NR_PROC

    def _trigger(self):
        t = time.time()
        mean, max = eval_with_funcs(
            self.pred_funcs, self.eval_episode, self.get_player_fn)
        t = time.time() - t
        if t > 10 * 60:  # eval takes too long
            # Shrink the episode budget so evaluation stays under ~10 minutes.
            self.eval_episode = int(self.eval_episode * 0.94)
        self.trainer.monitors.put_scalar('mean_score', mean)
        self.trainer.monitors.put_scalar('max_score', max)
def play_n_episodes(player, predfunc, nr):
    """Play ``nr`` episodes with ``predfunc`` driving ``player``, printing each score."""
    logger.info("Start evaluation: ")
    for episode in range(nr):
        # The player starts ready for the first episode; restart thereafter.
        if episode > 0:
            player.restart_episode()
        score = play_one_episode(player, predfunc)
        print("{}/{}, score={}".format(episode, nr, score))
| haamoon/tensorpack | examples/DeepQNetwork/common.py | Python | apache-2.0 | 3,829 |
package org.devocative.demeter.service.template;
import groovy.lang.Binding;
import groovy.lang.Script;
import org.devocative.demeter.iservice.template.BaseStringTemplate;
import java.util.Map;
public class GroovyScript extends BaseStringTemplate<Script> {
    private final Script script;

    public GroovyScript(Script script) {
        this.script = script;
    }

    /**
     * Runs the wrapped Groovy script with each entry of {@code params} bound
     * as a script variable, and returns the script's result.
     */
    @Override
    public Object process(Map<String, Object> params) {
        Binding binding = new Binding();
        for (Map.Entry<String, Object> param : params.entrySet()) {
            binding.setVariable(param.getKey(), param.getValue());
        }
        script.setBinding(binding);
        return script.run();
    }

    /** Exposes the underlying compiled Groovy script. */
    @Override
    public Script unwrap() {
        return script;
    }
}
| mbizhani/Demeter | service/src/main/java/org/devocative/demeter/service/template/GroovyScript.java | Java | apache-2.0 | 690 |