code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
package net.graphical.model.causality.graph.model.AdjImpl; import net.graphical.model.causality.graph.model.Edge; import net.graphical.model.causality.graph.model.EdgeType; import net.graphical.model.causality.graph.model.Node; import java.util.*; import java.util.stream.Collectors; /** * Created by sli on 10/27/15. */ public class GraphBase { protected Vertex[] vertexes; protected ArrayList<Edge> edges; private Map<Integer, Integer> NodeNumberToArrayIndexMap = new HashMap<>(); public GraphBase(List<Node> nodes, List<Edge> inputEdges) { this.vertexes = new Vertex[nodes.size()]; int index = 0; for(Node node : nodes){ NodeNumberToArrayIndexMap.put(node.getNumber(), index); vertexes[index++] = new Vertex(this, node); } this.edges = new ArrayList<>() ; for(Edge oldEdge : inputEdges){//make sure levels are there Edge edge = new Edge(getVertexByNumber(oldEdge.getFirstNode().getNumber()).getNode() ,getVertexByNumber(oldEdge.getSecondNode().getNumber()).getNode() , oldEdge.getEdgeType() ); this.edges.add(edge); vertexes[NodeNumberToArrayIndexMap.get(edge.getFirstNode().getNumber())].addEdge(edge); vertexes[NodeNumberToArrayIndexMap.get(edge.getSecondNode().getNumber()) ].addEdge(edge); } } public List<Vertex> getVertexes() { return Arrays.asList(vertexes); } public Vertex getVertexByNumber(int nodeNumber){ return vertexes[NodeNumberToArrayIndexMap.get(nodeNumber)]; } public boolean isDirectedGraph(){ return edges.stream().filter(e->!e.getEdgeType().isDirected()).collect(Collectors.toList()).isEmpty(); } public boolean isUnDirectedGraph(){ return edges.stream().filter(e->e.getEdgeType().isDirected()).collect(Collectors.toList()).isEmpty(); } public List<Edge> getEdges() { return edges; } public void exploreReset() { for (int i = 0; i < vertexes.length; i++){ vertexes[i].setIsExplored(false); } } public List<Node> getNodes() { return Arrays.asList(vertexes).stream().map(v -> v.getNode()).collect(Collectors.toList()); } //follow algorithm presented in REF_UNO //Directed Edges 
are ignored. public List<List<Node>> getAllCliques(){ return getAllCliques(getNodes()); } public List<List<Node>> getAllCliques(List<Node> subNodes){ SortedSet<Node> CAND_Empty = new TreeSet<Node>() ;//vertexes neigbor to all vertexes of clique K CAND_Empty.addAll(subNodes); Map<List<Node>, SortedSet<Node>> cliqueToCandKMap = new HashMap<>(); cliqueToCandKMap.put(Arrays.asList(), CAND_Empty); List<List<Node>> result = new ArrayList<>(); while(!cliqueToCandKMap.isEmpty()){ List<List<Node>> cliques = new ArrayList<>(); Map<List<Node>, SortedSet<Node>> newCliqueToCandKMap = new HashMap<>(); for(List<Node> clique : cliqueToCandKMap.keySet()){ cliques.add(clique); //update newCliqueToCandKMap SortedSet<Node> cand = cliqueToCandKMap.get(clique); for(Node node : cand){ if(node.getNumber() > tail(clique)){ List<Node> newClique = new ArrayList<>(); newClique.addAll(clique); newClique.add(node); newCliqueToCandKMap.put(newClique, updateCand(cand, node)); } } } result.addAll(cliques); cliqueToCandKMap = newCliqueToCandKMap; } return result; } private SortedSet<Node> updateCand(SortedSet<Node> cand, Node node) { Vertex v = getVertexByNumber(node.getNumber()); Set<Node> neighbors = new TreeSet<>(v.getNeighbors()); TreeSet<Node> newCand = new TreeSet<>(cand); newCand.retainAll(neighbors); return newCand; } private int tail(List<Node> clique) { if(clique.isEmpty()) return -1; return clique.stream().map(node -> new Integer(node.getNumber())).max((i1,i2) -> i1 - i2).get().intValue(); } //Bron–Kerbosch algorithm: standard //Directed edges are ignored. public List<List<Node>> getAllMaxmalCliques_standard(){ List<List<Node>> result = new ArrayList<>(); doBronKerbosch_standard(new ArrayList<>(), new TreeSet<>(getNodes()), new TreeSet<>(), result); return result; } //Directed edges are ignored. 
public List<List<Node>> getAllMaximalCliques(List<Node> subNodes){ List<List<Node>> result = new ArrayList<>(); doBronKerbosch_standard(new ArrayList<>(), new TreeSet<>(subNodes), new TreeSet<>(), result); return result; } public List<ConnectedUndirectedGraph> getChainComponents(){ List<Edge> edges = new ArrayList<>(); for(Edge edge: getEdges()){ if(!edge.isDirected()){ edges.add(edge); } } UndirectedGraph ug = new UndirectedGraph(getNodes(), edges); return ug.getConnectedComponents(); } /** * algrothm: REF_CK * @param R * @param P * @param X * @param result * * R is a clique * Constraints between R,P,X: * P is subset of CAND(R) * R and X cannot be neigbors (any) * * Important: directed edges are ignored. */ public void doBronKerbosch_standard(List<Node> R, TreeSet<Node> P, TreeSet<Node> X, List<List<Node>> result ){ if(X.isEmpty() && P.isEmpty()){ List<Node> clique = new ArrayList<>(R); result.add (clique); R.clear(); } else { int size = P.size(); for(int i = 0; i < size; i++){ Node node = P.first(); P.remove(node); List<Node> newR = new ArrayList<>(R); newR.add(node); TreeSet newP = new TreeSet(P); newP.retainAll(getVertexByNumber(node.getNumber()).getNeighbors()); TreeSet newX = new TreeSet(X); newX.retainAll(getVertexByNumber(node.getNumber()).getNeighbors()); doBronKerbosch_standard(newR, newP, newX, result); X.add(node); } } } public List<List<Node>> getAllMaxmalCliques_pivot(){ List<List<Node>> result = new ArrayList<>(); doBronKerbosch_pivot(new ArrayList<>(), new TreeSet<>(getNodes()), new TreeSet<>(), result); return result; } //algrothm: REF_CK private void doBronKerbosch_pivot(List<Node> R, TreeSet<Node> P, TreeSet<Node> X, List<List<Node>> result ){ if(X.isEmpty() && P.isEmpty()){ List<Node> clique = new ArrayList<>(R); result.add (clique); R.clear(); } else { Node pivotNode = getPivotNode(P); int size = P.size(); for(int i = 0; i < size; i++){ if(getVertexByNumber(pivotNode.getNumber()).isNeighor(pivotNode)){ continue; } Node node = P.first(); 
P.remove(node); List<Node> newR = new ArrayList<>(R); newR.add(node); TreeSet newP = new TreeSet(P); newP.retainAll(getVertexByNumber(node.getNumber()).getNeighbors()); TreeSet newX = new TreeSet(X); newX.retainAll(getVertexByNumber(node.getNumber()).getNeighbors()); doBronKerbosch_pivot(newR, newP, newX, result); X.add(node); } } } private Node getPivotNode(TreeSet<Node> p) { //just one way of chosing pivot node; if(p.isEmpty()) return null; return p.stream().max((n1,n2)-> getVertexByNumber(n1.getNumber()).getDegree() - getVertexByNumber(n2.getNumber()).getDegree()).get(); } public GraphBase subtract(List<Node> cliqueC) { List<Node> nodeList = getNodes().stream().filter(n-> !cliqueC.contains(n)).collect(Collectors.toList()); List<Edge> edgeList = getEdges().stream().filter(e -> !e.hasNodeIn(cliqueC)).collect(Collectors.toList());; return new GraphBase(nodeList, edgeList); } public boolean hasSameEdges(ChainGraph g0) { Set<Edge> thisEdgeSet = new HashSet<>(this.getEdges()); Set<Edge> thatEdgeSet = new HashSet<>(g0.getEdges()); return thisEdgeSet.equals(thatEdgeSet); } public void removeEdge(Node v, Node u) { Edge found = findEdge(v, u); edges.remove(found); //update Vertex edge list; Vertex vV = getVertexByNumber(v.getNumber()); vV.removeEdge(found); Vertex uV = getVertexByNumber(u.getNumber()); uV.removeEdge(found); } public void addDirectedEdge(Node u, Node v) { //add u->u, where u, v is already in graph Edge newEdge = new Edge(u, v, EdgeType.DIRECTED_PLUS); edges.add(newEdge); //update Vertex edge list; Vertex vV = getVertexByNumber(v.getNumber()); vV.addEdge(newEdge); Vertex uV = getVertexByNumber(u.getNumber()); uV.addEdge(newEdge); } public Edge findEdge(Node v, Node u) { for(Edge edge : edges){ if(edge.hasNode(v) && edge.hasNode(u)){ return edge; } } return null; } public void turnDirectedEdge(Node v, Node u) { removeEdge(v, u); addDirectedEdge(u,v); } public int noOfUndirectEdge() { int count = 0; for(Edge edge : edges){ 
if(!edge.getEdgeType().isDirected()){ count ++; } } return count; } public int noOfDirectEdge() { int count = 0; for(Edge edge : edges){ if(edge.getEdgeType().isDirected()){ count ++; } } return count; } public int getArrayIndex(int number) { return NodeNumberToArrayIndexMap.get(number); } @Override public String toString(){ String str = "number of edges: " + edges.size() + "\n"; for(Edge edge : edges){ str += edge.toString() + "\n"; } return str; } }
lisongshan/causalGraphicalModel
causality/src/main/java/net/graphical/model/causality/graph/model/AdjImpl/GraphBase.java
Java
apache-2.0
10,564
import React from 'react'; import PropTypes from 'prop-types'; import createReactClass from 'create-react-class'; import _ from 'lodash'; import { api } from './Api'; import { formatJSON } from './utils'; export default createReactClass({ propTypes: { params: PropTypes.object, }, getInitialState() { return { data_container: null, }; }, componentDidMount() { const id = this.props.params.id; const payload = { aggregate: [ { $match: { _id: id } }, ], }; api.getDataContainers(payload) .then(response => this.setState({ data_container: _.get(response, 'data_containers[0]', {}) })); }, render() { if (this.state.data_container == null) { return (<h1>Loading Data Container<span className="loading" /></h1>); } if (_.isEmpty(this.state.data_container)) { return (<div className="alert alert-error">Data not available</div>); } return ( <div> <h1>Data Container</h1> <pre className="scroll"> {formatJSON(this.state.data_container)} </pre> </div> ); }, });
curious-containers/cc-ui
src/DataContainer.js
JavaScript
apache-2.0
1,124
package com.github.vivchar.rendererrecyclerviewadapter; import androidx.recyclerview.widget.DiffUtil; /** * Created by Vivchar Vitaly on 21.10.17. */ public abstract class DiffCallback <BM extends ViewModel> extends DiffUtil.ItemCallback<BM> {}
vivchar/RendererRecyclerViewAdapter
rendererrecyclerviewadapter/src/main/java/com/github/vivchar/rendererrecyclerviewadapter/DiffCallback.java
Java
apache-2.0
248
/* ************************************************************************ # # DivConq # # http://divconq.com/ # # Copyright: # Copyright 2014 eTimeline, LLC. All rights reserved. # # License: # See the license.txt file in the project's top-level directory for details. # # Authors: # * Andy White # ************************************************************************ */ package divconq.log; import io.netty.util.ResourceLeakDetector; import io.netty.util.ResourceLeakDetector.Level; import io.netty.util.internal.logging.InternalLoggerFactory; import java.io.File; import java.io.PrintWriter; import java.util.Locale; import java.util.concurrent.locks.ReentrantLock; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import divconq.lang.Memory; import divconq.lang.op.OperationContext; import divconq.lang.op.OperationResult; import divconq.locale.LocaleUtil; import divconq.util.HexUtil; import divconq.xml.XElement; /** * When logging messages to the debug log each message has a debug level. * The logger has a filter level and messages of lower priority than the * current debug level will not be logged. * * Note that 99% of the time the "current" debug level is determined by * the current TaskContext. The preferred way to log messages is through * the TaskContext or through an OperationResult. Ultimately a filter * is used to determine what should go in the log. * * In fact, when you call "void error(String message, String... tags)" * and other logging methods, theses methods will lookup the current * task context. So it is more efficient to work directly with task * context, however, occasional calls to these global logger methods * are fine. 
* * @author Andy * */ public class Logger { static protected DebugLevel globalLevel = DebugLevel.Info; static protected String locale = Locale.getDefault().toString(); // typically task logging is handled by a service on the bus, but on occasions // we want it to log to the file as well, from settings change this to 'true' static protected boolean toFile = true; static protected boolean toConsole = true; static protected PrintWriter logWriter = null; static protected ReentrantLock writeLock = new ReentrantLock(); static protected long filestart = 0; static protected ILogHandler handler = null; static protected XElement config = null; static public DebugLevel getGlobalLevel() { return Logger.globalLevel; } static public void setGlobalLevel(DebugLevel v) { Logger.globalLevel = v; // keep hub context up to date OperationContext.updateHubContext(); } static public String getLocale() { return Logger.locale; } static public void setLocale(String v) { Logger.locale = v; // keep hub context up to date OperationContext.updateHubContext(); } static public void setLogHandler(ILogHandler v) { Logger.handler = v; } static public void setToConsole(boolean v) { Logger.toConsole = v; } /** * Called from Hub.start this method configures the logging features. 
* * @param config xml holding the configuration */ static public void init(XElement config) { Logger.config = config; // TODO return operation result // TODO load levels, path etc // include a setting for startup logging - if present set the TC log level directly Logger.startNewLogFile(); // set by operation context init //Logger.locale = LocaleUtil.getDefaultLocale(); // From here on we can use netty and so we need the logger setup InternalLoggerFactory.setDefaultFactory(new divconq.log.netty.LoggerFactory()); if (Logger.config != null) { // set by operation context init //if (Logger.config.hasAttribute("Level")) // Logger.globalLevel = DebugLevel.parse(Logger.config.getAttribute("Level")); if (Logger.config.hasAttribute("NettyLevel")) { ResourceLeakDetector.setLevel(Level.valueOf(Logger.config.getAttribute("NettyLevel"))); Logger.debug("Netty Level set to: " + ResourceLeakDetector.getLevel()); } else if (!"none".equals(System.getenv("dcnet"))) { // TODO anything more we should do here? maybe paranoid isn't helpful? } // set by operation context init //if (Logger.config.hasAttribute("Locale")) // Logger.locale = Logger.config.getAttribute("Locale"); } } static protected void startNewLogFile() { try { File logfile = new File("./logs/" + DateTimeFormat.forPattern("yyyyMMdd'_'HHmmss").print(new DateTime(DateTimeZone.UTC)) + ".log"); if (!logfile.getParentFile().exists()) if (!logfile.getParentFile().mkdirs()) Logger.error("Unable to create logs folder."); logfile.createNewFile(); if (Logger.logWriter != null) { Logger.logWriter.flush(); Logger.logWriter.close(); } Logger.trace("Opening log file: " + logfile.getCanonicalPath()); Logger.logWriter = new PrintWriter(logfile, "utf-8"); Logger.filestart = System.currentTimeMillis(); } catch (Exception x) { Logger.error("Unable to create log file: " + x); } } /* * In a distributed setup, DivConq may route logging to certain Hubs and * bypass the local log file. 
During shutdown logging returns to local * log file so that the dcBus can shutdown and stop routing the messages. * @param or */ static public void stop(OperationResult or) { // TODO return operation result Logger.toFile = true; // go back to logging to file // TODO say no to database } static public boolean isDebug() { OperationContext ctx = OperationContext.get(); DebugLevel setlevel = (ctx != null) ? ctx.getLevel() : Logger.globalLevel; return (setlevel.getCode() >= DebugLevel.Debug.getCode()); } static public boolean isTrace() { OperationContext ctx = OperationContext.get(); DebugLevel setlevel = (ctx != null) ? ctx.getLevel() : Logger.globalLevel; return (setlevel.getCode() >= DebugLevel.Trace.getCode()); } /* * Insert a (string) message into the log * * @param message error text * @param tags searchable values associated with the message, key-value pairs can be created by putting two tags adjacent */ static public void error(String message, String... tags) { Logger.log(OperationContext.get(), DebugLevel.Error, message, tags); } /* * Insert a (string) message into the log * * @param message warning text * @param tags searchable values associated with the message, key-value pairs can be created by putting two tags adjacent */ static public void warn(String message, String... tags) { Logger.log(OperationContext.get(), DebugLevel.Warn, message, tags); } /* * Insert a (string) message into the log * * @param message info text * @param tags searchable values associated with the message, key-value pairs can be created by putting two tags adjacent */ static public void info(String message, String... tags) { Logger.log(OperationContext.get(), DebugLevel.Info, message, tags); } /* * Insert a (string) message into the log * * @param accessCode to translate * @param locals for the translation */ static public void debug(String message, String... 
tags) { Logger.log(OperationContext.get(), DebugLevel.Debug, message, tags); } /* * Insert a (string) message into the log * * @param accessCode to translate * @param locals for the translation */ static public void trace(String message, String... tags) { Logger.log(OperationContext.get(), DebugLevel.Trace, message, tags); } /* * Insert a (string) message into the log * * @param code to translate * @param params for the translation */ static public void errorTr(long code, Object... params) { Logger.log(OperationContext.get(), DebugLevel.Error, code, params); } /* * Insert a (string) message into the log * * @param code to translate * @param params for the translation */ static public void warnTr(long code, Object... params) { Logger.log(OperationContext.get(), DebugLevel.Warn, code, params); } /* * Insert a (string) message into the log * * @param code to translate * @param params for the translation */ static public void infoTr(long code, Object... params) { Logger.log(OperationContext.get(), DebugLevel.Info, code, params); } /* * Insert a (string) message into the log * * @param code to translate * @param params for the translation */ static public void traceTr(long code, Object... params) { Logger.log(OperationContext.get(), DebugLevel.Trace, code, params); } /* * Insert a (string) translated message into the log * * @param ctx context for log settings, null for none * @param level message level * @param code to translate * @param params for the translation */ static public void log(OperationContext ctx, DebugLevel level, long code, Object... 
params) { Logger.log(ctx, level, LocaleUtil.tr(Logger.locale, "_code_" + code, params), "Code", code + ""); } /* * Insert a (string) message into the log * * @param ctx context for log settings, null for none * @param level message level * @param message text to store in log * @param tags searchable values associated with the message, key-value pairs can be created by putting two tags adjacent */ static public void log(OperationContext ctx, DebugLevel level, String message, String... tags) { DebugLevel setlevel = (ctx != null) ? ctx.getLevel() : Logger.globalLevel; // do not log, is being filtered if (setlevel.getCode() < level.getCode()) return; Logger.logWr((ctx != null) ? ctx.getOpId() : null, level, message, tags); } /* * Insert a (string) translated message into the log * * @param ctx context for log settings, null for none * @param level message level * @param code to translate * @param params for the translation */ static public void logWr(String taskid, DebugLevel level, long code, Object... params) { Logger.logWr(taskid, level, LocaleUtil.tr(Logger.locale, "_code_" + code, params), "Code", code + ""); } /* * don't check, just write * * @param taskid * @param level * @param message * @param tags */ static public void logWr(String taskid, DebugLevel level, String message, String... 
tags) { String indicate = "M" + level.getIndicator(); /* TODO if (Logger.toDatabase) { Message lmsg = new Message("Logger"); lmsg.addHeader("Op", "Log"); lmsg.addHeader("Indicator", indicate); lmsg.addHeader("Occurred", occur); lmsg.addHeader("Tags", tagvalue); lmsg.addStringAttachment(message); Hub.instance.getBus().sendMessage(lmsg); } */ // write to file if not a Task or if File Tasks is flagged if (Logger.toFile || Logger.toConsole) { if (message != null) message = message.replace("\n", "\n\t"); // tab sub-lines Logger.write(taskid, indicate, message, tags); } } /* * Insert a chunk of hex encoded memory into the log * * @param ctx context for log settings, null for none * @param level message level * @param data memory to hex encode and store * @param tags searchable values associated with the message, key-value pairs can be created by putting two tags adjacent */ static public void log(OperationContext ctx, DebugLevel level, Memory data, String... tags) { DebugLevel setlevel = (ctx != null) ? ctx.getLevel() : Logger.globalLevel; // do not log, is being filtered if (setlevel.getCode() < level.getCode()) return; String indicate = "H" + level.getIndicator(); /* TODO if (tc != null) { Message lmsg = new Message("Logger"); lmsg.addHeader("Op", "Log"); lmsg.addHeader("Indicator", indicate); lmsg.addHeader("Occurred", occur); lmsg.addHeader("Tags", tagvalue); lmsg.addAttachment(data); Hub.instance.getBus().sendMessage(lmsg); } */ // write to file if not a Task or if File Tasks is flagged if (Logger.toFile || Logger.toConsole) Logger.write((ctx != null) ? ctx.getOpId() : null, indicate, HexUtil.bufferToHex(data), tags); } /* * A boundary delineates in section of a task log from another, making it * easier for a log viewer to organize the content. Boundary's are treated * like "info" messages, if only errors or warnings are being logged then * the boundary entry will be skipped. 
* * @param ctx context for log settings, null for none * @param tags searchable values associated with the message, key-value pairs can be created by putting two tags adjacent */ static public void boundary(OperationContext ctx, String... tags) { DebugLevel setlevel = (ctx != null) ? ctx.getLevel() : Logger.globalLevel; // do not log, is being filtered if (setlevel.getCode() < DebugLevel.Info.getCode()) return; Logger.boundaryWr((ctx != null) ? ctx.getOpId() : null, tags); } /* * Don't check, just write * * @param taskid * @param tags */ static public void boundaryWr(String taskid, String... tags) { /* TODO if (tc != null) { Message lmsg = new Message("Logger"); lmsg.addHeader("Op", "Log"); lmsg.addHeader("Indicator", "B"); lmsg.addHeader("Occurred", occur); lmsg.addHeader("Tags", tagvalue); Hub.instance.getBus().sendMessage(lmsg); } */ // write to file if not a Task or if File Tasks is flagged if (Logger.toFile || Logger.toConsole) Logger.write(taskid, "B ", "", tags); } static protected void write(String taskid, String indicator, String message, String... 
tags) { if (taskid == null) taskid = "00000_19700101T000000000Z_000000000000000"; DateTime occur = new DateTime(DateTimeZone.UTC); String tagvalue = ""; if ((tags != null) && tags.length > 0) { tagvalue = "|"; for (String tag : tags) tagvalue += tag + "|"; } if (Logger.handler != null) Logger.handler.write(occur.toString(), taskid, indicator, tagvalue, message); if (tagvalue.length() > 0) tagvalue += " "; Logger.write(occur + " " + taskid + " " + indicator + " " + tagvalue + message); } static protected void write(String msg) { if (Logger.toConsole) System.out.println(msg); if (!Logger.toFile || (Logger.logWriter == null)) return; Logger.writeLock.lock(); // start a new log file every 24 hours if (System.currentTimeMillis() - Logger.filestart > 86400000) Logger.startNewLogFile(); try { Logger.logWriter.println(msg); Logger.logWriter.flush(); } catch (Exception x) { // ignore, logger is broken } Logger.writeLock.unlock(); } }
gspandy/divconq
divconq.core/src/main/java/divconq/log/Logger.java
Java
apache-2.0
16,322
package consulo.hub.backend.repository; import consulo.hub.shared.auth.Roles; import consulo.hub.shared.auth.domain.UserAccount; import consulo.hub.shared.repository.PluginChannel; import consulo.hub.shared.repository.PluginNode; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.FileSystemResource; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.security.core.annotation.AuthenticationPrincipal; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.web.bind.annotation.*; import org.springframework.web.multipart.MultipartFile; import javax.annotation.Nonnull; import java.io.File; /** * @author VISTALL * @since 24-Sep-16 */ @RestController public class PluginChannelRestController { @ResponseStatus(value = HttpStatus.UNAUTHORIZED) private static class NotAuthorizedException extends RuntimeException { } private final PluginChannelsService myUserConfigurationService; private final PluginDeployService myPluginDeployService; private final PluginStatisticsService myPluginStatisticsService; @Autowired public PluginChannelRestController(@Nonnull PluginChannelsService userConfigurationService, @Nonnull PluginDeployService pluginDeployService, @Nonnull PluginStatisticsService pluginStatisticsService) { myUserConfigurationService = userConfigurationService; myPluginDeployService = pluginDeployService; myPluginStatisticsService = pluginStatisticsService; } // api methods @RequestMapping("/api/repository/download") public ResponseEntity<?> download(@RequestParam("channel") PluginChannel channel, @RequestParam("platformVersion") String platformVersion, @Deprecated @RequestParam(value = "pluginId", required = false) final String pluginId, @RequestParam(value = "id", required = false /* TODO [VISTALL] remove it after dropping 'pluginId' parameter*/) final String id, 
@RequestParam(value = "noTracking", defaultValue = "false", required = false) boolean noTracking, @RequestParam(value = "platformBuildSelect", defaultValue = "false", required = false) boolean platformBuildSelect, @RequestParam(value = "zip", defaultValue = "false", required = false) boolean zip, @RequestParam(value = "viaUpdate", defaultValue = "false", required = false) boolean viaUpdate, @RequestParam(value = "version", required = false) String version) { if(id == null && pluginId == null) { throw new IllegalArgumentException("'id' is null"); } String idValue = id; if(pluginId != null) { idValue = pluginId; } PluginChannelService channelService = myUserConfigurationService.getRepositoryByChannel(channel); String idNew = idValue; if(zip) { idNew = idValue + "-zip"; } PluginNode select = channelService.select(platformVersion, idNew, version, platformBuildSelect); if(select == null) { idNew = idValue; select = channelService.select(platformVersion, idNew, version, platformBuildSelect); } if(select == null) { return ResponseEntity.notFound().build(); } if(!noTracking) { myPluginStatisticsService.increaseDownload(idNew, channel, select.version, platformVersion, viaUpdate); } File targetFile = select.targetFile; assert targetFile != null; return ResponseEntity.ok().header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + targetFile.getName() + "\"").body(new FileSystemResource(targetFile)); } @RequestMapping(value = "/api/repository/platformDeploy", method = RequestMethod.POST) public PluginNode platformDeploy(@RequestParam("channel") PluginChannel channel, @RequestParam("file") MultipartFile file, @RequestParam(value = "history", required = false) MultipartFile history, @RequestParam("platformVersion") int platformVersion, @AuthenticationPrincipal UserAccount userAccount) throws Exception { if(!hasRole(userAccount, Roles.ROLE_SUPERDEPLOYER)) { throw new NotAuthorizedException(); } return myPluginDeployService.deployPlatform(channel, platformVersion, file, 
history); } @RequestMapping(value = "/api/repository/pluginDeploy", method = RequestMethod.POST) public PluginNode pluginDeploy(@RequestParam("channel") PluginChannel channel, @RequestParam("file") MultipartFile file, @RequestParam(value = "history", required = false) MultipartFile history, @AuthenticationPrincipal UserAccount userAccount) throws Exception { if(!hasRole(userAccount, Roles.ROLE_SUPERDEPLOYER)) { throw new NotAuthorizedException(); } return myPluginDeployService.deployPlugin(channel, () -> history == null ? null : history.getInputStream(), file::getInputStream); } private static boolean hasRole(UserAccount userAccount, String role) { return userAccount.getAuthorities().contains(new SimpleGrantedAuthority(role)); } @RequestMapping("/api/repository/list") public PluginNode[] list(@RequestParam("channel") PluginChannel channel, @RequestParam("platformVersion") String platformVersion, @RequestParam(value = "platformBuildSelect", defaultValue = "false", required = false) boolean platformBuildSelect) { PluginChannelService channelService = myUserConfigurationService.getRepositoryByChannel(channel); return channelService.select(myPluginStatisticsService, platformVersion, platformBuildSelect); } @RequestMapping("/api/repository/info") public ResponseEntity<PluginNode> info(@RequestParam("channel") PluginChannel channel, @RequestParam("platformVersion") String platformVersion, @RequestParam("id") final String id, @RequestParam(value = "zip", defaultValue = "false", required = false) boolean zip, @RequestParam(value = "version") String version) { PluginChannelService channelService = myUserConfigurationService.getRepositoryByChannel(channel); String idNew = id; if(zip) { idNew = id + "-zip"; } PluginNode select = channelService.select(platformVersion, idNew, version, true); if(select == null) { idNew = id; select = channelService.select(platformVersion, idNew, version, true); } if(select == null) { return ResponseEntity.status(HttpStatus.NOT_FOUND).body(null); 
} return ResponseEntity.ok(select.clone()); } }
consulo/hub.consulo.io
backend/src/main/java/consulo/hub/backend/repository/PluginChannelRestController.java
Java
apache-2.0
6,497
/** * Copyright 2011 Diego Ceccarelli * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.cnr.isti.hpc.wikipedia.reader; import info.bliki.wiki.dump.IArticleFilter; import info.bliki.wiki.dump.Siteinfo; import info.bliki.wiki.dump.WikiArticle; import info.bliki.wiki.dump.WikiXMLParser; import it.cnr.isti.hpc.benchmark.Stopwatch; import it.cnr.isti.hpc.io.IOUtils; import it.cnr.isti.hpc.log.ProgressLogger; import it.cnr.isti.hpc.wikipedia.article.Article; import it.cnr.isti.hpc.wikipedia.article.Article.Type; import it.cnr.isti.hpc.wikipedia.parser.ArticleParser; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.SAXException; /** * A reader that converts a Wikipedia dump in its json dump. The json dump will * contain all the article in the XML dump, one article per line. Each line will * be compose by the json serialization of the object Article. 
* * @see Article * * @author Diego Ceccarelli, diego.ceccarelli@isti.cnr.it created on 18/nov/2011 */ public class WikipediaArticleReader { /** * Logger for this class */ private static final Logger logger = LoggerFactory .getLogger(WikipediaArticleReader.class); private WikiXMLParser wxp; private BufferedWriter out; private ArticleParser parser; // private JsonRecordParser<Article> encoder; private static ProgressLogger pl = new ProgressLogger("parsed {} articles", 10000); private static Stopwatch sw = new Stopwatch(); /** * Generates a converter from the xml to json dump. * * @param inputFile * - the xml file (compressed) * @param outputFile * - the json output file, containing one article per line (if * the filename ends with <tt>.gz </tt> the output will be * compressed). * * @param lang * - the language of the dump * * */ public WikipediaArticleReader(String inputFile, String outputFile, String lang) { this(new File(inputFile), new File(outputFile), lang); } /** * Generates a converter from the xml to json dump. * * @param inputFile * - the xml file (compressed) * @param outputFile * - the json output file, containing one article per line (if * the filename ends with <tt>.gz </tt> the output will be * compressed). 
* * @param lang * - the language of the dump * * */ public WikipediaArticleReader(File inputFile, File outputFile, String lang) { JsonConverter handler = new JsonConverter(); // encoder = new JsonRecordParser<Article>(Article.class); parser = new ArticleParser(lang); try { wxp = new WikiXMLParser(inputFile.getAbsolutePath(), handler); } catch (Exception e) { logger.error("creating the parser {}", e.toString()); System.exit(-1); } out = IOUtils.getPlainOrCompressedUTF8Writer(outputFile .getAbsolutePath()); } /** * Starts the parsing */ public void start() throws IOException, SAXException { wxp.parse(); out.close(); logger.info(sw.stat("articles")); } private class JsonConverter implements IArticleFilter { public void process(WikiArticle page, Siteinfo si) { pl.up(); sw.start("articles"); String title = page.getTitle(); String id = page.getId(); String namespace = page.getNamespace(); Integer integerNamespace = page.getIntegerNamespace(); String timestamp = page.getTimeStamp(); Type type = Type.UNKNOWN; if (page.isCategory()) type = Type.CATEGORY; if (page.isTemplate()) { type = Type.TEMPLATE; // FIXME just to go fast; sw.stop("articles"); return; } if (page.isProject()) { type = Type.PROJECT; // FIXME just to go fast; sw.stop("articles"); return; } if (page.isFile()) { type = Type.FILE; // FIXME just to go fast; sw.stop("articles"); return; } if (page.isMain()) type = Type.ARTICLE; Article article = new Article(); article.setTitle(title); article.setWikiId(Integer.parseInt(id)); article.setNamespace(namespace); article.setIntegerNamespace(integerNamespace); article.setTimestamp(timestamp); article.setType(type); parser.parse(article, page.getText()); try { out.write(article.toJson()); out.write("\n"); } catch (IOException e) { logger.error("writing the output file {}", e.toString()); System.exit(-1); } sw.stop("articles"); return; } } }
naveenmadhire/json-wikipedia
src/main/java/it/cnr/isti/hpc/wikipedia/reader/WikipediaArticleReader.java
Java
apache-2.0
5,043
package com.netsun.labuy.utils; import java.io.IOException; import okhttp3.Callback; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; /** * Created by Administrator on 2017/2/24. */ public class HttpUtils { public static void get(String url, Callback callback) { OkHttpClient client = new OkHttpClient(); Request request = new Request.Builder().url(url).build(); client.newCall(request).enqueue(callback); } public static void post(String url, RequestBody body, Callback callback) { OkHttpClient client = new OkHttpClient(); Request request = new Request.Builder().url(url).post(body).build(); client.newCall(request).enqueue(callback); } public static Response get(String url){ OkHttpClient client = new OkHttpClient(); Request request = new Request.Builder().url(url).build(); try { Response response = client.newCall(request).execute(); return response; } catch (IOException e) { e.printStackTrace(); } return null; } }
yljnet/Labuy
app/src/main/java/com/netsun/labuy/utils/HttpUtils.java
Java
apache-2.0
1,145
#!/usr/bin/python # -*- coding: utf-8 -*- """ crawler.py ~~~~~~~~~~~~~~ A brief description goes here. """ import csv import urllib2 import urllib import re import os import urlparse import threading import logging import logging.handlers import time import random import bs4 MINIMUM_PDF_SIZE = 4506 TASKS = None def create_logger(filename, logger_name=None): logger = logging.getLogger(logger_name or filename) fmt = '[%(asctime)s] %(levelname)s %(message)s' datefmt = "%Y-%m-%d %H:%M:%S" formatter = logging.Formatter(fmt=fmt, datefmt=datefmt) handler = logging.handlers.RotatingFileHandler(filename, maxBytes=1024 * 1024 * 1024, backupCount=10) handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) return logger log = create_logger('crawl.log') class ExceedMaximumRetryError(Exception): def __init__(self, sbid, url): self.sbid = sbid self.url = url def retrieve(url, sbid, output_folder): """Download the PDF or search for the webpage for any PDF link Args: url, assuming the input url is valid """ def _urlfetch(url, sbid, filename=None, retry=10): """ A wrapper for either urlopen or urlretrieve. 
It depends on the whether there is a filename as input """ if filename and os.path.exists(filename): log.warn("%s\tDUPLICATED\t%s" % (sbid, url)) return None sleep_time = random.random() + 0.5 for i in range(1, retry+1): try: result = None if filename: result = urllib.urlretrieve(url, filename) log.info("%s\tOK\t%s" % (sbid, url)) else: # No log now, because later we would like to ensure # the existance of PDFs result = urllib2.urlopen(url).read() return result except urllib.ContentTooShortError as e: log.warn("%s\tContentTooShortError\t%s\tRetry:%i&Sleep:%.2f" % (sbid, url, i, sleep_time)) time.sleep(sleep_time) except urllib2.HTTPError as e: log.warn("%s\tHTTP%i\t%s\tRetry:%i&Sleep:%.2f\t%s" % (sbid, e.code, url, i, sleep_time, e.reason)) time.sleep(sleep_time) # Sleep longer if it is server error # http://en.wikipedia.org/wiki/Exponential_backoff if e.code / 100 == 5: sleep_time = random.randint(0, 2 ** i - 1) except urllib2.URLError as e: log.warn("%s\tURLError\t%s\tRetry:%i&Sleep:%.2f\t%s" % (sbid, url, i, sleep_time, e.reason)) time.sleep(sleep_time) raise ExceedMaximumRetryError(sbid=sbid, url=url) if url.endswith('.pdf'): #: sbid is not unique, so use sbid+pdfname as new name pdf_name = url.split('/')[-1].split('.')[0] _urlfetch(url, sbid, os.path.join(output_folder, "%s.%s.pdf" % (sbid, pdf_name))) else: page = _urlfetch(url, sbid) soup = bs4.BeautifulSoup(page) anchors = soup.findAll('a', attrs={'href': re.compile(".pdf$", re.I)}) if not anchors: log.warn("%s\tNO_PDF_DETECTED\t%s" % (sbid, url)) return None for a in anchors: href = a['href'] pdf_name = href.split('/')[-1] sub_url = urlparse.urljoin(url, href) _urlfetch(sub_url, sbid, os.path.join(output_folder, "%s.%s" % (sbid, pdf_name))) def get_tasks(csv_filepath): """ Returns: [{'ScienceBaseID': a1b2c3d4, 'webLinks__uri': 'http://balabala'}, {}] """ l = [] with open(csv_filepath, 'r') as f: reader = csv.DictReader(f, delimiter=',', quotechar='"') for row in reader: if 'Action' in row and 
row['Action'].lower() == 'ignore for now': continue else: l.append(row) return l def get_completed_tasks(output_folder): """ Return downloaded tasks """ completed = set() for f in os.listdir(output_folder): filepath = os.path.join(output_folder, f) with open(filepath, 'r') as ff: head_line = ff.readline() #if os.stat(filepath).st_size > MINIMUM_PDF_SIZE: if head_line.startswith("%PDF"): completed.add(f.split('.')[0]) else: os.remove(filepath) print 'deleted: ', filepath, head_line return completed def crawl(csv_filepath, output_folder='pdfs', exclude_downloaded=False): """main function """ global TASKS TASKS = get_tasks(csv_filepath) excluded = set() if exclude_downloaded: excluded = get_completed_tasks(output_folder) for i in range(128): t = threading.Thread(target=crawler, args=(output_folder, excluded)) t.start() main_thread = threading.current_thread() for t in threading.enumerate(): if t is main_thread: continue t.join() def crawler(output_folder, excluded=set()): """ Thread working function """ finished = 0 print "thread %i has started, exclude %i items" %\ (threading.current_thread().ident, len(excluded)) global TASKS while True: task = None try: task = TASKS.pop() except IndexError: print "thread %i finished %i tasks, exiting for no task available"\ % (threading.current_thread().ident, finished) break try: if not task: break sbid = task['ScienceBaseID'] # some webLinks__uri looks like: # http://www.springerlink.com/content/p543611u8317w447/?p=a0e7243d602f4bd3b33b2089b2ed92e4&pi=5 ; http://www.springerlink.com/content/p543611u8317w447/fulltext.pdf # since both url will redirect to the same url finally, I did not retrieve them twice url = task['webLinks__uri'] if sbid in excluded: continue retrieve(url, sbid, output_folder) finished += 1 if finished % 20 == 0: print "%i has finished %i" % (threading.current_thread().ident, finished) except ExceedMaximumRetryError as e: log.error("%s\tEXCEED_MAXIMUM_RETRY\t%s" % (e.sbid, e.url)) except Exception as e: print e, 
task log.error("%s\tUNEXPECTED\t%s\t%s" % (sbid, url, e)) def main(argv): print crawl(argv[1], '/scratch/pdfs') if __name__ == '__main__': import sys main(sys.argv)
luozhaoyu/deepdive
crawler.py
Python
apache-2.0
6,748
package Cheiron.Datasource; import java.util.Map; public abstract class Datasource { public abstract Map<String, Map<String, String>> getData() throws Exception; public Object get(String field) throws Exception { return this.getClass().getDeclaredField(field).get(this); } public void set(String field, Object value) throws Exception { this.getClass().getDeclaredField(field).set(this, value); } }
schlusslicht/Cheiron
src/Cheiron/Datasource/Datasource.java
Java
apache-2.0
413
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from functools import partial from typing import Callable, Iterable, Optional, Tuple, Union from absl import logging import numpy as np from jax import core from jax.interpreters import ad from jax.interpreters import partial_eval as pe # TODO(skye): separate pmap into it's own module? from jax.interpreters import mlir from jax.interpreters import pxla from jax.interpreters import xla from jax import linear_util as lu from jax._src import dispatch from jax._src.lib import xla_bridge as xb from jax._src.lib import xla_client as xc from jax._src.lib.mlir import ir from jax._src.lib.mlir.dialects import func as func_dialect from jax._src.api_util import (argnums_partial, flatten_axes, flatten_fun, _ensure_index_tuple) import jax._src.util as util from jax.tree_util import tree_flatten, tree_unflatten from jax._src.util import (new_name_stack, wrap_name, wraps, safe_map, safe_zip, HashableFunction) from jax._src.config import config xops = xc._xla.ops def _map(f, *xs): return tuple(map(f, *xs)) class ResultToPopulate: pass result_to_populate = ResultToPopulate() def _avals_to_results_handler(nrep, npart, partitions, out_avals): handlers = [_aval_to_result_handler(npart, parts, out_aval) for parts, out_aval in safe_zip(partitions, out_avals)] def handler(out_bufs): return [h(bufs) for h, bufs in zip(handlers, out_bufs)] return handler def _aval_to_result_handler(npart, parts, aval): if aval is not 
core.abstract_unit: spec = pxla.partitioned_sharding_spec(npart, parts, aval) indices = pxla.spec_to_indices(aval.shape, spec) else: spec = indices = None return pxla.local_aval_to_result_handler(aval, spec, indices) @lu.cache def _sharded_callable( fun: lu.WrappedFun, nparts: Optional[int], in_parts: Tuple[pxla.PartitionsOrReplicated, ...], out_parts_thunk: Callable[[], Tuple[pxla.PartitionsOrReplicated, ...]], local_in_parts: Optional[Tuple[pxla.PartitionsOrReplicated, ...]], local_out_parts_thunk: Callable[[], Optional[Tuple[pxla.PartitionsOrReplicated, ...]]], local_nparts: Optional[int], name: str, *abstract_args): nrep = 1 if local_in_parts is None: local_in_parts = in_parts global_abstract_args = [pxla.get_global_aval(arg, parts, lparts) for arg, parts, lparts in safe_zip(abstract_args, in_parts, local_in_parts)] if logging.vlog_is_on(2): logging.vlog(2, "abstract_args: %s", abstract_args) logging.vlog(2, "global_abstract_args: %s", global_abstract_args) logging.vlog(2, "in_parts: %s", in_parts) logging.vlog(2, "local_in_parts: %s", local_in_parts) jaxpr, global_out_avals, consts = pe.trace_to_jaxpr_final(fun, global_abstract_args) platform = xb.get_backend().platform if platform not in ["tpu", "gpu"]: # TODO(skye): fall back to regular jit? raise ValueError(f"sharded_jit not supported for {platform}") nparts = pxla.reconcile_num_partitions(jaxpr, nparts) assert nparts is not None if nparts > xb.device_count(): raise ValueError( f"sharded_jit computation requires {nparts} devices, " f"but only {xb.device_count()} devices are available.") if xb.local_device_count() < nparts < xb.device_count(): raise NotImplementedError( f"sharded_jit across multiple hosts must use all available devices. 
" f"Got {nparts} out of {xb.device_count()} requested devices " f"(local device count: {xb.local_device_count()})") if local_nparts is None: if nparts > xb.local_device_count(): raise ValueError( "Specify 'local_nparts' when using cross-process sharded_jit " "and all inputs and outputs are replicated.") else: local_nparts = nparts if local_nparts > xb.local_device_count(): raise ValueError( f"sharded_jit computation requires {local_nparts} local devices, " f"but only {xb.local_device_count()} local devices are available.") if logging.vlog_is_on(2): logging.vlog(2, "nparts: %d local_nparts: %d", nparts, local_nparts) out_parts = out_parts_thunk() local_out_parts = local_out_parts_thunk() if local_out_parts is None: local_out_parts = out_parts if logging.vlog_is_on(2): logging.vlog(2, "out_parts: %s", out_parts) logging.vlog(2, "local_out_parts: %s", local_out_parts) local_out_avals = [pxla.get_local_aval(out, parts, lparts) for out, parts, lparts in safe_zip(global_out_avals, out_parts, local_out_parts)] log_priority = logging.WARNING if config.jax_log_compiles else logging.DEBUG logging.log(log_priority, "Compiling %s for %d devices with args %s.", fun.__name__, nparts, global_abstract_args) c = xc.XlaBuilder("spjit_{}".format(fun.__name__)) xla_consts = _map(partial(xla.pyval_to_ir_constant, c), consts) xla_args = _xla_sharded_args(c, global_abstract_args, in_parts) axis_env = xla.AxisEnv(nrep, (), ()) ctx = xla.TranslationContext( c, platform, axis_env, new_name_stack(wrap_name(name, "sharded_jit"))) out_nodes = xla.jaxpr_subcomp(ctx, jaxpr, xla_consts, *xla_args) out_tuple = xla.with_sharding(c, out_parts, xops.Tuple, c, out_nodes) built = c.Build(out_tuple) if nparts <= xb.local_device_count(): devices = xb.local_devices()[:nparts] else: assert nparts == xb.device_count() devices = xb.devices() device_assignment = np.array([[d for d in devices]]) device_assignment = np.reshape(device_assignment, (-1, nparts)) # device_assignment = None # TODO(skye): replace 
with default device assignment? compiled = dispatch.backend_compile( xb.get_backend(), built, xb.get_compile_options(nrep, nparts, device_assignment)) input_specs = [ pxla.partitioned_sharding_spec(local_nparts, parts, aval) for parts, aval in zip(local_in_parts, abstract_args)] input_indices = [pxla.spec_to_indices(aval.shape, spec) if spec is not None else None for aval, spec in zip(abstract_args, input_specs)] handle_args = partial(pxla.shard_args, compiled.local_devices(), input_indices) handle_outs = _avals_to_results_handler(nrep, local_nparts, # type: ignore local_out_parts, local_out_avals) return partial(_execute_spatially_partitioned, compiled, handle_args, handle_outs) def _sharded_jit_translation_rule(ctx, avals_in, avals_out, *in_nodes, in_parts, out_parts_thunk, nparts, name, call_jaxpr, local_in_parts, local_out_parts_thunk, local_nparts): subc = xc.XlaBuilder(f"sharded_jit_{name}") # We assume any extra leading in_nodes are constants and replicate them. num_extra_nodes = len(in_nodes) - len(in_parts) assert num_extra_nodes >= 0 in_parts = (None,) * num_extra_nodes + in_parts args = [] for i, (n, sharding) in enumerate(safe_zip(in_nodes, in_parts)): # We use xla.set_sharding instead of xla.with_sharding because inlined calls # shouldn't have shardings set directly on the inputs or outputs. 
arg = xla.parameter(subc, i, ctx.builder.GetShape(n)) args.append(xla.set_sharding(subc, arg, sharding)) sub_ctx = ctx.replace( builder=subc, name_stack=new_name_stack(wrap_name(name, "sharded_jit"))) out_nodes = xla.jaxpr_subcomp(sub_ctx, call_jaxpr, (), *args) out_parts = out_parts_thunk() assert len(out_parts) == len(out_nodes) out_nodes = [xla.set_sharding(subc, out, sharding) for out, sharding in safe_zip(out_nodes, out_parts)] subc = subc.build(xops.Tuple(subc, out_nodes)) return xla.xla_destructure(ctx.builder, xops.Call(ctx.builder, subc, list(in_nodes))) def _sharded_jit_lowering(ctx, *in_nodes, in_parts, out_parts_thunk, nparts, name, call_jaxpr, local_in_parts, local_out_parts_thunk, local_nparts): # We assume any extra leading in_nodes are constants and replicate them. num_extra_nodes = len(in_nodes) - len(in_parts) assert num_extra_nodes >= 0 in_parts = (None,) * num_extra_nodes + in_parts args = [] for ns, sharding in safe_zip( safe_map(mlir.wrap_singleton_ir_values, in_nodes), in_parts): if sharding is not None: args.append( [mlir.wrap_with_sharding_op(n, xla.sharding_to_proto(sharding)) for n in ns]) else: args.append(ns) sub_ctx = ctx.module_context.replace( name_stack=new_name_stack(wrap_name(name, "sharded_jit"))) fn = mlir.lower_jaxpr_to_fun(sub_ctx, f"sharded_jit_{name}", core.ClosedJaxpr(call_jaxpr, ())) output_types = safe_map(mlir.aval_to_ir_types, ctx.avals_out) flat_output_types = util.flatten(output_types) call = func_dialect.CallOp(flat_output_types, ir.FlatSymbolRefAttr.get(fn.name.value), mlir.flatten_lowering_ir_args(args)) out_nodes = util.unflatten(call.results, safe_map(len, output_types)) out_parts = out_parts_thunk() outputs = [] for ns, sharding in safe_zip(out_nodes, out_parts): if sharding is not None: outputs.append( [mlir.wrap_with_sharding_op(n, xla.sharding_to_proto(sharding)) for n in ns]) else: outputs.append(ns) return outputs def _execute_spatially_partitioned(compiled, in_handler, out_handler, *args): input_bufs = 
in_handler(args) out_bufs = compiled.execute_sharded_on_local_devices(input_bufs) return out_handler(out_bufs) def _xla_sharded_args(c, avals, in_parts): xla_args = [] for i, (sharding, aval) in enumerate(safe_zip(in_parts, avals)): param = xla.with_sharding(c, sharding, xla.parameter, c, i, *xla.aval_to_xla_shapes(aval)) xla_args.append(param) return xla_args def _sharded_call_impl(fun, *args, nparts, in_parts, out_parts_thunk, local_in_parts, local_out_parts_thunk, local_nparts, name): compiled_fun = _sharded_callable(fun, nparts, in_parts, out_parts_thunk, local_in_parts, local_out_parts_thunk, local_nparts, name, *map(xla.abstractify, args)) return compiled_fun(*args) sharded_call_p = core.CallPrimitive("sharded_call") sharded_call = sharded_call_p.bind sharded_call_p.def_impl(_sharded_call_impl) xla.register_translation(sharded_call_p, _sharded_jit_translation_rule) mlir.register_lowering(sharded_call_p, _sharded_jit_lowering) class _UnconstrainedPartitionSingleton: def __str__(self): return "UNCONSTRAINED" # Unconstrained sentinel value for PartitionSpec, representing a dimension for # which the user wants XLA to assign the best partitioning. # TODO(yashkatariya): May rename to AUTO. _UNCONSTRAINED_PARTITION = _UnconstrainedPartitionSingleton() class PartitionSpec(tuple): """Tuple of integer specifying how a value should be partitioned. Each integer corresponds to how many ways a dimension is partitioned. We create a separate class for this so JAX's pytree utilities can distinguish it from a tuple that should be treated as a pytree. 
""" def __new__(cls, *partitions): return tuple.__new__(PartitionSpec, partitions) def __repr__(self): return "PartitionSpec%s" % tuple.__repr__(self) """A sentinel value representing a dim is unconstrained.""" UNCONSTRAINED = _UNCONSTRAINED_PARTITION def sharded_jit( fun: Callable, in_parts, out_parts, num_partitions: Optional[int] = None, local_in_parts=None, local_out_parts=None, local_num_partitions=None, static_argnums: Union[int, Iterable[int]] = (), ): """Like ``jit``, but partitions ``fun`` across multiple devices. WARNING: this feature is still under active development! It may not work well, and may change without warning! `sharded_jit` sets up ``fun`` for just-in-time compilation with XLA, but unlike ``jit``, the compiled function will run across multiple devices (e.g. multiple GPUs or multiple TPU cores). This is achieved by spatially partitioning the data that flows through the computation, so each operation is run across all devices and each device runs only a shard of the full data. (Some data can optionally be replicated, which is sometimes more efficient for small arrays when combined with larger spatially-partitioned arrays.) Communication between devices is automatically inserted as necessary. ``sharded_jit`` can be useful if the jitted version of ``fun`` would not fit in a single device's memory, or to speed up ``fun`` by running each operation in parallel across multiple devices. Note: ``sharded_jit`` is currently available on TPU only! Args: fun: Function to be jitted. in_parts: Specifications for how each argument to ``fun`` should be partitioned or replicated. This should be a PartitionSpec indicating into how many partitions each dimension should be sharded, ``None`` indicating replication, or (nested) standard Python containers thereof. 
For example, ``in_parts=PartitionSpec(2,1)`` means all arguments should be partitioned over two devices across the first dimension; ``in_parts=(PartitionSpec(2,2), PartitionSpec(4,1), None)`` means the first argument should be partitioned over four devices by splitting both of its dimensions in half, the second argument should be partitioned over the four devices across the first dimension, and the third argument is replicated across the four devices. All PartitionSpecs in a given ``sharded_jit`` call must correspond to the same total number of partitions, i.e. the product of all PartitionSpecs must be equal, and the number of dimensions in the PartitionSpec corresponding to an array ``a`` should equal ``a.ndim``. Arguments marked as static using ``static_argnums`` (see below) do not require a PartitionSpec. out_parts: The output partitions, i.e. how each output of ``fun`` should be partitioned or replicated. This follows the same convention as ``in_parts``. num_partitions: Optional. If set, explicitly specifies the number of devices ``fun`` should partitioned across (rather than inferring it from ``in_parts``, ``out_parts``, and/or any ``with_sharding_constraint`` calls). Setting this should usually be unnecessary, but can be used to maintain device persistence across multiple sharded_jit calls when some of those calls only involve replicated values. local_in_parts: Optional. This should be set when partitioning across multiple processes, and says how each process's worth of data should be partitioned (vs. in_parts which is the "global" partitioning across all processes). This API is likely to change in the future. local_out_parts: Optional. This should be set when partitioning across multiple processes, and says how each process's worth of data should be partitioned (vs. out_parts which is the "global" partitioning across all processes). This API is likely to change in the future. local_num_partitions: Optional. 
Explicitly specifies the numbers of local devices to partitions across in a multi-process setting. This API is likely to change in the future. static_argnums: An int or collection of ints specifying which positional arguments to treat as static (compile-time constant). Operations that only depend on static arguments will be constant-folded. Calling the jitted function with different values for these constants will trigger recompilation. If the jitted function is called with fewer positional arguments than indicated by ``static_argnums`` then an error is raised. Each of the static arguments will be broadcasted to all devices, and cannot be partitioned - these arguments will be removed from the *args list before matching each remaining argument with its corresponding PartitionSpec. Arguments that are not arrays or containers thereof must be marked as static. Defaults to ``()``. Returns: A version of ``fun`` that will be distributed across multiple devices. """ if num_partitions is not None: nparts = num_partitions else: nparts = pxla.get_num_partitions(in_parts, out_parts) if local_num_partitions is not None: local_nparts = local_num_partitions else: local_nparts = pxla.get_num_partitions(local_in_parts, local_out_parts) static_argnums = _ensure_index_tuple(static_argnums) @wraps(fun) def wrapped(*args, **kwargs): if kwargs: raise NotImplementedError("sharded_jit over kwargs not yet supported") f = lu.wrap_init(fun) if static_argnums: if max(static_argnums) >= len(args): raise ValueError( f"jitted function has static_argnums={static_argnums}" f" but was called with only {len(args)} positional " f"argument{'s' if len(args) > 1 else ''}. 
" "All static broadcasted arguments must be passed positionally.") dyn_argnums = [i for i in range(len(args)) if i not in static_argnums] f, args = argnums_partial(f, dyn_argnums, args) args_flat, in_tree = tree_flatten((args, kwargs)) in_parts_flat = tuple(flatten_axes("sharded_jit in_parts", in_tree.children()[0], in_parts)) if local_in_parts is not None: local_in_parts_flat = tuple(flatten_axes("sharded_jit local_in_parts", in_tree.children()[0], local_in_parts)) else: local_in_parts_flat = None flat_fun, out_tree = flatten_fun(f, in_tree) # TODO(skye): having a function-typed param in a primitive seems dicey, is # there a better way? out_parts_thunk = HashableFunction( lambda: tuple(flatten_axes("sharded_jit out_parts", out_tree(), out_parts)), closure=out_parts) if local_out_parts: local_out_parts_thunk = HashableFunction( lambda: tuple(flatten_axes("sharded_jit local_out_parts", out_tree(), local_out_parts)), closure=local_out_parts) else: local_out_parts_thunk = HashableFunction(lambda: None, closure=None) out = sharded_call( flat_fun, *args_flat, nparts=nparts, in_parts=in_parts_flat, out_parts_thunk=out_parts_thunk, local_in_parts=local_in_parts_flat, local_out_parts_thunk=local_out_parts_thunk, local_nparts=local_nparts, name=flat_fun.__name__) return tree_unflatten(out_tree(), out) return wrapped def _sharding_constraint_impl(x, partitions): # TODO(skye): can we also prevent this from being called in other # non-sharded_jit contexts? (e.g. 
pmap, control flow) raise NotImplementedError( "with_sharding_constraint() should only be called inside sharded_jit()") def _sharding_constraint_translation_rule(ctx, avals_in, avals_out, x_node, partitions): return [xla.set_sharding(ctx.builder, x_node, partitions)] sharding_constraint_p = core.Primitive("sharding_constraint") sharding_constraint_p.def_impl(_sharding_constraint_impl) sharding_constraint_p.def_abstract_eval(lambda x, partitions: x) ad.deflinear2(sharding_constraint_p, lambda ct, _, partitions: (with_sharding_constraint(ct, partitions),)) xla.register_translation(sharding_constraint_p, _sharding_constraint_translation_rule) def _sharding_constraint_lowering(ctx, x_node, partitions): return [mlir.wrap_with_sharding_op(x_node, xla.sharding_to_proto(partitions))] mlir.register_lowering(sharding_constraint_p, _sharding_constraint_lowering) def with_sharding_constraint(x, partitions: Optional[PartitionSpec]): """Identity-like function that specifies how ``x`` should be sharded. WARNING: this feature is still under active development! It may not work well, and may change without warning! This should only be called inside a function transformed by ``sharded_jit``. It constrains how the function is sharded: regardless of any other specified partitions, the compiler will make sure that ``x`` is sharded according to ``partitions``. Note that a ``with_sharding_constraint`` call doesn't necessarily correspond to a reshard, since the compiler is free to achieve this sharding as long as the constraint is met, e.g. it might insert a reshard earlier in the computation. Another way to think of this is that the ``with_sharding_constraint`` call may flow "up" the function to preceding operations as well as "down" to subsequent ones. ``partitions`` must correspond to the same number of total partitions dictated by the outer ``sharded_jit`` and any other ``with_sharding_constraint`` calls. 
In the case where only replication has been specified, any ``partitions`` are valid. Example usage: @partial(sharded_jit, in_parts=None, out_parts=None, num_shards=2 def f(x): y = x + 1 y = with_sharding_constraint(y, PartitionSpec(2,1)) return y * 2 In this example, the inputs and outputs of ``f`` will be replicated, but the inner value of ``y`` will be partitioned in half. ``f`` will run on two devices due to the with_sharding_constraint call. Args: x: Array value partitions: PartitionSpec indicating how ``x`` should be partitioned, or None for replication. Returns: A new version of ``x`` with the specified sharding applied. """ return sharding_constraint_p.bind(x, partitions=partitions)
google/jax
jax/interpreters/sharded_jit.py
Python
apache-2.0
22,527
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0, (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tle.web.sections.jquery.libraries; import com.tle.common.i18n.CurrentLocale; import com.tle.core.javascript.JavascriptModule; import com.tle.web.sections.jquery.JQueryLibraryInclude; import com.tle.web.sections.render.PreRenderable; @SuppressWarnings("nls") public class JQueryDroppable implements JavascriptModule { private static final long serialVersionUID = 1L; public static final PreRenderable PRERENDER = new JQueryLibraryInclude( "jquery.ui.droppable.js", JQueryUICore.PRERENDER, JQueryMouse.PRERENDER, JQueryUIWidget.PRERENDER, JQueryDraggable.PRERENDER) .hasMin(); @Override public String getDisplayName() { return CurrentLocale.get("com.tle.web.sections.jquery.modules.droppable.name"); } @Override public String getId() { return "droppable"; } @Override public Object getPreRenderer() { return PRERENDER; } }
equella/Equella
Source/Plugins/Core/com.equella.core/src/com/tle/web/sections/jquery/libraries/JQueryDroppable.java
Java
apache-2.0
1,764
package br.com.fuelclub.entity; import java.util.List; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.ManyToMany; import javax.persistence.Table; @Entity @Table (name = "Dias_da_Semana") public class Dias_Da_Semana { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long diasDaSemana_id; private String diasDaSemana_descricao; @ManyToMany private List<PostoCombustivel> postos; public Long getDiasDaSemana_id() { return diasDaSemana_id; } public void setDiasDaSemana_id(Long diasDaSemana_id) { this.diasDaSemana_id = diasDaSemana_id; } public String getDiasDaSemana_descricao() { return diasDaSemana_descricao; } public void setDiasDaSemana_descricao(String diasDaSemana_descricao) { this.diasDaSemana_descricao = diasDaSemana_descricao; } public List<PostoCombustivel> getPostos() { return postos; } public void setPostos(List<PostoCombustivel> postos) { this.postos = postos; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((diasDaSemana_descricao == null) ? 0 : diasDaSemana_descricao.hashCode()); result = prime * result + ((diasDaSemana_id == null) ? 0 : diasDaSemana_id.hashCode()); result = prime * result + ((postos == null) ? 
0 : postos.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Dias_Da_Semana other = (Dias_Da_Semana) obj; if (diasDaSemana_descricao == null) { if (other.diasDaSemana_descricao != null) return false; } else if (!diasDaSemana_descricao.equals(other.diasDaSemana_descricao)) return false; if (diasDaSemana_id == null) { if (other.diasDaSemana_id != null) return false; } else if (!diasDaSemana_id.equals(other.diasDaSemana_id)) return false; if (postos == null) { if (other.postos != null) return false; } else if (!postos.equals(other.postos)) return false; return true; } @Override public String toString() { return "Dias_Da_Semana [diasDaSemana_descricao=" + diasDaSemana_descricao + "]"; } public Dias_Da_Semana(Long diasDaSemana_id, String diasDaSemana_descricao, List<PostoCombustivel> postos) { super(); this.diasDaSemana_id = diasDaSemana_id; this.diasDaSemana_descricao = diasDaSemana_descricao; this.postos = postos; } public Dias_Da_Semana() { super(); // TODO Auto-generated constructor stub } }
lucasgrittem/AppsCwb
FuelClubWeb/src/br/com/fuelclub/entity/Dias_Da_Semana.java
Java
apache-2.0
2,617
/* * Copyright 2006-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.consol.citrus.validation.json; import com.consol.citrus.context.TestContext; import com.consol.citrus.exceptions.CitrusRuntimeException; import com.consol.citrus.exceptions.UnknownElementException; import com.consol.citrus.message.Message; import com.consol.citrus.message.MessageType; import com.consol.citrus.validation.interceptor.AbstractMessageConstructionInterceptor; import com.jayway.jsonpath.*; import net.minidev.json.parser.JSONParser; import net.minidev.json.parser.ParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.StringUtils; import java.util.HashMap; import java.util.Map; /** * @author Christoph Deppisch * @since 2.3 */ public class JsonPathMessageConstructionInterceptor extends AbstractMessageConstructionInterceptor { /** Logger */ private static Logger log = LoggerFactory.getLogger(JsonPathMessageConstructionInterceptor.class); /** Overwrites message elements before validating (via JSONPath expressions) */ private Map<String, String> jsonPathExpressions = new HashMap<>(); /** * Default constructor. */ public JsonPathMessageConstructionInterceptor() { super(); } /** * Default constructor using fields. 
* @param jsonPathExpressions */ public JsonPathMessageConstructionInterceptor(Map<String, String> jsonPathExpressions) { super(); this.jsonPathExpressions = jsonPathExpressions; } /** * Intercept the message payload construction and replace elements identified * via XPath expressions. * * Method parses the message payload to DOM document representation, therefore message payload * needs to be XML here. */ @Override public Message interceptMessage(Message message, String messageType, TestContext context) { if (message.getPayload() == null || !StringUtils.hasText(message.getPayload(String.class))) { return message; } String jsonPathExpression = null; try { JSONParser parser = new JSONParser(JSONParser.MODE_JSON_SIMPLE); Object jsonData = parser.parse(message.getPayload(String.class)); DocumentContext documentContext = JsonPath.parse(jsonData); for (Map.Entry<String, String> entry : jsonPathExpressions.entrySet()) { jsonPathExpression = entry.getKey(); String valueExpression = context.replaceDynamicContentInString(entry.getValue()); documentContext.set(jsonPathExpression, valueExpression); if (log.isDebugEnabled()) { log.debug("Element " + jsonPathExpression + " was set to value: " + valueExpression); } } message.setPayload(jsonData.toString()); } catch (ParseException e) { throw new CitrusRuntimeException("Failed to parse JSON text", e); } catch (PathNotFoundException e) { throw new UnknownElementException(String.format("Could not find element for expression: %s", jsonPathExpression), e); } return message; } @Override public boolean supportsMessageType(String messageType) { return MessageType.JSON.toString().equalsIgnoreCase(messageType); } public void setJsonPathExpressions(Map<String, String> jsonPathExpressions) { this.jsonPathExpressions = jsonPathExpressions; } public Map<String, String> getJsonPathExpressions() { return jsonPathExpressions; } }
hmmlopez/citrus
modules/citrus-core/src/main/java/com/consol/citrus/validation/json/JsonPathMessageConstructionInterceptor.java
Java
apache-2.0
4,178
<?php
/**
 * Fetches facility records from the VIMS (eLMIS) REST API and pushes each one
 * into OpenInfoMan as a CSD facility document.
 */

// Create a cURL handle with the shared options used by every request.
// NOTE(review): SSL peer/host verification is disabled, which permits
// man-in-the-middle attacks — acceptable only against a trusted test server.
function initialize_curl() {
	$ch = curl_init();
	curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
	curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false);
	curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
	curl_setopt($ch, CURLOPT_VERBOSE, true);
	return $ch;
}

/* earlier CSV-based implementation, kept for reference
function getVIMSFac () {
	$content = file('VIMS_facilities_codes.csv');
	$rows = array_map('str_getcsv', $content,array_fill(0, count($content), ","));
	return $rows;
}
*/

// Retrieve the facility list from the VIMS REST API as a decoded array.
function getVIMSFac () {
	$username = "";
	$password = "";
	$ch = initialize_curl();
	curl_setopt($ch, CURLOPT_USERPWD, $username . ":" . $password);
	// Fix: the original appended an undefined $code here (leftover from
	// getDetails()), raising a PHP notice. The list endpoint takes no code,
	// so the effective URL is unchanged.
	$url = 'https://vimstraining.elmis-dev.org:443/rest-api/facilities/';
	curl_setopt($ch, CURLOPT_URL, $url);
	$details = curl_exec($ch);
	$details = json_decode($details,true);
	return $details;
}

// Build the CSD update request for one facility and POST it to the local
// OpenInfoMan careServicesRequest endpoint.
// NOTE(review): facility values are interpolated into XML without escaping;
// names containing &, < or > would produce invalid XML — confirm inputs.
function createCSDFacility($facDetails) {
	$name = $facDetails['facility']['name'];
	$code = $facDetails['facility']['code'];
	$type = $facDetails['facility']['facilityType'];
	$geographicZone = $facDetails['facility']['geographicZone'];
	$gln = $facDetails['facility']['gln'];
	$id = $facDetails['facility']['id'];
	$post = '<csd:requestParams xmlns:csd="urn:ihe:iti:csd:2013">
	<csd:facility id="'.$id.'">
	<csd:code>'.$code.'</csd:code>
	<csd:name>'.$name.'</csd:name>
	<csd:district>'.$geographicZone.'</csd:district>
	<csd:gln>'.$gln.'</csd:gln>
	<csd:ftype>'.$type.'</csd:ftype>
	</csd:facility>
	</csd:requestParams>';
	$ch = initialize_curl();
	$url = "http://localhost:8984/CSD/csr/vims/careServicesRequest/update/urn:openhie.org:openinfoman-tz:facility_create_vims_tz";
	curl_setopt($ch, CURLOPT_POST, true);
	curl_setopt($ch, CURLOPT_POSTFIELDS,$post);
	curl_setopt($ch, CURLOPT_HTTPHEADER, Array("Content-Type: text/xml"));
	curl_setopt($ch, CURLOPT_URL, $url);
	$data = curl_exec($ch);
}

// Fetch the detail record of a single facility by its code.
function getDetails ($code) {
	$username = "";
	$password = "";
	$ch = initialize_curl();
	curl_setopt($ch, CURLOPT_USERPWD, $username . ":" . $password);
	$url = 'https://vimstraining.elmis-dev.org:443/rest-api/facilities/'.$code;
	curl_setopt($ch, CURLOPT_URL, $url);
	$details = curl_exec($ch);
	$details = json_decode($details,true);
	return $details;
}

// Main driver: fetch the facility list, then fetch and push each detail
// record. Aborts with diagnostics on the first record missing a 'facility'
// key (matches the original behavior).
$facilities = getVIMSFac();
foreach ($facilities as $key => $facility) {
	echo "processing =====>".$key;
	$facDet = getDetails ($facility['0']);
	if(!array_key_exists("facility",$facDet)) {
		echo $key;
		print_r($facility);
		exit;
	}
	createCSDFacility($facDet);
}
?>
openhie/openinfoman-tz
resources/scripts/fetch_VIMS_facilities.php
PHP
apache-2.0
2,484
package org.consumersunion.stories.dashboard.client.application.ui.block.configurator;

import org.consumersunion.stories.common.client.util.Callback;
import org.consumersunion.stories.dashboard.client.application.ui.block.HasValidation;

import com.google.gwt.editor.client.Editor;

/**
 * Interface for the various {@link org.consumersunion.stories.common.shared.model.questionnaire.Block} 'configurators'
 * used to specify parameters specific to each Block 'type' (e.g., text,
 * multi-select, etc.).
 *
 * @param <T> the type of value this configurator edits and validates
 */
public interface BlockConfigurator<T> extends Editor<T>, HasValidation<T> {
    /**
     * Registers the callback to receive the edited value.
     * NOTE(review): presumably fired when the user finishes configuring the
     * block — confirm against the implementing classes.
     */
    void setDoneCallback(Callback<T> doneCallback);

    /** Returns the value as currently edited in this configurator. */
    T getEditedValue();
}
stori-es/stori_es
dashboard/src/main/java/org/consumersunion/stories/dashboard/client/application/ui/block/configurator/BlockConfigurator.java
Java
apache-2.0
664
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.security.config.http;

import java.util.Map;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.HierarchicalBeanFactory;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ApplicationContextException;
import org.springframework.security.authentication.CachingUserDetailsService;
import org.springframework.security.config.authentication.AbstractUserDetailsServiceBeanDefinitionParser;
import org.springframework.security.core.userdetails.AuthenticationUserDetailsService;
import org.springframework.security.core.userdetails.UserDetailsByNameServiceWrapper;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.util.StringUtils;

/**
 * Bean used to lookup a named UserDetailsService or AuthenticationUserDetailsService.
 *
 * @author Luke Taylor
 * @since 3.1
 */
public class UserDetailsServiceFactoryBean implements ApplicationContextAware {

	private ApplicationContext beanFactory;

	/**
	 * Looks up a {@link UserDetailsService} by bean id. With no id, falls back
	 * to the single registered instance via {@link #getUserDetailsService()}.
	 */
	UserDetailsService userDetailsService(String id) {
		if (!StringUtils.hasText(id)) {
			return getUserDetailsService();
		}
		return (UserDetailsService) this.beanFactory.getBean(id);
	}

	/**
	 * Like {@link #userDetailsService(String)}, but prefers the caching
	 * variant registered under "&lt;id&gt;" + CACHING_SUFFIX when one exists.
	 */
	UserDetailsService cachingUserDetailsService(String id) {
		if (!StringUtils.hasText(id)) {
			return getUserDetailsService();
		}
		// Overwrite with the caching version if available
		String cachingId = id + AbstractUserDetailsServiceBeanDefinitionParser.CACHING_SUFFIX;
		if (this.beanFactory.containsBeanDefinition(cachingId)) {
			return (UserDetailsService) this.beanFactory.getBean(cachingId);
		}
		return (UserDetailsService) this.beanFactory.getBean(id);
	}

	/**
	 * Resolves an {@link AuthenticationUserDetailsService}. Resolution order:
	 * no name -> the unique AuthenticationUserDetailsService bean if exactly
	 * one exists, otherwise the unique UserDetailsService wrapped; named bean
	 * -> used directly if it is an AuthenticationUserDetailsService, wrapped
	 * if it is a UserDetailsService (caching variant preferred), else an
	 * error is raised.
	 */
	@SuppressWarnings("unchecked")
	AuthenticationUserDetailsService authenticationUserDetailsService(String name) {
		UserDetailsService uds;
		if (!StringUtils.hasText(name)) {
			Map<String, ?> beans = getBeansOfType(AuthenticationUserDetailsService.class);
			if (!beans.isEmpty()) {
				if (beans.size() > 1) {
					throw new ApplicationContextException("More than one AuthenticationUserDetailsService registered."
							+ " Please use a specific Id reference.");
				}
				return (AuthenticationUserDetailsService) beans.values().toArray()[0];
			}
			uds = getUserDetailsService();
		}
		else {
			Object bean = this.beanFactory.getBean(name);
			if (bean instanceof AuthenticationUserDetailsService) {
				return (AuthenticationUserDetailsService) bean;
			}
			else if (bean instanceof UserDetailsService) {
				// Prefer the caching wrapper for this id when one is defined.
				uds = cachingUserDetailsService(name);
				if (uds == null) {
					uds = (UserDetailsService) bean;
				}
			}
			else {
				throw new ApplicationContextException(
						"Bean '" + name + "' must be a UserDetailsService or an" + " AuthenticationUserDetailsService");
			}
		}
		return new UserDetailsByNameServiceWrapper(uds);
	}

	/**
	 * Obtains a user details service for use in RememberMeServices etc. Will return a
	 * caching version if available so should not be used for beans which need to separate
	 * the two.
	 */
	private UserDetailsService getUserDetailsService() {
		// Caching wrappers win over plain services; either way exactly one
		// candidate must be present.
		Map<String, ?> beans = getBeansOfType(CachingUserDetailsService.class);
		if (beans.size() == 0) {
			beans = getBeansOfType(UserDetailsService.class);
		}
		if (beans.size() == 0) {
			throw new ApplicationContextException("No UserDetailsService registered.");
		}
		if (beans.size() > 1) {
			throw new ApplicationContextException("More than one UserDetailsService registered. Please "
					+ "use a specific Id reference in <remember-me/> or <x509 /> elements.");
		}
		return (UserDetailsService) beans.values().toArray()[0];
	}

	@Override
	public void setApplicationContext(ApplicationContext beanFactory) throws BeansException {
		this.beanFactory = beanFactory;
	}

	/**
	 * Finds beans of the given type in this context, walking up ancestor bean
	 * factories until a match is found or no parent remains.
	 */
	private Map<String, ?> getBeansOfType(Class<?> type) {
		Map<String, ?> beans = this.beanFactory.getBeansOfType(type);

		// Check ancestor bean factories if they exist and the current one has none of the
		// required type
		BeanFactory parent = this.beanFactory.getParentBeanFactory();
		while (parent != null && beans.size() == 0) {
			if (parent instanceof ListableBeanFactory) {
				beans = ((ListableBeanFactory) parent).getBeansOfType(type);
			}
			if (parent instanceof HierarchicalBeanFactory) {
				parent = ((HierarchicalBeanFactory) parent).getParentBeanFactory();
			}
			else {
				break;
			}
		}
		return beans;
	}
}
spring-projects/spring-security
config/src/main/java/org/springframework/security/config/http/UserDetailsServiceFactoryBean.java
Java
apache-2.0
5,210
/* * Copyright 2011 Henri Kerola * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.vaadin.gwtgraphics.client; /** * Image represents a raster image that can be embedded into DrawingArea. * * @author Henri Kerola * */ public class Image extends AbstractDrawing implements Sizeable, Positionable, Animatable { /** * Create a new Image with the given properties. * * @param x * the x-coordinate position of the top-left corner of the image * in pixels * @param y * the y-coordinate position of the top-left corner of the image * in pixels * @param width * the width of the image in pixels * @param height * the height of the image in pixels * @param href * URL to an image to be shown. */ public Image(int x, int y, int width, int height, String href) { setX(x); setY(y); setWidth(width); setHeight(height); setHref(href); } @Override public Class<? extends Drawing> getType() { return Image.class; } @Override public int getX() { return getImpl().getX(getElement()); } @Override public void setX(int x) { getImpl().setX(getElement(), x, isAttached()); } @Override public int getY() { return getImpl().getY(getElement()); } @Override public void setY(int y) { getImpl().setY(getElement(), y, isAttached()); } /** * Returns the URL of the image currently shown. * * @return URL of the image */ public String getHref() { return getImpl().getImageHref(getElement()); } /** * Sets the URL of the image to be shown. 
* * @param href * URL of the image to be shown */ public void setHref(String href) { getImpl().setImageHref(getElement(), href); } /** * Returns the width of the Image in pixels. * * @return the width of the Image in pixels */ @Override public int getWidth() { return getImpl().getWidth(getElement()); } /** * Sets the width of the Image in pixels. * * @param width * the new width in pixels */ @Override public void setWidth(int width) { getImpl().setWidth(getElement(), width); } @Override public void setWidth(String width) { boolean successful = false; if (width != null && width.endsWith("px")) { try { setWidth(Integer.parseInt(width.substring(0, width.length() - 2))); successful = true; } catch (NumberFormatException e) { } } if (!successful) { throw new IllegalArgumentException("Only pixel units (px) are supported"); } } /** * Returns the height of the Image in pixels. * * @return the height of the Image in pixels */ @Override public int getHeight() { return getImpl().getHeight(getElement()); } /** * Sets the height of the Image in pixels. * * @param height * the new height in pixels */ @Override public void setHeight(int height) { getImpl().setHeight(getElement(), height); } @Override public void setHeight(String height) { boolean successful = false; if (height != null && height.endsWith("px")) { try { setHeight(Integer.parseInt(height.substring(0, height.length() - 2))); successful = true; } catch (NumberFormatException e) { } } if (!successful) { throw new IllegalArgumentException("Only pixel units (px) are supported"); } } @Override public void setPropertyDouble(String property, double value) { property = property.toLowerCase(); if ("x".equals(property)) { setX((int) value); } else if ("y".equals(property)) { setY((int) value); } else if ("width".equals(property)) { setWidth((int) value); } else if ("height".equals(property)) { setHeight((int) value); } else if ("rotation".equals(property)) { setRotation((int) value); } } }
tilioteo/vmaps
src/main/java/org/vaadin/gwtgraphics/client/Image.java
Java
apache-2.0
4,289
package eu.newsreader.eventcoreference.output; import eu.newsreader.eventcoreference.input.CorefSaxParser; import eu.newsreader.eventcoreference.objects.CoRefSetAgata; import eu.newsreader.eventcoreference.objects.CorefTargetAgata; import eu.newsreader.eventcoreference.objects.Triple; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Set; /** * Created with IntelliJ IDEA. * User: kyoto * Date: 10/11/13 * Time: 11:17 AM * To change this template use File | Settings | File Templates. */ public class CorefSetToSem { static public void main (String[] args) { boolean STOP = false; int sentenceRange = 0; String eventFilePath = "/Users/kyoto/Desktop/Events/ECB/ECBcorpus_StanfordAnnotation/EECB1.0/results-1/lemma-cross-document-in-topic/" + "eecb-events-kyoto-first-n-v-token-3.xml.sim.word-baseline.0.coref.xml"; String participantFilePath = "/Users/kyoto/Desktop/Events/ECB/ECBcorpus_StanfordAnnotation/EECB1.0/results-1/lemma-cross-document-in-topic/" + "participants-30-july-2013.xml.sim.word-baseline.0.coref.xml"; String locationFilePath = "/Users/kyoto/Desktop/Events/ECB/ECBcorpus_StanfordAnnotation/EECB1.0/results-1/lemma-cross-document-in-topic/" + "Location-26-jul-2013.xml.sim.word-baseline.0.coref.xml"; String timeFilePath = "/Users/kyoto/Desktop/Events/ECB/ECBcorpus_StanfordAnnotation/EECB1.0/results-1/lemma-cross-document-in-topic/" + "Time-26-jul-2013.xml.sim.word-baseline.0.coref.xml"; String componentId = "."; String outputlabel = "test"; for (int i = 0; i < args.length; i++) { String arg = args[i]; if (arg.equals("--event") && (args.length>i)) { eventFilePath = args[i+1]; componentId+="e"; } else if (arg.equals("--range") && (args.length>i)) { try { sentenceRange = Integer.parseInt(args[i+1]); } catch (NumberFormatException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. 
} } else if (arg.equals("--participant") && (args.length>i)) { participantFilePath = args[i+1]; componentId+="p"; } else if (arg.equals("--time") && (args.length>i)) { timeFilePath = args[i+1]; componentId+="t"; } else if (arg.equals("--location") && (args.length>i)) { locationFilePath = args[i+1]; componentId+="l"; } else if (arg.equals("--label") && (args.length>i)) { outputlabel = args[i+1]; } } if (eventFilePath.isEmpty()) { System.out.println("Missing argument --event <path to coreference events file"); STOP = true; } if (participantFilePath.isEmpty()) { System.out.println("Missing argument --participant <path to coreference participants file"); } if (timeFilePath.isEmpty()) { System.out.println("Missing argument --time <path to coreference time file"); } if (locationFilePath.isEmpty()) { System.out.println("Missing argument --location <path to coreference location file"); } if (!STOP) { try { // String outputFilePath = eventFilePath+componentId+".sentenceRange."+sentenceRange+"."+outputlabel+"-semevent.xml"; String outputFilePath = new File(eventFilePath).getParent()+"/"+outputlabel+".sentenceRange."+sentenceRange+"."+"semevent.xml"; FileOutputStream fos = new FileOutputStream(outputFilePath); String str ="<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"; str += "<SEM>"+"\n"; fos.write(str.getBytes()); // we read the events and their components using a coreference parser // the parser builds up a HashMap with file identifiers as keys and arraylists with elements as data within that file CorefSaxParser events = new CorefSaxParser(); events.parseFile(eventFilePath); CorefSaxParser participants = new CorefSaxParser(); if (new File(participantFilePath).exists()) { participants.parseFile(participantFilePath); } CorefSaxParser times = new CorefSaxParser(); if (new File(timeFilePath).exists()) { times.parseFile(timeFilePath); } CorefSaxParser locations = new CorefSaxParser(); if (new File(locationFilePath).exists()) { 
locations.parseFile(locationFilePath); } /// we first iterate over the map with file identifiers and the event coref maps Set keySet = events.corefMap.keySet(); Iterator keys = keySet.iterator(); while (keys.hasNext()) { String key = (String) keys.next(); /// keys are file identifiers // We now get the components for the key (= particular file identifier), so just for one file ArrayList<CoRefSetAgata> coRefSetsEventAgatas = events.corefMap.get(key); ArrayList<CoRefSetAgata> participantSets = participants.corefMap.get(key); ArrayList<CoRefSetAgata> timeSets = times.corefMap.get(key); ArrayList<CoRefSetAgata> locationSets = locations.corefMap.get(key); /// we create the initial output string str = "<topic name=\""+key+"\" eventCount=\""+ coRefSetsEventAgatas.size()+"\""; str += " participantCount=\""; if (participantSets!=null) { str += participantSets.size()+"\""; } else { str += "0\""; } str += " timeCount=\""; if (timeSets!=null) { str += timeSets.size()+"\""; } else { str += "0\""; } str += " locationCount=\""; if (locationSets!=null) { str += locationSets.size()+"\""; } else { str += "0\""; } str += ">\n"; fos.write(str.getBytes()); if (coRefSetsEventAgatas !=null) { /// we iterate over the events of a single file str = "<semEvents>\n"; fos.write(str.getBytes()); for (int i = 0; i < coRefSetsEventAgatas.size(); i++) { CoRefSetAgata coRefSetAgata = coRefSetsEventAgatas.get(i); str = "\t<semEvent id=\""+key+"/e"+ coRefSetAgata.getId()+"\" lcs=\""+ coRefSetAgata.getLcs()+"\" score=\""+ coRefSetAgata.getScore()+"\" synset=\""+ coRefSetAgata.getMostFrequentSynset()+"\" label=\""+ coRefSetAgata.getMostFrequentLemma()+"\" mentions=\""+ coRefSetAgata.getTargets().size()+"\">\n"; fos.write(str.getBytes()); for (int j = 0; j < coRefSetAgata.getTargets().size(); j++) { CorefTargetAgata eventTarget = coRefSetAgata.getTargets().get(j); str = "\t"+eventTarget.toString(); fos.write(str.getBytes()); } str = "\t</semEvent>\n"; fos.write(str.getBytes()); } str = "</semEvents>\n"; 
fos.write(str.getBytes()); } if (participantSets!=null) { /// we iterate over the participants of a single file str = "<semAgents>\n"; fos.write(str.getBytes()); for (int i = 0; i < participantSets.size(); i++) { CoRefSetAgata coRefSetAgata = participantSets.get(i); str = "\t<semAgent id=\""+key+"/a"+ coRefSetAgata.getId()+"\" lcs=\""+ coRefSetAgata.getLcs()+"\" score=\""+ coRefSetAgata.getScore()+"\" synset=\""+ coRefSetAgata.getMostFrequentSynset()+"\" label=\""+ coRefSetAgata.getMostFrequentLemma()+"\" mentions=\""+ coRefSetAgata.getTargets().size()+"\">\n"; fos.write(str.getBytes()); for (int j = 0; j < coRefSetAgata.getTargets().size(); j++) { CorefTargetAgata eventTarget = coRefSetAgata.getTargets().get(j); str = "\t"+eventTarget.toString(); fos.write(str.getBytes()); } str = "\t</semAgent>\n"; fos.write(str.getBytes()); } str = "</semAgents>\n"; fos.write(str.getBytes()); } if (locationSets!=null) { /// we iterate over the locations of a single file str = "<semPlaces>\n"; fos.write(str.getBytes()); for (int i = 0; i < locationSets.size(); i++) { CoRefSetAgata coRefSetAgata = locationSets.get(i); str = "\t<semPlace id=\""+key+"/p"+ coRefSetAgata.getId()+"\" lcs=\""+ coRefSetAgata.getLcs()+"\" score=\""+ coRefSetAgata.getScore()+"\" synset=\""+ coRefSetAgata.getMostFrequentSynset()+"\" label=\""+ coRefSetAgata.getMostFrequentLemma()+"\" mentions=\""+ coRefSetAgata.getTargets().size()+"\">\n"; fos.write(str.getBytes()); for (int j = 0; j < coRefSetAgata.getTargets().size(); j++) { CorefTargetAgata eventTarget = coRefSetAgata.getTargets().get(j); str = "\t"+eventTarget.toString(); fos.write(str.getBytes()); } str = "\t</semPlace>\n"; fos.write(str.getBytes()); } str = "</semPlaces>\n"; fos.write(str.getBytes()); } if (timeSets!=null) { /// we iterate over the time of a single file str = "<semTimes>\n"; fos.write(str.getBytes()); for (int i = 0; i < timeSets.size(); i++) { CoRefSetAgata coRefSetAgata = timeSets.get(i); str = " <semTime id=\""+key+"/t"+ 
coRefSetAgata.getId()+"\" lcs=\""+ coRefSetAgata.getLcs()+"\" score=\""+ coRefSetAgata.getScore()+"\" synset=\""+ coRefSetAgata.getMostFrequentSynset()+"\" label=\""+ coRefSetAgata.getMostFrequentLemma()+"\" mentions=\""+ coRefSetAgata.getTargets().size()+"\">\n"; fos.write(str.getBytes()); for (int j = 0; j < coRefSetAgata.getTargets().size(); j++) { CorefTargetAgata eventTarget = coRefSetAgata.getTargets().get(j); str = "\t"+eventTarget.toString(); fos.write(str.getBytes()); } str = "\t</semTime>\n"; fos.write(str.getBytes()); } str = "</semTimes>\n"; fos.write(str.getBytes()); } /// now we get the relations getRelations(fos, events.fileName, coRefSetsEventAgatas, participantSets, timeSets, locationSets, sentenceRange, key); str = "</topic>\n"; fos.write(str.getBytes()); } str = "</SEM>\n"; fos.write(str.getBytes()); fos.close(); } catch (IOException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. } } } /** * <semEvents> <semEvent id="eecb1.0/456> <target id="corpus/filename_url/t115"/> </semEvent> </semEvents> <semAgents> <semAgent id="eecb1.0/2"> <target id="corpus/filename_url/t111"/> </semAgent> </semAgents> <semTimes> <semTime id="eecb1.0/1"/> </semTimes> <semPlaces> <semPlace id="eecb1.0/6"/> </semPlaces> <semRelations> <semRelation id = "eecb1.0/75698" predicate="semHasAgent" subject="eecb1.0/456" object="eecb1.0/2> <target id="corpus/filename_url/pr23r3"/> </semRelation> <semRelation id = "eecb1.0/75698" predicate="semHasTime" subject="eecb1.0/456" object="eecb1.0/2/> <semRelation id = "eecb1.0/75698" predicate="semHasPlace" subject="eecb1.0/456" object="eecb1.0/2/> </semRelations> */ static void getRelations (FileOutputStream fos, String fileName, ArrayList<CoRefSetAgata> coRefSetsEventAgatas, ArrayList<CoRefSetAgata> participantSets, ArrayList<CoRefSetAgata> timeSets, ArrayList<CoRefSetAgata> locationSets, int sentenceRange, String key ) throws IOException { String str = "<semRelations>\n"; 
fos.write(str.getBytes()); int relationCounter = 0; /// we iterate over the events of a single file ArrayList<Triple> triplesA = new ArrayList<Triple>(); ArrayList<Triple> triplesP = new ArrayList<Triple>(); ArrayList<Triple> triplesT = new ArrayList<Triple>(); for (int i = 0; i < coRefSetsEventAgatas.size(); i++) { CoRefSetAgata coRefSetAgata = coRefSetsEventAgatas.get(i); for (int j = 0; j < coRefSetAgata.getTargets().size(); j++) { CorefTargetAgata eventTarget = coRefSetAgata.getTargets().get(j); /// we obtain the sentence ids for the targets of the coreference set of the events /// this sentence range determines which components belong to the event. /// by passing in the sentenceRange parameter you can indicate the number of sentences before and after that are also valid contexts //// if zero the context is restricted to the same sentence ArrayList<String> rangeOfSentenceIds = getSentenceRange(eventTarget.getSentenceId(), sentenceRange); if (participantSets!=null) { // System.out.println("PARTICIPANTS"); for (int s = 0; s < participantSets.size(); s++) { CoRefSetAgata refSet = participantSets.get(s); //// loop to add results for range of sentences for (int k = 0; k < rangeOfSentenceIds.size(); k++) { String sentenceId = rangeOfSentenceIds.get(k); if (refSet.containsTargetSentenceId(sentenceId)) { for (int l = 0; l < refSet.getTargets().size(); l++) { CorefTargetAgata corefTargetAgata = refSet.getTargets().get(l); if (eventTarget.getDocId().equals(corefTargetAgata.getDocId())) { String predicate = "semHasAgent"; String subject = key+"/e"+ coRefSetAgata.getId(); String object = key+"/a"+ refSet.getId(); Triple triple = new Triple(predicate, subject, object); String target = "\t\t<target id =\""+ eventTarget.getDocId()+"/"+eventTarget.getSentenceId()+"\""+"/>"; target += " <!-- "+eventTarget.getWord()+"--"+predicate+"--"+ corefTargetAgata.getWord()+" -->"; int givenTriple = getTriple(triplesA, triple); if (givenTriple==-1) { relationCounter++; String id = 
key+"/"+relationCounter; triple.setId(id); triple.addMentions(target); triplesA.add(triple); } else { if (!triplesA.get(givenTriple).getMentions().contains(target)) { triplesA.get(givenTriple).addMentions(target); } } } else { // System.out.println("corefTarget.getDocId() = " + corefTarget.getDocId()); // System.out.println("eventTarget.getDocId() = " + eventTarget.getDocId()); } } } } } } if (timeSets!=null) { // System.out.println("TIME"); for (int s = 0; s < timeSets.size(); s++) { CoRefSetAgata refSet = timeSets.get(s); //// loop to add results for range of sentences for (int k = 0; k < rangeOfSentenceIds.size(); k++) { String sentenceId = rangeOfSentenceIds.get(k); if (refSet.containsTargetSentenceId(sentenceId)) { for (int l = 0; l < refSet.getTargets().size(); l++) { CorefTargetAgata corefTargetAgata = refSet.getTargets().get(l); if (eventTarget.getDocId().equals(corefTargetAgata.getDocId())) { String predicate = "semHasTime"; String subject = key+"/e"+ coRefSetAgata.getId(); String object = key+"/t"+ refSet.getId(); Triple triple = new Triple(predicate, subject, object); String target = "\t\t<target id =\""+ eventTarget.getDocId()+"/"+eventTarget.getSentenceId()+"\""+"/>"; target += " <!-- "+eventTarget.getWord()+"--"+predicate+"--"+ corefTargetAgata.getWord()+" -->"; int givenTriple = getTriple(triplesA, triple); if (givenTriple==-1) { relationCounter++; String id = key+"/"+relationCounter; triple.setId(id); triple.addMentions(target); triplesA.add(triple); } else { if (!triplesA.get(givenTriple).getMentions().contains(target)) { triplesA.get(givenTriple).addMentions(target); } } } else { // System.out.println("corefTarget.getDocId() = " + corefTarget.getDocId()); // System.out.println("eventTarget.getDocId() = " + eventTarget.getDocId()); } } } } } } if (locationSets!=null) { // System.out.println("PLACES"); for (int s = 0; s < locationSets.size(); s++) { CoRefSetAgata refSet = locationSets.get(s); //// loop to add results for range of sentences for (int 
k = 0; k < rangeOfSentenceIds.size(); k++) { String sentenceId = rangeOfSentenceIds.get(k); if (refSet.containsTargetSentenceId(sentenceId)) { for (int l = 0; l < refSet.getTargets().size(); l++) { CorefTargetAgata corefTargetAgata = refSet.getTargets().get(l); if (eventTarget.getDocId().equals(corefTargetAgata.getDocId())) { String predicate = "semHasPlace"; String subject = key+"/e"+ coRefSetAgata.getId(); String object = key+"/p"+ refSet.getId(); Triple triple = new Triple(predicate, subject, object); String target = "\t\t<target id =\""+ eventTarget.getDocId()+"/"+eventTarget.getSentenceId()+"\""+"/>"; target += " <!-- "+eventTarget.getWord()+"--"+predicate+"--"+ corefTargetAgata.getWord()+" -->"; int givenTriple = getTriple(triplesA, triple); if (givenTriple==-1) { relationCounter++; String id = key+"/"+relationCounter; triple.setId(id); triple.addMentions(target); triplesA.add(triple); } else { if (!triplesA.get(givenTriple).getMentions().contains(target)) { triplesA.get(givenTriple).addMentions(target); } } } else { // System.out.println("corefTarget.getDocId() = " + corefTarget.getDocId()); // System.out.println("eventTarget.getDocId() = " + eventTarget.getDocId()); } } } } } } } } for (int k = 0; k < triplesA.size(); k++) { Triple triple = triplesA.get(k); str = triple.toString(); fos.write(str.getBytes()); } for (int k = 0; k < triplesP.size(); k++) { Triple triple = triplesP.get(k); str = triple.toString(); fos.write(str.getBytes()); } for (int k = 0; k < triplesT.size(); k++) { Triple triple = triplesT.get(k); str = triple.toString(); fos.write(str.getBytes()); } } static int getTriple (ArrayList<Triple> triples, Triple triple) { for (int i = 0; i < triples.size(); i++) { Triple triple1 = triples.get(i); /* System.out.println("triple1.toString() = " + triple1.toString()); System.out.println("triple.toString() = " + triple.toString()); */ /* if (triple.getSubject().equals("TOPIC_44_EVENT_COREFERENCE_CORPUS/e35")) { 
System.out.println(triple.getObject()+":"+triple1.getObject()); } */ if ((triple1.getPredicate().equalsIgnoreCase(triple.getPredicate())) && (triple1.getSubject().equalsIgnoreCase(triple.getSubject())) && (triple1.getObject().equalsIgnoreCase(triple.getObject())) ) { return i; } } return -1; } /** * * @param corefMap * @param sentenceIdString * @param sentenceRange * @return */ static ArrayList<CoRefSetAgata> getCorefSetFromRange (HashMap<String, ArrayList<CoRefSetAgata>> corefMap, String sentenceIdString, int sentenceRange) { ArrayList<CoRefSetAgata> coRefSetAgatas = null; coRefSetAgatas = corefMap.get(sentenceIdString); if (sentenceRange>0) { /// we assume that the sentence id is an integer Integer sentenceId = Integer.parseInt(sentenceIdString); if (sentenceId!=null) { for (int i = sentenceId; i < sentenceRange; i++) { ArrayList<CoRefSetAgata> nextSet = corefMap.get(sentenceId+i); if (nextSet!=null) { for (int j = 0; j < nextSet.size(); j++) { CoRefSetAgata coRefSetAgata = nextSet.get(j); coRefSetAgatas.add(coRefSetAgata); } } } /* for (int i = sentenceId; i < sentenceRange; i++) { ArrayList<CoRefSet> nextSet = corefMap.get(sentenceId-i); if (nextSet!=null) { for (int j = 0; j < nextSet.size(); j++) { CoRefSet coRefSet = nextSet.get(j); coRefSets.add(coRefSet); } } }*/ } } return coRefSetAgatas; } static ArrayList<String> getSentenceRange (String sentenceIdString, int sentenceRange) { ArrayList<String> sentenceIdRange = new ArrayList<String>(); sentenceIdRange.add(sentenceIdString); if (sentenceRange>0) { /// we assume that the sentence id is an integer Integer sentenceId = null; try { sentenceId = Integer.parseInt(sentenceIdString); } catch (NumberFormatException e) { e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates. 
} if (sentenceId!=null) { for (int i = sentenceId; i < sentenceRange; i++) { sentenceIdRange.add(new Integer(i).toString()); } for (int i = sentenceId; i < sentenceRange; i++) { sentenceIdRange.add(new Integer(i).toString()); } } } return sentenceIdRange; } }
newsreader/StreamEventCoreference
src/main/java/eu/newsreader/eventcoreference/output/CorefSetToSem.java
Java
apache-2.0
27,687
from transformers import RobertaTokenizerFast import scattertext as st tokenizer_fast = RobertaTokenizerFast.from_pretrained( "roberta-base", add_prefix_space=True) tokenizer = st.RobertaTokenizerWrapper(tokenizer_fast) df = st.SampleCorpora.ConventionData2012.get_data().assign( parse = lambda df: df.text.apply(tokenizer.tokenize) ) corpus = st.OffsetCorpusFactory( df, category_col='party', parsed_col='parse', feat_and_offset_getter=st.TokenFeatAndOffsetGetter() ).build() # Remove words occur less than 5 times corpus = corpus.remove_infrequent_words(5, non_text=True) plot_df = corpus.get_metadata_freq_df('').assign( Y=lambda df: df.democrat, X=lambda df: df.republican, Ypos=lambda df: st.Scalers.dense_rank(df.Y), Xpos=lambda df: st.Scalers.dense_rank(df.X), SuppressDisplay=False, ColorScore=lambda df: st.Scalers.scale_center_zero(df.Ypos - df.Xpos), ) html = st.dataframe_scattertext( corpus, plot_df=plot_df, category='democrat', category_name='Democratic', not_category_name='Republican', width_in_pixels=1000, suppress_text_column='Display', metadata=corpus.get_df()['speaker'], use_non_text_features=True, ignore_categories=False, use_offsets=True, unified_context=False, color_score_column='ColorScore', left_list_column='ColorScore', y_label='Democarats', x_label='Republicans', header_names={'upper': 'Top Democratic', 'lower': 'Top Republican', 'right': 'Most Frequent'}, subword_encoding='RoBERTa' ) fn = 'roberta_sentence_piece.html' with open(fn, 'w') as of: of.write(html) print("Open ./" + fn + ' in Chrome.')
JasonKessler/scattertext
demo_tokenizer_roberta.py
Python
apache-2.0
1,670
// Copyright 2015-2025 Finsa S.p.A. <finsa@finsa.it> // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at: // // "http://www.apache.org/licenses/LICENSE-2.0" // // Unless required by applicable law or agreed to in writing, software distributed under the License // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express // or implied. See the License for the specific language governing permissions and limitations under // the License. using Finsa.Caravan.Common; namespace Finsa.Caravan.WebApi.Core { /// <summary> /// Constanti usate nella libreria Finsa.Caravan.WebApi. /// </summary> public static class Constants { /// <summary> /// ID usato come correlazione nei log. /// </summary> public const string RequestIdVariable = "request_id"; /// <summary> /// Percorso nella request usato come guida nei log. /// </summary> public const string RequestPathVariable = "request_path"; /// <summary> /// Indica se la request debba essere ignorata dai componenti di middleware "opzionali" /// come quello di compressione e quello di log. /// </summary> public const string UriIgnoredVariable = "uri_ignored"; /// <summary> /// Header usato per facilitare il tracciamento dei log. /// </summary> public static string RequestIdHeader { get; } = $"X-{CaravanCommonConfiguration.Instance.AppName}-Request-ID"; /// <summary> /// La dimensione minima usata dai buffer di risposta dei componenti di middleware. /// </summary> public const int MinResponseBufferSize = 512; /// <summary> /// L'header usato per indicare il client ID usato. /// </summary> public const string OAuth2ClientIdHeader = "X-OAuth2-Client-ID"; /// <summary> /// ID usato come correlazione nell'importazione dei log. /// </summary> public const string LogImportIdVariable = "log_import_id"; /// <summary> /// Nome dell'applicazione per cui si stanno importando i log. 
/// </summary> public const string LogImportAppVariable = "log_import_app"; } }
finsaspa/Caravan
Caravan.WebApi/Core/Constants.cs
C#
apache-2.0
2,398
/* * Copyright 2012 Denis Neuling * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cloudcontrolled.api.model; /** * The version object contains informations about the latest available software * to access the cloudCOntrol API. * * @author Denis Neuling (denisneuling@gmail.com) * */ public class Version extends AbstractModel { private String pycclib; private String cctrl; /** * <p> * Constructor for Version. * </p> */ public Version() { } /** * <p> * Getter for the field <code>pycclib</code>. * </p> * * @return pycclib the latest pycclib release version */ public String getPycclib() { return pycclib; } /** * <p> * Setter for the field <code>pycclib</code>. * </p> * * @param pycclib * the latest pycclib release version */ public void setPycclib(String pycclib) { this.pycclib = pycclib; } /** * <p> * Getter for the field <code>cctrl</code>. * </p> * * @return cctrl the latest cctrl release version */ public String getCctrl() { return cctrl; } /** * <p> * Setter for the field <code>cctrl</code>. * </p> * * @param cctrl * the latest cctrl release version to set */ public void setCctrl(String cctrl) { this.cctrl = cctrl; } /** {@inheritDoc} */ @Override public String toString() { return "Version [pycclib=" + pycclib + ", cctrl=" + cctrl + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((cctrl == null) ? 0 : cctrl.hashCode()); result = prime * result + ((pycclib == null) ? 
0 : pycclib.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Version other = (Version) obj; if (cctrl == null) { if (other.cctrl != null) return false; } else if (!cctrl.equals(other.cctrl)) return false; if (pycclib == null) { if (other.pycclib != null) return false; } else if (!pycclib.equals(other.pycclib)) return false; return true; } }
denisneuling/cctrl.jar
cctrl-api/src/main/java/com/cloudcontrolled/api/model/Version.java
Java
apache-2.0
2,646
# Brandon Michael # cis142 # checkForQuadrant.py # Goal: This program will keep asking for input values to check for the quadrant postion, # origin, x-axis and y axis postions # Notes: I used a while loop to make testing values easier and I used the input x,y # Display program instructions print("###################################################") print("Quadrant Finder 1.0") print("Enter the x and y coordinates to find the quadrant!") print("Type [exit] to quit the program") print("###################################################") # Setup the x and y variables xValue = None yValue = None # Setup a loop that breaks when you type exit while True: # Get the input values in a X,Y format inputCoordinates = input("Type in coordinates [x,y]: ") # Check if exit was typed, if so then exit the loop and end if inputCoordinates == "exit": break # stops the loop # We want to make sure we can only strip out 2 input values # and make sure there is a comma separating them elif len(inputCoordinates.strip().split(',')) == 2 and inputCoordinates.count(',') == 1: # Loop over the two numbers that are stripped out by the comma value for coordinate in inputCoordinates.strip().split(','): # This checks to see if we have set a value for x # If it is still set to None then the first value is going to be xValue if xValue is None: xValue = int(coordinate) # Since we are checking the xValue we can assume when the loop comes back # a second time we can set it to yValue else: yValue = int(coordinate) # If its a 0,0 value then its the Origin if xValue == 0 and yValue == 0: print("Origin") else: # If x = 0 and the y is greater or less than 0 its on the Y axis if xValue == 0 and (yValue < 0 or yValue > 0): print("Y - Axis") # If x is greater or less than 0 and y = 0 its on the X axis elif (xValue < 0 or xValue > 0) and yValue == 0: print("X - Axis") # Anything else and we need to check for quadrants else: # If x is a positive number and y is a negative positive its in Quadrant 1 if xValue > 
0 and yValue > 0: print("Quadrant I") # If x is a negative number and y is a positive number then its in Quadrant 2 elif xValue < 0 and yValue > 0: print("Quadrant II") # If x is a negative number and y is negative number then its in Quadrant 3 elif xValue < 0 and yValue < 0: print("Quadrant III") # If x is a positive number and y is a negative number then its in Quadrant 4 elif xValue > 0 and yValue < 0: print("Quadrant IV") # If they typed anything but 2 numbers separated by a comma then ask for the input again else: print("Please type the input value as x,y") print("Example: 1,-9")
bwmichael/jccc-cis142-python
old/check-quadrant.py
Python
apache-2.0
3,124
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.jms.example; import javax.jms.Connection; import javax.jms.ConnectionFactory; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Queue; import javax.jms.Session; import javax.jms.TextMessage; import javax.naming.InitialContext; /** * A simple JMS Queue example that uses dual broker authentication mechanisms for SSL and non-SSL connections. */ public class SSLDualAuthenticationExample { public static void main(final String[] args) throws Exception { Connection producerConnection = null; Connection consumerConnection = null; InitialContext initialContext = null; try { // Step 1. Create an initial context to perform the JNDI lookup. initialContext = new InitialContext(); // Step 2. Perfom a lookup on the queue Queue queue = (Queue) initialContext.lookup("queue/exampleQueue"); // Step 3. Perform a lookup on the producer's SSL Connection Factory ConnectionFactory producerConnectionFactory = (ConnectionFactory) initialContext.lookup("SslConnectionFactory"); // Step 4. 
Perform a lookup on the consumer's Connection Factory ConnectionFactory consumerConnectionFactory = (ConnectionFactory) initialContext.lookup("ConnectionFactory"); // Step 5.Create a JMS Connection for the producer producerConnection = producerConnectionFactory.createConnection(); // Step 6.Create a JMS Connection for the consumer consumerConnection = consumerConnectionFactory.createConnection("consumer", "activemq"); // Step 7. Create a JMS Session for the producer Session producerSession = producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); // Step 8. Create a JMS Session for the consumer Session consumerSession = consumerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); // Step 9. Create a JMS Message Producer MessageProducer producer = producerSession.createProducer(queue); // Step 10. Create a Text Message TextMessage message = producerSession.createTextMessage("This is a text message"); System.out.println("Sent message: " + message.getText()); // Step 11. Send the Message producer.send(message); // Step 12. Create a JMS Message Consumer MessageConsumer messageConsumer = consumerSession.createConsumer(queue); // Step 13. Start the Connection consumerConnection.start(); // Step 14. Receive the message TextMessage messageReceived = (TextMessage) messageConsumer.receive(5000); System.out.println("Received message: " + messageReceived.getText()); initialContext.close(); } finally { // Step 15. Be sure to close our JMS resources! if (initialContext != null) { initialContext.close(); } if (producerConnection != null) { producerConnection.close(); } if (consumerConnection != null) { consumerConnection.close(); } } } }
lburgazzoli/apache-activemq-artemis
examples/features/standard/ssl-enabled-dual-authentication/src/main/java/org/apache/activemq/artemis/jms/example/SSLDualAuthenticationExample.java
Java
apache-2.0
3,962
(function() { 'use strict'; function HomeController($scope, $q, $timeout) { var self = this; self.time = 50; self.message = "Mensagem para mostrar"; var promiseTimeout; var deferredToast; self.mostrarToast = function() { esconderToast(); deferredToast = $q.defer(); deferredToast.promise.then(toastResolved, toastRejected); self.activeShow = true; self.mensagemAtual = self.message; promiseTimeout = $timeout(timeoutToast, self.time * 1000); }; function timeoutToast() { deferredToast.reject(); esconderToast(); } function esconderToast() { if (promiseTimeout) { $timeout.cancel(promiseTimeout); promiseTimeout = undefined; } self.activeShow = false; } self.clickBotaoFechar = function() { if (deferredToast) { // verifica pra evitar problema com duplo clique rápido deferredToast.resolve(); deferredToast = undefined; } esconderToast(); }; self.clickNoToast = function() { if (deferredToast) { timeoutToast(); // clicar no toast equivale ao timeout } }; function toastResolved() { console.log('Resolved'); self.ultimaResposta = 'Resolved'; } function toastRejected() { console.log('Rejected'); self.ultimaResposta = 'Rejected'; } } angular .module('hotMilhasApp') .controller('HomeController', ['$scope', '$q', '$timeout', HomeController]); })();
fabiosandias/hotmilhas-recru
app/scripts/controllers/HomeController.js
JavaScript
apache-2.0
1,786
package com.formulasearchengine.mathosphere.mlp.text;

import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Test;

import java.io.File;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Integration tests for {@link WikidataInterface}. Both tests contact the
 * live Wikidata API, so they require network access to pass.
 *
 * Created by Moritz on 12.12.2015.
 */
public class WikidataInterfaceTest {

  // Sample Wikidata item ids (duplicates intentional) used to exercise getAliases.
  String[] qIds = new String[] {"Q7913892", "Q12503", "Q3176558", "Q36161", "Q739925", "Q49008", "Q12503", "Q5156597", "Q11567", "Q1413083", "Q50700", "Q50701", "Q935944", "Q50701", "Q935944", "Q1144319", "Q50700", "Q3150667", "Q2256802", "Q729113", "Q21199", "Q33456", "Q44946", "Q230883", "Q21199", "Q21199", "Q50700", "Q50700", "Q50700", "Q50700", "Q378201", "Q302462", "Q3913", "Q3913", "Q3913", "Q12916", "Q12916", "Q11352", "Q2303886", "Q526719", "Q11348", "Q1027788", "Q12916", "Q12916", "Q946764", "Q19033", "Q126017", "Q230963", "Q2303886", "Q168698", "Q917476", "Q17285", "Q1663694", "Q1663694", "Q1663694", "Q1663694", "Q5597315", "Q5597315", "Q2303886", "Q46276", "Q2140940", "Q36253", "Q1096885", "Q189569", "Q3176558", "Q188889", "Q188889", "Q13824", "Q2111", "Q174102", "Q1440227", "Q167", "Q1515261", "Q1128317", "Q111059", "Q111059", "Q43260", "Q3150667", "Q43260", "Q11567", "Q2095069", "Q21199", "Q21199", "Q2303886", "Q2303886", "Q1137759", "Q193796", "Q12916", "Q6520159", "Q11471", "Q167", "Q12916", "Q12916", "Q21199", "Q21199", "Q3686031", "Q11471", "Q9492", "Q12916", "Q4440864", "Q12916", "Q18373", "Q2111", "Q1289248", "Q876346", "Q1289248", "Q464794", "Q193794", "Q192826", "Q11471", "Q929043", "Q2518235", "Q782566", "Q1074380", "Q1413083", "Q1413083", "Q1008943", "Q1256787", "Q13471665", "Q1289248", "Q2337858", "Q11348", "Q11348", "Q11348", "Q11471", "Q2918589", "Q1045555", "Q21199", "Q82580", "Q18848", "Q18848", "Q1952404", "Q11703678", "Q11703678", "Q2303886", "Q1096885", "Q4440864", "Q2362761", "Q11471", "Q3176558",
      "Q30006", "Q11567", "Q3258885", "Q131030", "Q21406831", "Q131030", "Q186290", "Q1591095", "Q11348", "Q3150667", "Q474715", "Q379825", "Q379825", "Q192704", "Q44432", "Q44432", "Q319913", "Q12916", "Q12916", "Q2627460", "Q2627460", "Q190109", "Q83478", "Q18848", "Q379825", "Q844128", "Q2608202", "Q29539", "Q11465", "Q176737", "Q176737", "Q176737", "Q1413083", "Q1759756", "Q900231", "Q39297", "Q39297", "Q39552", "Q39297", "Q1948412", "Q3554818", "Q21199", "Q12916", "Q168698", "Q50701", "Q11053", "Q12916", "Q12916", "Q12916", "Q12503", "Q12503", "Q176623", "Q10290214", "Q10290214", "Q505735", "Q1057607", "Q11471", "Q1057607", "Q5227327", "Q6901742", "Q159375", "Q2858846", "Q1134404", "Q12916", "Q4440864", "Q838611", "Q44946", "Q173817", "Q12916", "Q21199", "Q12916", "Q190056", "Q10290214", "Q10290214", "Q506041", "Q2858846"};

  /**
   * Fetches the aliases for every id in {@link #qIds} and dumps them, one
   * quoted alias per line, into a temporary UTF-8 file.
   *
   * Fixes over the previous version: the temp files are now registered for
   * deletion on JVM exit instead of being leaked (one file per id), the
   * redundant File.createTempFile + Paths.get combination is replaced by
   * Files.createTempFile, and the charset constant StandardCharsets.UTF_8
   * replaces the lookup Charset.forName("UTF-8").
   */
  @Test
  public void testGetAliases() throws Exception {
    for (String qid : qIds) {
      final Path file = Files.createTempFile("aliases", ".txt");
      file.toFile().deleteOnExit();
      List<String> aliases = WikidataInterface.getAliases(qid);
      aliases = aliases.stream().map(a -> "\"" + a + "\"").collect(Collectors.toList());
      Files.write(file, aliases, StandardCharsets.UTF_8);
    }
  }

  /**
   * Checks that an entity lookup for "real number" resolves to Q12916.
   */
  @Test
  public void testGetEntities() throws Exception {
    final ArrayList<String> expected = Lists.newArrayList("Q12916");
    Assert.assertEquals(expected.get(0), WikidataInterface.getEntities("real number").get(0));
  }
}
TU-Berlin/mathosphere
mathosphere-core/src/test/java/com/formulasearchengine/mathosphere/mlp/text/WikidataInterfaceTest.java
Java
apache-2.0
6,118
package rpcclient import ( "bytes" "encoding/json" "fmt" "io/ioutil" "net" "net/http" "net/url" "reflect" "strings" "github.com/pkg/errors" types "github.com/tendermint/tendermint/rpc/lib/types" ) // HTTPClient is a common interface for JSONRPCClient and URIClient. type HTTPClient interface { Call(method string, params map[string]interface{}, result interface{}) (interface{}, error) } // TODO: Deprecate support for IP:PORT or /path/to/socket func makeHTTPDialer(remoteAddr string) (string, func(string, string) (net.Conn, error)) { parts := strings.SplitN(remoteAddr, "://", 2) var protocol, address string if len(parts) == 1 { // default to tcp if nothing specified protocol, address = "tcp", remoteAddr } else if len(parts) == 2 { protocol, address = parts[0], parts[1] } else { // return a invalid message msg := fmt.Sprintf("Invalid addr: %s", remoteAddr) return msg, func(_ string, _ string) (net.Conn, error) { return nil, errors.New(msg) } } // accept http as an alias for tcp if protocol == "http" { protocol = "tcp" } trimmedAddress := strings.Replace(address, "/", ".", -1) // replace / with . for http requests (dummy domain) return trimmedAddress, func(proto, addr string) (net.Conn, error) { return net.Dial(protocol, address) } } // We overwrite the http.Client.Dial so we can do http over tcp or unix. // remoteAddr should be fully featured (eg. 
with tcp:// or unix://) func makeHTTPClient(remoteAddr string) (string, *http.Client) { address, dialer := makeHTTPDialer(remoteAddr) return "http://" + address, &http.Client{ Transport: &http.Transport{ Dial: dialer, }, } } //------------------------------------------------------------------------------------ // JSON rpc takes params as a slice type JSONRPCClient struct { address string client *http.Client } func NewJSONRPCClient(remote string) *JSONRPCClient { address, client := makeHTTPClient(remote) return &JSONRPCClient{ address: address, client: client, } } func (c *JSONRPCClient) Call(method string, params map[string]interface{}, result interface{}) (interface{}, error) { request, err := types.MapToRequest("", method, params) if err != nil { return nil, err } requestBytes, err := json.Marshal(request) if err != nil { return nil, err } // log.Info(string(requestBytes)) requestBuf := bytes.NewBuffer(requestBytes) // log.Info(Fmt("RPC request to %v (%v): %v", c.remote, method, string(requestBytes))) httpResponse, err := c.client.Post(c.address, "text/json", requestBuf) if err != nil { return nil, err } defer httpResponse.Body.Close() responseBytes, err := ioutil.ReadAll(httpResponse.Body) if err != nil { return nil, err } // log.Info(Fmt("RPC response: %v", string(responseBytes))) return unmarshalResponseBytes(responseBytes, result) } //------------------------------------------------------------- // URI takes params as a map type URIClient struct { address string client *http.Client } func NewURIClient(remote string) *URIClient { address, client := makeHTTPClient(remote) return &URIClient{ address: address, client: client, } } func (c *URIClient) Call(method string, params map[string]interface{}, result interface{}) (interface{}, error) { values, err := argsToURLValues(params) if err != nil { return nil, err } // log.Info(Fmt("URI request to %v (%v): %v", c.address, method, values)) resp, err := c.client.PostForm(c.address+"/"+method, values) if err != nil { 
return nil, err } defer resp.Body.Close() responseBytes, err := ioutil.ReadAll(resp.Body) if err != nil { return nil, err } return unmarshalResponseBytes(responseBytes, result) } //------------------------------------------------ func unmarshalResponseBytes(responseBytes []byte, result interface{}) (interface{}, error) { // read response // if rpc/core/types is imported, the result will unmarshal // into the correct type // log.Notice("response", "response", string(responseBytes)) var err error response := &types.RPCResponse{} err = json.Unmarshal(responseBytes, response) if err != nil { return nil, errors.Errorf("Error unmarshalling rpc response: %v", err) } errorStr := response.Error if errorStr != "" { return nil, errors.Errorf("Response error: %v", errorStr) } // unmarshal the RawMessage into the result err = json.Unmarshal(*response.Result, result) if err != nil { return nil, errors.Errorf("Error unmarshalling rpc response result: %v", err) } return result, nil } func argsToURLValues(args map[string]interface{}) (url.Values, error) { values := make(url.Values) if len(args) == 0 { return values, nil } err := argsToJson(args) if err != nil { return nil, err } for key, val := range args { values.Set(key, val.(string)) } return values, nil } func argsToJson(args map[string]interface{}) error { for k, v := range args { rt := reflect.TypeOf(v) isByteSlice := rt.Kind() == reflect.Slice && rt.Elem().Kind() == reflect.Uint8 if isByteSlice { bytes := reflect.ValueOf(v).Bytes() args[k] = fmt.Sprintf("0x%X", bytes) continue } // Pass everything else to go-wire data, err := json.Marshal(v) if err != nil { return err } args[k] = string(data) } return nil }
x2cn/mintnet
vendor/github.com/tendermint/tendermint/rpc/lib/client/http_client.go
GO
apache-2.0
5,251
<?php
// Order creation page: client/item autocomplete, running totals, extra
// charges, and order submission via ../core/query.php.
// if(isset($_GET['nya']))
// 	$nya = $_GET['nya'];
// else $nya='order';

// Read the requested action (defaults to 'new') and optional record id.
if(isset($_GET['act']))
	$act=$_GET['act'];
else $act='new';

if(isset($_GET['id']))
	$id=$_GET['id'];

//echo "$nya $act<br><br>";
?>
<script type="text/javascript">
//rawr();
// Item currently highlighted/selected in the item autocomplete.
var currItem;
//var total=(0.00).toFixed(2);

// Item search box: jQuery UI autocomplete backed by the stock search endpoint.
// Selecting an item immediately adds it to the order and clears the box.
$('#itemSearch').autocomplete({
	source: "../core/query.php?nya=stock&act=search",
	minLength: 2,
	focus: function( event, ui ) {
		$( "#itemSearch" ).val( ui.item.label );
		//$( "#result" ).val( ui.item.value );
		return false;
	},
	select: function( event, ui ) {
		$( "#itemSearch" ).val( ui.item.label );
		$( "#itemAdd" ).val( ui.item.value );
		currItem=ui.item;
		$('#additem').click();
		$(this).val("");
		return false;
	},
	change: function( event, ui ){
		// Restore the placeholder look when nothing was actually picked.
		if($("#itemAdd").val()==""){
			$(this).css('color','#aaa');
			$(this).val("Buscar Artículo");
		}
		return false;
	},
	search: function(event, ui) {}
// NOTE(review): .data("autocomplete")._renderItem pokes a jQuery UI internal;
// it breaks on jQuery UI >= 1.10 (renamed to "ui-autocomplete").
}).data( "autocomplete" )._renderItem = function( ul, item ) {
	// Custom suggestion row: label, size, price and matching schools.
	return $( "<li></li>" )
		.data( "item.autocomplete", item )
		.append( "<a><b>" + item.label + "</b> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp; Talla " + item.size +" &nbsp;&nbsp;&nbsp;&nbsp;&nbsp; $" +item.price + "<br>"+item.schools+"</a>" )
		.appendTo( ul );
};

// Client search box: autocomplete backed by the client search endpoint.
$('#clientSearch').autocomplete({
	source: "../core/query.php?nya=client&act=search",
	minLength: 2,
	focus: function( event, ui ) {
		$( "#clientSearch" ).val( ui.item.label );
		return false;
	},
	select: function( event, ui ) {
		$( "#clientSearch" ).val( ui.item.label );
		$( "#clientID" ).val( ui.item.value );
		$('#clientSearch').valid();
		return false;
	}
});

// Fake "placeholder" handling for the item search input (clear on focus).
$('#itemSearch').focus(function(){
	if($("#itemAdd").val()==""||$("#itemSearch").val()=="Buscar Artículo"){
		$(this).css('color','#2d2d2d');
		$(this).val("");
	}
});
// .blur(function(){
// if($("#itemAdd").val()==""){
// $(this).css('color','#aaa');
// $(this).val("Buscar Artículo");
// }
// });

// Fake "placeholder" handling for the client search input.
$('#clientSearch').focus(function(){
	if($("#clientSearch").val()==""||$("#clientSearch").val()=="Buscar Cliente"){
		$(this).css('color','#2d2d2d');
		$(this).val("");
	}
}).blur(function(){
	if(!$("#clientID").val()){
		$(this).css('color','#aaa');
		$(this).val("Buscar Cliente");
	}
});

// Adds the currently selected stock item (currItem) as a row in #items;
// clicking a row removes it and subtracts its price from the total.
$('#additem').click(function(){
	if($('#itemAdd').val()){
		var rawr = $('<div></div>')
			//.attr('class','angie')
			.attr('class','orderItem')
			.html( '<table class="angie"><td>' +currItem.label+'</td><td> Talla ' +currItem.size+'</td><td class="right">$'+currItem.price +'<input type="hidden" name="items[]" value="' +currItem.value+'" readonly="readonly"/></td></table>');
		// Capture the price now; currItem is cleared after the total update.
		var mhm = currItem.price;
		rawr.appendTo('#items').toggle(500,function(){$('#itemSearch').valid();}).click(function(){
			$(this).toggle(500,function(){
				$(this).remove();
			});
			updateTotal("remove",mhm);
		}).hover(function(){
			//$(this).effect( "pulsate");
			//$('#total').text((parseFloat($('#total').text())+1).toFixed(2));
		});
		updateTotal("add",mhm,function(){currItem=null;});
	}
	$('#itemAdd').val('');
});

// Adds a free-form extra charge (description + price) as a removable row.
$('#addExtra').click(function(){
	var desc = $('#extra_desc').val();
	var price = $('#extra').val();
	var rawr = $('<div></div>')
		.attr('class','orderItem')
		.html( "<table class='angie'><td>" +desc+"</td><td class='right'>$" +parseFloat(price).toFixed(2)+"</td></table> <input type='hidden' name='extra_desc[]' value='" +escape(desc)+"' readonly='readonly'/> <input type='hidden' name='extra[]' value='" +price+"' readonly='readonly'/>");
	rawr.appendTo('#items').toggle(500).click(function(){
		$(this).toggle(500,function(){
			$(this).remove();
		});
		updateTotal("remove",price);
	}).hover(function(){
		//$('#total').text((parseFloat($('#total').text())+1).toFixed(2));
	});
	updateTotal("add",price);
	// Reset the extra-charge inputs back to their placeholder state.
	$('#extra_desc').val('');
	$('#extra_desc').css('color','#ccc');
	$('#extra').css('color','#ccc');
	$('#extra').val('0.00');
	$('#addExtra').attr("disabled","disabled");
});

// Enable the "Agregar" button only when the price is a non-zero number and
// a description is present; Enter triggers the add.
$('#extra').keyup(function(){
	var esta = $(this);
	if(!isNaN(parseFloat(esta.val())) && parseFloat(esta.val()) && $('#extra_desc').val())
	{
		$('#addExtra').removeAttr("disabled");
	}
	else
	{
		$('#addExtra').attr("disabled","disabled");
	}
// NOTE(review): this handler reads the implicit global `event` (window.event)
// instead of taking an `e` parameter — works in IE/Chrome but not old Firefox.
}).keydown(function(){
	if (event.keyCode == 13) {
		if(!$('#addExtra:disabled').length)
			$('#addExtra').click();
		return false;
	}
});

// Recomputes the outstanding balance: total - (cash + coupons).
// NOTE(review): `acuenta` is an implicit global (no var declaration).
function updateToPay(){
	$("#topay").fadeOut("fast",function(){
		acuenta = ( parseFloat($("#total").text()) - ( parseFloat($("#paid").val())+parseFloat($("#coupon").val() ) ) ).toFixed(2);
		$(this).text(acuenta).fadeIn("fast");
	});
}

// Adjusts the displayed total: type is "add", "remove" or anything else to
// clear it back to 0.00. Also refreshes the balance; callback runs last.
// NOTE(review): `total` is an implicit global as well.
function updateTotal(type,price,callback){
	if(type=="remove")
		total = (parseFloat($('#total').text())-parseFloat(price)).toFixed(2);
	else if(type=="add")
		total = (parseFloat($('#total').text())+parseFloat(price)).toFixed(2);
	else
		total = "0.00";
	$('#total').fadeOut("fast",function(){
		$(this).text(total).fadeIn("fast");
	});
	updateToPay();
	if(callback) callback();
}

// Cancel button: clears selections, item rows, totals and validation state.
$('#reset').click(function(){
	$('#clientID,#itemAdd').val('');
	$(".search").css('color','#aaa');
	$('#items').toggle(500,function(){
		$(this).html('').show();
	});
	updateTotal("clear");
	resetdesu.resetForm();
});

// Numeric inputs: keep "0.00" as the empty value, select-all on click, and
// grey out when zero.
$('.numberos').keyup(function(){
	if(this.value=="")
		this.value="0.00";
}).click(function(){
	$(this).select();
}).focus(function(){
	$(this).css('color','#2d2d2d');
}).blur(function(){
	if(parseFloat(this.value)==0){
		$(this).css('color','#aaa');
		this.value="0.00";
	}
});

// Recompute the balance whenever the cash or coupon amount changes.
$('#paid,#coupon').keyup(updateToPay);

// Textarea colour toggling between placeholder grey and active dark.
$('textarea').focus(function(){
	$(this).css('color','#2d2d2d');
}).blur(function(){
	if(!$(this).val())
		$(this).css('color','#ccc');
});

// Custom jQuery Validate rule: "client" requires a selected client id,
// "item" requires a non-zero order total.
jQuery.validator.addMethod("completed", function(value, element,param) {
	if( param == "client" && $('#clientID').val() )
		return true;
	else if( param == "item" && parseFloat($('#total').text()) )
		return true;
	else
		return false;
}, "Rawr");

// Form validation + AJAX submission (response rendered into #debug).
// NOTE(review): the trailing comma after the last `messages` entry breaks
// old IE JavaScript engines.
var resetdesu = $('#newOrder').validate({
	rules: {
		clientSearch: { completed: "client" },
		itemSearch: { completed: "item" }
	},
	messages: {
		clientSearch: "Por favor elija un cliente o cree uno nuevo.",
		itemSearch: "Por favor agregue al menos un artículo al pedido.",
	},
	submitHandler: function(form) {
		$(form).ajaxSubmit({
			beforeSubmit: function() {
			},
			target: '#debug',
			success: function() {
				return false;
			}
		});
	},
	onfocusout: false
});

// "Nuevo Cliente": dims the page with #overlay (click or ESC closes it) and
// animates the client-add panel in from the bottom of the window.
$('#addclient').click(function(){
	$('#overlay').show().animate({
		opacity: 0.5,
	},200,function(){
		$(this).click(function(){
			$(this).unbind('click');
			hideoverlay2();
		});
		$(document).keyup(function(e) {
			$(this).unbind('keyup');
			if (e.keyCode == 27) { //on ESC
				$(document).unbind('keyup');
				hideoverlay2();
			}
		});
	});
	$('#newClientOrder').load("ui/client-add.php",function(){
		var lolioffset= $('#addclient').offset();
		$(this).css({
			top: $(window).height()-20,
		}).show().animate({
			left: lolioffset.left-4,
			top: lolioffset.top-4,
			width: 260,
			height: 100,
			opacity: 1,
		});
	});
});

// Animates the client-add panel away and removes the overlay.
function hideoverlay2(){
	$('#newClientOrder').animate({
		left: 0,
		top: $(window).height()-20,
		width: 0,
		height: 0,
		opacity: 0,
	},200,function(){
		$(this).hide();
		$('#overlay').hide().css({opacity:0,});
		//$('#overlay').animate({opacity:0,},200,function(){$(this).hide();});
	});
}
</script>
<h1>Pedido</h1>
<br>
<!-- New-order form: hidden nya/act fields route the POST in core/query.php -->
<div id="orderNew">
<form name="order" id="newOrder" action="../core/query.php" method="post">
<input type="hidden" name="nya" value="order"/>
<input type="hidden" name="act" value="add"/>
<input type="text" value="Buscar Cliente" id="clientSearch" name="clientSearch" class="search"/>
<input type="hidden" name="clientID" id="clientID"/>
<button type="button" class="boton" id="addclient">Nuevo Cliente</button>
<br>
<input type="text" value="Buscar Artículo" id="itemSearch" name="itemSearch" class="search"/>
<input type="hidden" id="itemAdd"/>
<button type="button" class="boton" id="additem" style="display:none;">Agregar a Pedido</button>
<!-- Selected items and extra charges accumulate here -->
<div id="items"></div>
<div id="separator"></div>
<label for="paid">Efectivo</label>
<input id="paid" name="paid" type="text" style="width:40px;text-align:right;" value="0.00" onKeyPress="return numbersonly(event, true)" class="numberos"/>
<label for="coupon">Vales</label>
<input id="coupon" name="coupon" type="text" style="width:40px;text-align:right;" value="0.00" onKeyPress="return numbersonly(event, true)" class="numberos"/>
<input type="checkbox" name="delivered" id="delivered"/>
<label for="delivered">Entregado</label>
<br>
<p class="totals">Total: $<span id="total">0.00</span>&nbsp;&nbsp; A cuenta: $<span id="topay">0.00</span></p>
<button type="button" class="boton" onclick="$('#extraDiv').toggle(500);">Cargos Extra</button>
<button type="button" class="boton" onclick="$('#note').toggle(500);">Notas</button>
<!-- Extra-charge entry (hidden until toggled) -->
<div id="extraDiv" style="display:none;">
<textarea id="extra_desc" cols="25" rows="3" style="float:left;" placeholder="Descripción del cargo extra, bordados etc."></textarea>
<input id="extra" type="text" style="width:50px;text-align:right;" onKeyPress="return numbersonly(event, true)" value="0.00" class="numberos"/>
<br>
<button id="addExtra" type="button" class="boton" disabled="disabled">Agregar</button>
</div>
<div id="note" style="display:none; clear:both;">
<textarea name="notes" cols="25" rows="3" placeholder="Notas adicionales."></textarea>
</div>
<div style="clear:both;">
<button type="submit" class="boton" > Procesar Orden</button>
<button id="reset" type="reset" class="boton" style="float:right;"> Cancelar Orden</button>
</div>
</form>
</div>
<!-- Container animated in by the "Nuevo Cliente" flow -->
<div id="newClientOrder"></div>
<div id="ticket"></div>
<div id="query"></div>
spsnk/sky
sky/admin/ui/order.php
PHP
apache-2.0
9,721
package com.sgu.infowksporga.jfx.menu.action.edit;

import java.awt.event.ActionEvent;

import com.sgu.apt.annotation.AnnotationConfig;
import com.sgu.apt.annotation.i18n.I18n;
import com.sgu.apt.annotation.i18n.I18nProperty;
import com.sgu.core.framework.gui.swing.util.UtilGUI;
import com.sgu.infowksporga.jfx.menu.action.AbstractInfoWrkspOrgaAction;
import com.sgu.infowksporga.jfx.views.file.explorer.rules.IsAtLeastViewFileSelectedRule;

/**
 * RemoveFileExplorerViewFilesAction: "Edit" menu action that moves the
 * workspace files/folders currently selected in the file-explorer views to
 * the trash. (The previous javadoc referred to the wrong class name,
 * RemoveDirectoryDeskViewFilesAction.)
 * <p>
 * The action is only enabled when at least one view file is selected (see
 * {@link IsAtLeastViewFileSelectedRule}); its labels, tooltip, mnemonic,
 * shortcut and icon are generated from the {@code @I18n} annotation below.
 * <p>
 * NOTE(review): the real delete service call is still commented out in
 * {@link #actionPerformed(ActionEvent)} — the action currently shows a
 * "not yet implemented" dialog.
 *
 * @author SGU
 */
public class RemoveFileExplorerViewFilesAction extends AbstractInfoWrkspOrgaAction {

  /**
   * Serial version id (the class is indirectly Serializable via Swing's
   * Action hierarchy).
   */
  private static final long serialVersionUID = -3651435084049489336L;

  /**
   * Constructor.<br>
   * Declares the i18n properties for this action (text, internal name,
   * tooltip, mnemonic, keyboard shortcut, icon) and installs the
   * "at least one view file selected" enablement rule.
   */
  @I18n(baseProject = AnnotationConfig.I18N_TARGET_APPLICATION_PROPERTIES_FOLDER, filePackage = "i18n", fileName = "application-prez", properties = {
      // Action buttons
      @I18nProperty(key = "menu.edit.remove.file.explorer.view.files.text", value = "Supprimer"), // Force /n
      @I18nProperty(key = "menu.edit.remove.file.explorer.view.files.name", value = "DELETE_SELECTED_DIRECTORIES_FILES"), // Force /n
      @I18nProperty(key = "menu.edit.remove.file.explorer.view.files.tooltip", value = "Supprime les fichiers/dossiers sélectionnés de toutes les vues (même non visible)"), // Force /n
      @I18nProperty(key = "menu.edit.remove.file.explorer.view.files.mnemonic", value = "p"), // Force /n
      @I18nProperty(key = "menu.edit.remove.file.explorer.view.files.shortcut", value = "control D"), // Force /n
      @I18nProperty(key = "menu.edit.remove.file.explorer.view.files.icon", value = "/icons/delete.png"), // Force /n
  })
  public RemoveFileExplorerViewFilesAction() {
    super("menu.edit.remove.file.explorer.view.files");
    setRule(new IsAtLeastViewFileSelectedRule(null));
  }

  /** {@inheritDoc} */
  @Override
  public void actionPerformed(final ActionEvent evt) {
    // Intended implementation (service lookup + execution), kept for reference:
    //final RemoveFilesToTrashServiceUI facade = SpringBeanHelper.getImplementationByInterface(RemoveFilesToTrashServiceUI.class);
    //facade.execute(facade, null);
    // Placeholder until the delete service is wired in.
    UtilGUI.showNotYetImplementedDlg();
  }
}
sguisse/InfoWkspOrga
10-Application/Application/Swing-Sample-APP/src/main/java/com/sgu/infowksporga/jfx/menu/action/edit/RemoveFileExplorerViewFilesAction.java
Java
apache-2.0
2,323
package org.canova.api.berkeley;

/**
 * A function-wrapping interface: maps a single input of type {@code I} to an
 * output of type {@code O}. Marked {@link FunctionalInterface} so the
 * compiler enforces the single-abstract-method contract and lambdas/method
 * references can be used directly.
 *
 * @param <I> the input type
 * @param <O> the output (result) type
 * @author John DeNero
 */
@FunctionalInterface
public interface MyMethod<I, O> {

  /**
   * Applies this function to the given argument.
   *
   * @param obj the input value
   * @return the computed result
   */
  O call(I obj);
}
jpatanooga/Canova
canova-api/src/main/java/org/canova/api/berkeley/MyMethod.java
Java
apache-2.0
157
package edu.indiana.soic.spidal.dapwc;

/**
 * Utilities (not yet ported) for generating PlotViz plot files that visualise
 * cluster center points produced by a PWC center-finding run.
 *
 * <p>NOTE(review): the entire implementation below is commented out. It is the
 * output of a mechanical C#-to-Java conversion and does not compile yet: it
 * still references .NET types ({@code XElement}, {@code StreamReader},
 * {@code Color}, {@code DictionaryEntry}, {@code Stream}, {@code Assembly},
 * {@code CultureInfo}, {@code StringSplitOptions}), and several package names
 * were corrupted by the converter — e.g.
 * {@code edu.indiana.soic.spidal.Boxspidal.general.Box} and
 * {@code edu.indiana.soic.spidal.generaloic.spidal.Box}, which presumably both
 * mean {@code edu.indiana.soic.spidal.general.Box} (TODO confirm against that
 * package). The converted source is kept here as the reference for the
 * eventual Java port.
 */
public class PlotTools
{
	// TODO - fix PlotTools: port the converted C# below to Java (see class Javadoc for known issues)
	/*
	public static void CreatePlotWithCenters(String centerFile, String pointsFile, String clusterNumberFile, int numberOfCenterPointsToIncludeInEachCenterType, String centerPlotFile, String plotDescription)
	{
		*//* Generate all types of center clusters per cluster
		 *
		 * Center clusters are,
		 * 1. Original Min Mean
		 * 2. MDS Min Mean
		 * 3. MDS Center of Gravity (CoG)
		 * 4. Overall Best
		 * 5. Bucket Fraction 0
		 *    Bucket Fraction 1 and so on
		 *
		 * Number of center points to include in each center type = n
		 * n <= N, which is the number of center points found for each center type by PWC
		 * N is specified through NumberOfCenters parameter in PWC
		 *
		 * Assumes a center file from a PWC center finding run
		 * Assumes a points file, which has each point mapped to its cluster in the format
		 * PointNumber<TAB>Xcoord<TAB>Ycoord<TAB>Zcoord<TAB>ClusterNumber
		 *//*

		*//* Colors to use with PlotViz reads color info from Matlab50.txt file *//*
		java.util.ArrayList<Color> matlab50Colors = GenerateMatlab50Colors();

		*//* XML elements to hold points and clusters to be used in PlotViz file *//*
		XElement clustersElement = new XElement("clusters");
		XElement pointsElement = new XElement("points");

		*//* Hashtable mapping point number to a PlotVizPoint data structure for the points in the given points file *//*
		java.util.Hashtable existingPointsTable = new java.util.Hashtable();
		*//* Maximum number of points int the points file *//*
		int maxpnum = 0;
		*//* Maximum number of clusters that points are mapped to in the points file*//*
		int maxcnum = 0;
		// NOTE(review): the three package names below were mangled by the converter — see class Javadoc.
		edu.indiana.soic.spidal.Boxspidal.general.Box<Integer> boxmaxpnum = new edu.indiana.soic.spidal.Boxspidal.general.Box<Integer>(maxpnum);
		edu.indiana.soic.spidal.generaloic.spidal.Box<Integer> boxmaxcnum = new edu.indiana.soic.spidal.Boxspidal.general.Box<Integer>(maxcnum);
		ProcessPointsFile(pointsFile, clusterNumberFile, clustersElement, pointsElement, boxmaxpnum, boxmaxcnum, existingPointsTable, matlab50Colors);
		maxpnum = boxmaxpnum.content;
		maxcnum = boxmaxcnum.content;

		*//* Table mapping each cluster (i.e. group) number to another table called method table
		 * method table maps each method (e.g. smallest distance mean, smallest MDS distance mean, etc.) name to the list center points for that particular method
		 * the order of points in the list is as same as in the given center file
		 *//*
		java.util.Hashtable groupTable = ProcessCenterFile(centerFile);

		CreatePlotWithCentersInternal(centerPlotFile, plotDescription, clustersElement, pointsElement, maxpnum, existingPointsTable, maxcnum, matlab50Colors, groupTable, numberOfCenterPointsToIncludeInEachCenterType);
	}

	private static void CreatePlotWithCentersInternal(String centerPlotFile, String plotDescription, XElement clustersElement, XElement pointsElement, int maxpnum, java.util.Hashtable existingPointsTable, int maxcnum, java.util.ArrayList<Color> matlab50Colors, java.util.Hashtable groupTable, int numberOfCenterPointsToIncludeInEachCenterType)
	{
		++maxcnum;
		for (DictionaryEntry groupToMethodTable : groupTable)
		{
			int group = (int)groupToMethodTable.Key; // group is the original cluster number
			java.util.Hashtable methodTable = (java.util.Hashtable)groupToMethodTable.Value;
			int methodCount = methodTable.size();
			int tempCount = methodCount;
			for (DictionaryEntry methodToCenterPoints : methodTable)
			{
				String method = (String)methodToCenterPoints.Key; // method is one of smallest distance mean, smallest MDS mean, etc.
				// cluster number to be used in PlotViz for this center type
				int methodNumber = methodCount - tempCount--;
				int clusterNumberForCenterType = group * methodCount + methodNumber + maxcnum;
				// cluster name to be used in PlotViz for this center type
				String centerTypeName = group + "" + method + ".centerpoints";
				// add an XML element to represent this center type as a cluster in PlotViz
				clustersElement.Add(CreateClusterElement(clusterNumberForCenterType, centerTypeName, matlab50Colors.get(group % matlab50Colors.size()), false, 2.0, methodNumber));
				java.util.ArrayList<CenterInfo> cps = (java.util.ArrayList<CenterInfo>)methodToCenterPoints.Value;
				// Picking the topmost n point for each method
				for (int i = 0; i < numberOfCenterPointsToIncludeInEachCenterType; i++)
				{
					CenterInfo cp = cps.get(i);
					PlotVizPoint p = (PlotVizPoint)existingPointsTable.get(cp.getPnum());
					pointsElement.Add(CreatePointElement(++maxpnum, clusterNumberForCenterType, ("cluster:" + group + "-idx:" + p.getIndex() + "method:" + method), p.getX(), p.getY(), p.getZ()));
				}
			}
		}
		XElement plotElement = CreatePlotElement(plotDescription, true);
		XElement plotvizElement = new XElement("plotviz");
		plotvizElement.Add(plotElement);
		plotvizElement.Add(clustersElement);
		plotvizElement.Add(pointsElement);
		plotvizElement.Save(centerPlotFile);
	}

	private static void ProcessPointsFile(String pointsFile, String clusterNumberFile, XElement clusters, XElement points, edu.indiana.soic.spidal.generaloic.spidal.Box<Integer> maxpnum, edu.indiana.soic.spidal.generaloic.spidal.Box<Integer> maxcnum, java.util.Hashtable pointsTable, java.util.ArrayList<Color> matlab50Colors)
	{
		//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
		// using (StreamReader preader = new StreamReader(pointsFile), creader = new StreamReader(clusterNumberFile))
		StreamReader preader = new StreamReader(pointsFile);
		StreamReader creader = new StreamReader(clusterNumberFile);
		try
		{
			java.util.HashSet<Integer> clusterNumbers = new java.util.HashSet<Integer>();
			maxpnum.content = -1;
			while (!preader.EndOfStream)
			{
				String pline = preader.ReadLine();
				String cline = creader.ReadLine();
				if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(pline) && !tangible.DotNetToJavaStringHelper.isNullOrEmpty(cline))
				{
					PlotVizPoint p = ReadPointLine(pline.trim());
					if (maxpnum.content < p.getIndex())
					{
						maxpnum.content = p.getIndex();
					}
					pointsTable.put(p.getIndex(), p);
					int cnum = ReadCnum(cline);
					p.setCluster(cnum);
					if (!clusterNumbers.contains(p.getCluster()))
					{
						clusterNumbers.add(p.getCluster());
						clusters.Add(CreateClusterElement(p.getCluster(), (new Integer(p.getCluster())).toString(CultureInfo.InvariantCulture), matlab50Colors.get(p.getCluster() % matlab50Colors.size()), true, 0.1, Glyphs.Hexagon2D));
					}
					points.Add(CreatePointElement(p.getIndex(), p.getCluster(), "", p.getX(), p.getY(), p.getZ()));
				}
			}
			maxcnum.content = clusterNumbers.Max();
		}
		finally
		{
			preader.dispose();
			creader.dispose();
		}
	}

	private static int ReadCnum(String line)
	{
		char[] sep = new char[] {' ', '\t'};
		String[] splits = line.split(sep, StringSplitOptions.RemoveEmptyEntries);
		return splits.length == 2 ? Integer.parseInt(splits[1]) : splits.length == 5 ? Integer.parseInt(splits[4]) : 0;
	}

	private static java.util.ArrayList<Color> GenerateMatlab50Colors()
	{
		//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
		// using (Stream stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("Salsa.PairwiseClusteringTPL.Matlab50.txt"))
		Stream stream = Assembly.GetExecutingAssembly().GetManifestResourceStream("Salsa.PairwiseClusteringTPL.Matlab50.txt");
		try
		{
			if (stream != null)
			{
				//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
				// using (StreamReader reader = new StreamReader(stream))
				StreamReader reader = new StreamReader(stream);
				try
				{
					java.util.ArrayList<Color> colors = new java.util.ArrayList<Color>();
					char[] sep = new char[] {' ', '\t'};
					String[] splits;
					String split;
					int startIdx = 3;
					int r, g, b, a;
					while (!reader.EndOfStream)
					{
						String line = reader.ReadLine();
						if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(line))
						{
							splits = line.trim().split(java.util.regex.Pattern.quote(sep.toString()), -1);
							split = splits[0];
							r = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
							split = splits[1];
							g = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
							split = splits[2];
							b = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
							split = splits[3];
							a = Integer.parseInt(split.substring(startIdx, startIdx + (split.length() - (startIdx + 1))));
							colors.add(Color.FromArgb(a, r, g, b));
						}
					}
					return colors;
				}
				finally
				{
					reader.dispose();
				}
			}
			else
			{
				throw new RuntimeException("Unable to load embedded resource: Matlab50.txt");
			}
		}
		finally
		{
			stream.dispose();
		}
	}

	private static PlotVizPoint ReadPointLine(String line)
	{
		char[] sep = new char[] {' ', '\t'};
		String[] splits = line.split(sep, StringSplitOptions.RemoveEmptyEntries);
		PlotVizPoint p = new PlotVizPoint(Double.parseDouble(splits[1]), Double.parseDouble(splits[2]), Double.parseDouble(splits[3]), Integer.parseInt(splits[0]), Integer.parseInt(splits[4]));
		return p;
	}

	private static CenterInfo ReadCenterLine(String line)
	{
		char[] sep = new char[] {' ', '\t'};
		char[] eqsep = new char[] {'='};
		String[] splits = line.split(sep, StringSplitOptions.RemoveEmptyEntries);
		int pnum = Integer.parseInt(splits[0].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
		double measure = Double.parseDouble(splits[1].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
		int methodIdx = 2;
		String source = "";
		double count = 0.0;
		if (splits[2].startsWith("Count"))
		{
			methodIdx = 4;
			count = Double.parseDouble(splits[2].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
			source = splits[3].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1];
		}
		String method = splits[methodIdx].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1];
		int group = Integer.parseInt(splits[methodIdx + 1].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
		String seqName = splits[methodIdx + 2].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1];
		for (int i = methodIdx + 3; i < splits.length - 4; ++i)
		{
			seqName += (" " + splits[i]);
		}
		int seqLength = Integer.parseInt(splits[splits.length - 4].split(java.util.regex.Pattern.quote(eqsep.toString()), -1)[1]);
		return new CenterInfo(pnum, measure, method, group, seqName, seqLength, source, count);
	}

	private static java.util.Hashtable ProcessCenterFile(String centerFile)
	{
		//C# TO JAVA CONVERTER NOTE: The following 'using' block is replaced by its Java equivalent:
		// using (StreamReader reader = new StreamReader(centerFile))
		StreamReader reader = new StreamReader(centerFile);
		try
		{
			java.util.Hashtable groupTable = new java.util.Hashtable();
			while (!reader.EndOfStream)
			{
				CenterInfo cp = ReadCenterLine(reader.ReadLine());
				AddToGroupTable(groupTable, cp);
			}
			return groupTable;
		}
		finally
		{
			reader.dispose();
		}
	}

	private static void AddToGroupTable(java.util.Hashtable groupTable, CenterInfo cp)
	{
		if (groupTable.containsKey(cp.getCluster()))
		{
			java.util.Hashtable methodTable = (java.util.Hashtable)groupTable.get(cp.getCluster());
			if (methodTable.containsKey(cp.getMethod()))
			{
				// Need a list to maintain the order of points
				java.util.ArrayList<CenterInfo> cps = (java.util.ArrayList<CenterInfo>)methodTable.get(cp.getMethod());
				cps.add(cp);
			}
			else
			{
				// Need a list to maintain the order of points
				java.util.ArrayList<CenterInfo> cps = new java.util.ArrayList<CenterInfo>(java.util.Arrays.asList(new CenterInfo[] {cp}));
				methodTable.put(cp.getMethod(), cps);
			}
		}
		else
		{
			// Need a list to maintain the order of points
			java.util.ArrayList<CenterInfo> cps = new java.util.ArrayList<CenterInfo>(java.util.Arrays.asList(new CenterInfo[] {cp}));
			java.util.Hashtable methodTable = new java.util.Hashtable();
			methodTable.put(cp.getMethod(), cps);
			groupTable.put(cp.getCluster(), methodTable);
		}
	}

	private static XElement CreatePlotElement(String name, boolean glyphVisible)
	{
		XElement plot = new XElement("plot", new XElement("title", name), new XElement("pointsize", 1), new XElement("glyph", new XElement("visible", glyphVisible ? 1 : 0), new XElement("scale", 1)), new XElement("camera", new XElement("focumode", 0), new XElement("focus", new XAttribute("x", 0), new XAttribute("y", 0), new XAttribute("z", 0))));
		return plot;
	}

	private static XElement CreateClusterElement(int key, String label, Color color, boolean isDefault, double size, int shape)
	{
		XElement cluster = new XElement("cluster", new XElement("key", key), new XElement("label", label), new XElement("visible", 1), new XElement("default", isDefault ? 1 : 0), new XElement("color", new XAttribute("r", color.R), new XAttribute("g", color.G), new XAttribute("b", color.B), new XAttribute("a", color.A)), new XElement("size", size), new XElement("shape", shape));
		return cluster;
	}

	private static XElement CreatePointElement(int key, int clusterKey, String label, double x, double y, double z)
	{
		XElement point = new XElement("point", new XElement("key", key), new XElement("clusterkey", clusterKey), new XElement("label", label), new XElement("location", new XAttribute("x", x), new XAttribute("y", y), new XAttribute("z", z)));
		return point;
	}

	//C# TO JAVA CONVERTER WARNING: Java does not allow user-defined value types. The behavior of this class will differ from the original:
	//ORIGINAL LINE: struct Glyphs
	private final static class Glyphs
	{
		public static int Triangle2D = 0;
		public static int Rectangle2D = 1;
		public static int Pentagon2D = 2;
		public static int Hexagon2D = 3;
		public static int Tetrahedron3D = 4;
		public static int Cube3D = 5;
		public static int Sphere3D = 6;
		public static int Cylinder3D = 7;
	}
	*/
}
DSC-SPIDAL/dapwc
src/main/java/edu/indiana/soic/spidal/dapwc/PlotTools.java
Java
apache-2.0
14,272
package com.limagiran.hearthstone.heroi.view;

import com.limagiran.hearthstone.heroi.control.Heroi;
import com.limagiran.hearthstone.util.AbsolutesConstraints;
import com.limagiran.hearthstone.util.Images;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import org.netbeans.lib.awtextra.AbsoluteConstraints;
import org.netbeans.lib.awtextra.AbsoluteLayout;

/**
 * Panel that displays a hero using three stacked layers on an absolute layout:
 * an animation layer sized to the hero image, a "frozen" overlay, and the hero
 * portrait itself. The add order is kept exactly as in the original layout code.
 *
 * @author Vinicius
 */
public class PanelHeroi extends JPanel {

    private final Heroi hero;
    private final ImageIcon portraitIcon;
    private Animacao animationLayer;
    private Congelado frozenOverlay;
    private JLabel portraitLabel;

    /**
     * Builds the panel for the given hero.
     *
     * @param hero  hero controller backing this view
     * @param image portrait image; its dimensions size the animation layer
     */
    public PanelHeroi(Heroi hero, ImageIcon image) {
        super(new AbsoluteLayout());
        this.hero = hero;
        this.portraitIcon = image;
        assemble();
    }

    /** Creates the three layers and adds them in the same order as before. */
    private void assemble() {
        setOpaque(false);
        frozenOverlay = new Congelado();
        portraitLabel = new JLabel(portraitIcon, JLabel.CENTER);
        animationLayer = new Animacao(hero);
        add(animationLayer,
                new AbsoluteConstraints(0, 0, portraitIcon.getIconWidth(), portraitIcon.getIconHeight()));
        add(frozenOverlay, AbsolutesConstraints.ZERO);
        add(portraitLabel, AbsolutesConstraints.ZERO);
    }

    /** Repaints the overlay and the portrait label. */
    public void atualizar() {
        frozenOverlay.repaint();
        portraitLabel.repaint();
    }

    /**
     * Shows or hides the "frozen" overlay.
     *
     * @param flag {@code true} to show the overlay
     */
    public void setFreeze(boolean flag) {
        frozenOverlay.setVisible(flag);
    }

    /** @return the animation layer of this panel */
    public Animacao getAnimacao() {
        return animationLayer;
    }

    @Override
    public String toString() {
        return hero.getToString();
    }
}

/**
 * Overlay label showing the "frozen" hero image; non-opaque and hidden until
 * {@link PanelHeroi#setFreeze(boolean)} makes it visible.
 */
class Congelado extends JLabel {

    public Congelado() {
        super(Images.HEROI_CONGELADO, JLabel.CENTER);
        setOpaque(false);
        setVisible(false);
    }
}
limagiran/hearthstone
src/com/limagiran/hearthstone/heroi/view/PanelHeroi.java
Java
apache-2.0
1,754
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.compute.v1.stub;

import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonCallableFactory;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshotCallable;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.longrunning.stub.OperationsStub;
import com.google.api.gax.rpc.BatchingCallSettings;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.ServerStreamingCallSettings;
import com.google.api.gax.rpc.ServerStreamingCallable;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.longrunning.Operation;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
// NOTE(review): this file carries @Generated — do not hand-edit logic; regenerate with
// gapic-generator-java instead. Every factory method below simply delegates to
// HttpJsonCallableFactory.
/**
 * REST callable factory implementation for the RegionDiskTypes service API.
 *
 * <p>This class is for advanced usage.
 */
@Generated("by gapic-generator-java")
@BetaApi
public class HttpJsonRegionDiskTypesCallableFactory
    implements HttpJsonStubCallableFactory<Operation, OperationsStub> {

  /** Builds a plain unary callable by delegating to {@code HttpJsonCallableFactory.createUnaryCallable}. */
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createUnaryCallable(
      HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
      UnaryCallSettings<RequestT, ResponseT> callSettings,
      ClientContext clientContext) {
    return HttpJsonCallableFactory.createUnaryCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /** Builds a paged callable by delegating to {@code HttpJsonCallableFactory.createPagedCallable}. */
  @Override
  public <RequestT, ResponseT, PagedListResponseT>
      UnaryCallable<RequestT, PagedListResponseT> createPagedCallable(
          HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
          PagedCallSettings<RequestT, ResponseT, PagedListResponseT> callSettings,
          ClientContext clientContext) {
    return HttpJsonCallableFactory.createPagedCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /** Builds a batching callable by delegating to {@code HttpJsonCallableFactory.createBatchingCallable}. */
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createBatchingCallable(
      HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
      BatchingCallSettings<RequestT, ResponseT> callSettings,
      ClientContext clientContext) {
    return HttpJsonCallableFactory.createBatchingCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /**
   * Builds a long-running-operation callable: the initial unary call is wrapped in an
   * {@code HttpJsonOperationSnapshotCallable} (using the method descriptor's operation snapshot
   * factory) and combined with the operations stub's long-running client.
   */
  @BetaApi(
      "The surface for long-running operations is not stable yet and may change in the future.")
  @Override
  public <RequestT, ResponseT, MetadataT>
      OperationCallable<RequestT, ResponseT, MetadataT> createOperationCallable(
          HttpJsonCallSettings<RequestT, Operation> httpJsonCallSettings,
          OperationCallSettings<RequestT, ResponseT, MetadataT> callSettings,
          ClientContext clientContext,
          OperationsStub operationsStub) {
    UnaryCallable<RequestT, Operation> innerCallable =
        HttpJsonCallableFactory.createBaseUnaryCallable(
            httpJsonCallSettings, callSettings.getInitialCallSettings(), clientContext);
    HttpJsonOperationSnapshotCallable<RequestT, Operation> initialCallable =
        new HttpJsonOperationSnapshotCallable<RequestT, Operation>(
            innerCallable,
            httpJsonCallSettings.getMethodDescriptor().getOperationSnapshotFactory());
    return HttpJsonCallableFactory.createOperationCallable(
        callSettings, clientContext, operationsStub.longRunningClient(), initialCallable);
  }

  /** Builds a server-streaming callable by delegating to {@code HttpJsonCallableFactory.createServerStreamingCallable}. */
  @Override
  public <RequestT, ResponseT>
      ServerStreamingCallable<RequestT, ResponseT> createServerStreamingCallable(
          HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
          ServerStreamingCallSettings<RequestT, ResponseT> callSettings,
          ClientContext clientContext) {
    return HttpJsonCallableFactory.createServerStreamingCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }
}
googleapis/java-compute
google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonRegionDiskTypesCallableFactory.java
Java
apache-2.0
4,644
package android.coolweather.com.coolweather.service;

import android.app.AlarmManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.coolweather.com.coolweather.gson.Weather;
import android.coolweather.com.coolweather.util.HttpUtil;
import android.coolweather.com.coolweather.util.Utility;
import android.os.IBinder;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.util.Log;

import java.io.IOException;

import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;

/**
 * Started service that refreshes the cached weather data and the Bing daily
 * picture, then re-arms an AlarmManager alarm so it runs again in 8 hours.
 *
 * <p>Fix in this revision: the null-check in {@link #updateWeather()} used the
 * non-short-circuit {@code &} operator, so a failed parse ({@code weather == null})
 * still evaluated {@code weather.status} and crashed with a NullPointerException.
 * It now uses {@code &&}.
 */
public class AutoUpdateService extends Service {

    /** Refresh interval: 8 hours in milliseconds. */
    private static final long UPDATE_INTERVAL_MS = 8 * 60 * 60 * 1000L;

    @Override
    public IBinder onBind(Intent intent) {
        // Started service only — binding is not supported (throws NPE if attempted).
        throw null;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Log.e("syz","更新天气");
        updateWeather();
        updateBingPic();
        AlarmManager manager = (AlarmManager) getSystemService(ALARM_SERVICE);
        // Primitive long instead of boxed Long — no autoboxing needed here.
        long triggerAtTime = SystemClock.elapsedRealtime() + UPDATE_INTERVAL_MS;
        Intent i = new Intent(this, AutoUpdateService.class);
        // NOTE(review): on API 31+ PendingIntent creation requires FLAG_IMMUTABLE or
        // FLAG_MUTABLE — confirm the app's targetSdkVersion before raising it.
        PendingIntent pi = PendingIntent.getService(this, 0, i, 0);
        // Cancel any previously scheduled alarm before re-arming.
        manager.cancel(pi);
        manager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, triggerAtTime, pi);
        return super.onStartCommand(intent, flags, startId);
    }

    /**
     * Refreshes the cached weather JSON for the city stored in shared
     * preferences. Does nothing when no weather has ever been cached.
     */
    public void updateWeather() {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString = prefs.getString("weather", null);
        if (weatherString != null) {
            Weather weather = Utility.handleWeatherResponse(weatherString);
            String weatherId = weather.basic.weatherId;
            // NOTE(review): API key is hard-coded; consider moving it to build config.
            String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId + "&key=bc0418b57b2d4918819d3974ac1285d9";
            HttpUtil.sendOKHTTPRequest(weatherUrl, new Callback() {
                @Override
                public void onFailure(Call call, IOException e) {
                    e.printStackTrace();
                }

                @Override
                public void onResponse(Call call, Response response) throws IOException {
                    final String responseText = response.body().string();
                    final Weather weather = Utility.handleWeatherResponse(responseText);
                    // BUG FIX: was 'weather != null & ...' — the non-short-circuit '&'
                    // dereferenced weather.status even when weather was null (NPE).
                    if (weather != null && "ok".equals(weather.status)) {
                        SharedPreferences.Editor editor =
                                PreferenceManager.getDefaultSharedPreferences(AutoUpdateService.this).edit();
                        editor.putString("weather", responseText);
                        editor.apply();
                    }
                }
            });
        }
    }

    /**
     * Fetches the URL of today's Bing picture and caches it in shared
     * preferences under "bing_pic".
     */
    private void updateBingPic() {
        String requestBingPic = "http://guolin.tech/api/bing_pic";
        HttpUtil.sendOKHTTPRequest(requestBingPic, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String bingPic = response.body().string();
                SharedPreferences.Editor editor =
                        PreferenceManager.getDefaultSharedPreferences(AutoUpdateService.this).edit();
                editor.putString("bing_pic", bingPic);
                editor.apply();
            }
        });
    }
}
shiyuzhe/coolweather
app/src/main/java/android/coolweather/com/coolweather/service/AutoUpdateService.java
Java
apache-2.0
3,589
# Copyright (c) 2013 Bull. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime from unittest import mock import ddt from novaclient import client as nova_client from novaclient import exceptions as nova_exceptions from oslo_config import cfg from oslo_config import fixture as conf_fixture import testtools from blazar import context from blazar.db import api as db_api from blazar.db import exceptions as db_exceptions from blazar.db import utils as db_utils from blazar.manager import exceptions as manager_exceptions from blazar.manager import service from blazar.plugins import oshosts as plugin from blazar.plugins.oshosts import host_plugin from blazar import tests from blazar.utils.openstack import base from blazar.utils.openstack import nova from blazar.utils.openstack import placement from blazar.utils import trusts CONF = cfg.CONF class AggregateFake(object): def __init__(self, i, name, hosts): self.id = i self.name = name self.hosts = hosts class PhysicalHostPluginSetupOnlyTestCase(tests.TestCase): def setUp(self): super(PhysicalHostPluginSetupOnlyTestCase, self).setUp() self.cfg = self.useFixture(conf_fixture.Config(CONF)) self.cfg.config(os_admin_username='fake-user') self.cfg.config(os_admin_password='fake-passwd') self.cfg.config(os_admin_user_domain_name='fake-user-domain') self.cfg.config(os_admin_project_name='fake-pj-name') self.cfg.config(os_admin_project_domain_name='fake-pj-domain') self.context = context self.patch(self.context, 'BlazarContext') 
self.patch(base, 'url_for').return_value = 'http://foo.bar' self.host_plugin = host_plugin self.fake_phys_plugin = self.host_plugin.PhysicalHostPlugin() self.nova = nova self.rp_create = self.patch(self.nova.ReservationPool, 'create') self.db_api = db_api self.db_host_extra_capability_get_all_per_host = ( self.patch(self.db_api, 'host_extra_capability_get_all_per_host')) def test_configuration(self): self.assertEqual("fake-user", self.fake_phys_plugin.username) self.assertEqual("fake-passwd", self.fake_phys_plugin.password) self.assertEqual("fake-user-domain", self.fake_phys_plugin.user_domain_name) self.assertEqual("fake-pj-name", self.fake_phys_plugin.project_name) self.assertEqual("fake-pj-domain", self.fake_phys_plugin.project_domain_name) def test__get_extra_capabilities_with_values(self): self.db_host_extra_capability_get_all_per_host.return_value = [ {'id': 1, 'capability_name': 'foo', 'capability_value': 'bar', 'other': 'value', 'computehost_id': 1 }, {'id': 2, 'capability_name': 'buzz', 'capability_value': 'word', 'computehost_id': 1 }] res = self.fake_phys_plugin._get_extra_capabilities(1) self.assertEqual({'foo': 'bar', 'buzz': 'word'}, res) def test__get_extra_capabilities_with_no_capabilities(self): self.db_host_extra_capability_get_all_per_host.return_value = [] res = self.fake_phys_plugin._get_extra_capabilities(1) self.assertEqual({}, res) @ddt.ddt class PhysicalHostPluginTestCase(tests.TestCase): def setUp(self): super(PhysicalHostPluginTestCase, self).setUp() self.cfg = cfg self.context = context self.patch(self.context, 'BlazarContext') self.nova_client = nova_client self.nova_client = self.patch(self.nova_client, 'Client').return_value self.service = service self.manager = self.service.ManagerService() self.fake_host_id = '1' self.fake_host = { 'id': self.fake_host_id, 'hypervisor_hostname': 'hypvsr1', 'service_name': 'compute1', 'vcpus': 4, 'cpu_info': 'foo', 'hypervisor_type': 'xen', 'hypervisor_version': 1, 'memory_mb': 8192, 'local_gb': 10, 
'trust_id': 'exxee111qwwwwe', } self.patch(base, 'url_for').return_value = 'http://foo.bar' self.host_plugin = host_plugin self.fake_phys_plugin = self.host_plugin.PhysicalHostPlugin() self.db_api = db_api self.db_utils = db_utils self.db_host_get = self.patch(self.db_api, 'host_get') self.db_host_get.return_value = self.fake_host self.db_host_list = self.patch(self.db_api, 'host_list') self.db_host_create = self.patch(self.db_api, 'host_create') self.db_host_update = self.patch(self.db_api, 'host_update') self.db_host_destroy = self.patch(self.db_api, 'host_destroy') self.db_host_extra_capability_get_all_per_host = self.patch( self.db_api, 'host_extra_capability_get_all_per_host') self.db_host_extra_capability_get_all_per_name = self.patch( self.db_api, 'host_extra_capability_get_all_per_name') self.db_host_extra_capability_create = self.patch( self.db_api, 'host_extra_capability_create') self.db_host_extra_capability_update = self.patch( self.db_api, 'host_extra_capability_update') self.nova = nova self.rp_create = self.patch(self.nova.ReservationPool, 'create') self.patch(self.nova.ReservationPool, 'get_aggregate_from_name_or_id') self.add_compute_host = self.patch(self.nova.ReservationPool, 'add_computehost') self.remove_compute_host = self.patch(self.nova.ReservationPool, 'remove_computehost') self.get_host_details = self.patch(self.nova.NovaInventory, 'get_host_details') self.get_host_details.return_value = self.fake_host self.get_servers_per_host = self.patch( self.nova.NovaInventory, 'get_servers_per_host') self.get_servers_per_host.return_value = None self.get_extra_capabilities = self.patch( self.fake_phys_plugin, '_get_extra_capabilities') self.get_extra_capabilities.return_value = { 'foo': 'bar', 'buzz': 'word', } self.placement = placement self.prov_create = self.patch(self.placement.BlazarPlacementClient, 'create_reservation_provider') self.prov_create.return_value = { "generation": 0, "name": "blazar_foo", "uuid": 
"7d2590ae-fb85-4080-9306-058b4c915e3f", "parent_provider_uuid": "542df8ed-9be2-49b9-b4db-6d3183ff8ec8", "root_provider_uuid": "542df8ed-9be2-49b9-b4db-6d3183ff8ec8" } self.prov_delete = self.patch(self.placement.BlazarPlacementClient, 'delete_reservation_provider') self.fake_phys_plugin.setup(None) self.trusts = trusts self.trust_ctx = self.patch(self.trusts, 'create_ctx_from_trust') self.trust_create = self.patch(self.trusts, 'create_trust') self.ServerManager = nova.ServerManager def test_get_host(self): host = self.fake_phys_plugin.get_computehost(self.fake_host_id) self.db_host_get.assert_called_once_with('1') expected = self.fake_host.copy() expected.update({'foo': 'bar', 'buzz': 'word'}) self.assertEqual(expected, host) def test_get_host_without_extracapabilities(self): self.get_extra_capabilities.return_value = {} host = self.fake_phys_plugin.get_computehost(self.fake_host_id) self.db_host_get.assert_called_once_with('1') self.assertEqual(self.fake_host, host) @testtools.skip('incorrect decorator') def test_list_hosts(self): self.fake_phys_plugin.list_computehosts({}) self.db_host_list.assert_called_once_with() del self.service_utils def test_create_host_without_extra_capabilities(self): self.get_extra_capabilities.return_value = {} host = self.fake_phys_plugin.create_computehost(self.fake_host) self.db_host_create.assert_called_once_with(self.fake_host) self.prov_create.assert_called_once_with('hypvsr1') self.assertEqual(self.fake_host, host) def test_create_host_with_extra_capabilities(self): fake_host = self.fake_host.copy() fake_host.update({'foo': 'bar'}) # NOTE(sbauza): 'id' will be pop'd, we need to keep track of it fake_request = fake_host.copy() fake_capa = {'computehost_id': '1', 'capability_name': 'foo', 'capability_value': 'bar', } self.get_extra_capabilities.return_value = {'foo': 'bar'} self.db_host_create.return_value = self.fake_host host = self.fake_phys_plugin.create_computehost(fake_request) 
self.db_host_create.assert_called_once_with(self.fake_host) self.prov_create.assert_called_once_with('hypvsr1') self.db_host_extra_capability_create.assert_called_once_with(fake_capa) self.assertEqual(fake_host, host) def test_create_host_with_capabilities_too_long(self): fake_host = self.fake_host.copy() fake_host.update({'foo': 'bar'}) # NOTE(sbauza): 'id' will be pop'd, we need to keep track of it fake_request = fake_host.copy() long_key = "" for i in range(65): long_key += "0" fake_request[long_key] = "foo" self.assertRaises(manager_exceptions.ExtraCapabilityTooLong, self.fake_phys_plugin.create_computehost, fake_request) def test_create_host_without_trust_id(self): self.assertRaises(manager_exceptions.MissingTrustId, self.fake_phys_plugin.create_computehost, {}) def test_create_host_without_host_id(self): self.assertRaises(manager_exceptions.InvalidHost, self.fake_phys_plugin.create_computehost, {'trust_id': 'exxee111qwwwwe'}) def test_create_host_with_existing_vms(self): self.get_servers_per_host.return_value = ['server1', 'server2'] self.assertRaises(manager_exceptions.HostHavingServers, self.fake_phys_plugin.create_computehost, self.fake_host) def test_create_host_issuing_rollback(self): def fake_db_host_create(*args, **kwargs): raise db_exceptions.BlazarDBException self.db_host_create.side_effect = fake_db_host_create self.assertRaises(db_exceptions.BlazarDBException, self.fake_phys_plugin.create_computehost, self.fake_host) self.prov_create.assert_called_once_with('hypvsr1') self.prov_delete.assert_called_once_with('hypvsr1') def test_create_host_having_issue_when_storing_extra_capability(self): def fake_db_host_extra_capability_create(*args, **kwargs): raise db_exceptions.BlazarDBException fake_host = self.fake_host.copy() fake_host.update({'foo': 'bar'}) fake_request = fake_host.copy() self.get_extra_capabilities.return_value = {'foo': 'bar'} self.db_host_create.return_value = self.fake_host fake = self.db_host_extra_capability_create fake.side_effect = 
fake_db_host_extra_capability_create self.assertRaises(manager_exceptions.CantAddExtraCapability, self.fake_phys_plugin.create_computehost, fake_request) def test_update_host(self): host_values = {'foo': 'baz'} self.db_host_extra_capability_get_all_per_name.return_value = [ {'id': 'extra_id1', 'computehost_id': self.fake_host_id, 'capability_name': 'foo', 'capability_value': 'bar' }, ] self.get_reservations_by_host = self.patch( self.db_utils, 'get_reservations_by_host_id') self.get_reservations_by_host.return_value = [] self.fake_phys_plugin.update_computehost(self.fake_host_id, host_values) self.db_host_extra_capability_update.assert_called_once_with( 'extra_id1', {'capability_name': 'foo', 'capability_value': 'baz'}) def test_update_host_having_issue_when_storing_extra_capability(self): def fake_db_host_extra_capability_update(*args, **kwargs): raise RuntimeError host_values = {'foo': 'baz'} self.get_reservations_by_host = self.patch( self.db_utils, 'get_reservations_by_host_id') self.get_reservations_by_host.return_value = [] self.db_host_extra_capability_get_all_per_name.return_value = [ {'id': 'extra_id1', 'computehost_id': self.fake_host_id, 'capability_name': 'foo', 'capability_value': 'bar' }, ] fake = self.db_host_extra_capability_update fake.side_effect = fake_db_host_extra_capability_update self.assertRaises(manager_exceptions.CantAddExtraCapability, self.fake_phys_plugin.update_computehost, self.fake_host_id, host_values) def test_update_host_with_new_extra_capability(self): host_values = {'qux': 'word'} self.db_host_extra_capability_get_all_per_host.return_value = [] self.fake_phys_plugin.update_computehost(self.fake_host_id, host_values) self.db_host_extra_capability_create.assert_called_once_with({ 'computehost_id': '1', 'capability_name': 'qux', 'capability_value': 'word' }) def test_update_host_with_used_capability(self): host_values = {'foo': 'buzz'} self.db_host_extra_capability_get_all_per_name.return_value = [ {'id': 'extra_id1', 
             'computehost_id': self.fake_host_id,
             'capability_name': 'foo',
             'capability_value': 'bar'
             },
        ]
        fake_phys_reservation = {
            'resource_type': plugin.RESOURCE_TYPE,
            'resource_id': 'resource-1',
        }
        fake_get_reservations = self.patch(self.db_utils,
                                           'get_reservations_by_host_id')
        fake_get_reservations.return_value = [fake_phys_reservation]
        # The reservation filters on "$foo", so updating 'foo' must fail.
        fake_get_plugin_reservation = self.patch(self.db_utils,
                                                 'get_plugin_reservation')
        fake_get_plugin_reservation.return_value = {
            'resource_properties': '["==", "$foo", "bar"]'
        }
        self.assertRaises(manager_exceptions.CantAddExtraCapability,
                          self.fake_phys_plugin.update_computehost,
                          self.fake_host_id,
                          host_values)
        fake_get_plugin_reservation.assert_called_once_with(
            plugin.RESOURCE_TYPE, 'resource-1')

    def test_delete_host(self):
        # An unreserved, VM-free host is removed from the DB and from the
        # provider (prov_delete on its hypervisor hostname).
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = []
        self.fake_phys_plugin.delete_computehost(self.fake_host_id)
        self.db_host_destroy.assert_called_once_with(self.fake_host_id)
        self.prov_delete.assert_called_once_with('hypvsr1')
        self.get_servers_per_host.assert_called_once_with(
            self.fake_host["hypervisor_hostname"])

    def test_delete_host_reserved(self):
        # A host that still has an allocation cannot be deleted.
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': self.fake_host_id
            }
        ]
        self.assertRaises(manager_exceptions.CantDeleteHost,
                          self.fake_phys_plugin.delete_computehost,
                          self.fake_host_id)

    def test_delete_host_having_vms(self):
        # A host still running servers cannot be deleted.
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = []
        self.get_servers_per_host.return_value = ['server1', 'server2']
        self.assertRaises(manager_exceptions.HostHavingServers,
                          self.fake_phys_plugin.delete_computehost,
                          self.fake_host_id)
        self.get_servers_per_host.assert_called_once_with(
            self.fake_host["hypervisor_hostname"])

    def test_delete_host_not_existing_in_db(self):
        # Deleting an unknown host id raises HostNotFound.
        self.db_host_get.return_value = None
        self.assertRaises(manager_exceptions.HostNotFound,
                          self.fake_phys_plugin.delete_computehost,
                          self.fake_host_id)

    def test_delete_host_issuing_rollback(self):
        # A DB error during destruction is reported as CantDeleteHost.
        def fake_db_host_destroy(*args, **kwargs):
            raise db_exceptions.BlazarDBException

        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = []
        self.db_host_destroy.side_effect = fake_db_host_destroy
        self.assertRaises(manager_exceptions.CantDeleteHost,
                          self.fake_phys_plugin.delete_computehost,
                          self.fake_host_id)

    def test_list_allocations(self):
        # (reservation, lease, host) tuples are grouped into one entry per
        # host, each listing its reservations.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
            ('reservation-1', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-3'),
            ('reservation-3', 'lease-2', 'host-1'),
        ]
        expected = [
            {
                'resource_id': 'host-1',
                'reservations': [
                    {'id': 'reservation-1', 'lease_id': 'lease-1'},
                    {'id': 'reservation-3', 'lease_id': 'lease-2'},
                ]
            },
            {
                'resource_id': 'host-2',
                'reservations': [
                    {'id': 'reservation-1', 'lease_id': 'lease-1'},
                    {'id': 'reservation-2', 'lease_id': 'lease-1'},
                ]
            },
            {
                'resource_id': 'host-3',
                'reservations': [
                    {'id': 'reservation-2', 'lease_id': 'lease-1'},
                ]
            }
        ]
        ret = self.fake_phys_plugin.list_allocations({})

        # Sort returned value to use assertListEqual
        for r in ret:
            r['reservations'].sort(key=lambda x: x['id'])
        ret.sort(key=lambda x: x['resource_id'])

        self.assertListEqual(expected, ret)

    def test_list_allocations_with_lease_id(self):
        # Only allocations belonging to the queried lease are listed.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
            ('reservation-1', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-3'),
        ]
        expected = [
            {
                'resource_id': 'host-1',
                'reservations': [
                    {'id': 'reservation-1', 'lease_id': 'lease-1'},
                ]
            },
            {
                'resource_id': 'host-2',
                'reservations': [
                    {'id': 'reservation-1', 'lease_id': 'lease-1'},
                    {'id': 'reservation-2', 'lease_id': 'lease-1'},
                ]
            },
            {
                'resource_id': 'host-3',
                'reservations': [
                    {'id': 'reservation-2', 'lease_id': 'lease-1'},
                ]
            }
        ]
        ret = self.fake_phys_plugin.list_allocations({'lease_id': 'lease-1'})

        # Sort returned value to use assertListEqual
        for r in ret:
            r['reservations'].sort(key=lambda x: x['id'])
        ret.sort(key=lambda x: x['resource_id'])

        self.assertListEqual(expected, ret)

    def test_list_allocations_with_reservation_id(self):
        # Only allocations of the queried reservation are listed.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
            ('reservation-1', 'lease-1', 'host-2'),
        ]
        expected = [
            {
                'resource_id': 'host-1',
                'reservations': [
                    {'id': 'reservation-1', 'lease_id': 'lease-1'},
                ]
            },
            {
                'resource_id': 'host-2',
                'reservations': [
                    {'id': 'reservation-1', 'lease_id': 'lease-1'},
                ]
            },
        ]
        ret = self.fake_phys_plugin.list_allocations(
            {'reservation_id': 'reservation-1'})

        # Sort returned value to use assertListEqual
        for r in ret:
            r['reservations'].sort(key=lambda x: x['id'])
        ret.sort(key=lambda x: x['resource_id'])

        self.assertListEqual(expected, ret)

    def test_get_allocations(self):
        # get_allocations returns the reservations of a single host.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
            ('reservation-1', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-3'),
            ('reservation-3', 'lease-2', 'host-1'),
        ]
        expected = {
            'resource_id': 'host-1',
            'reservations': [
                {'id': 'reservation-1',
                 'lease_id': 'lease-1'},
                {'id': 'reservation-3',
                 'lease_id': 'lease-2'},
            ]
        }
        ret = self.fake_phys_plugin.get_allocations('host-1', {})

        # sort returned value to use assertListEqual
        ret['reservations'].sort(key=lambda x: x['id'])

        self.assertDictEqual(expected, ret)

    def test_get_allocations_with_lease_id(self):
        # The lease_id query restricts the host's reservation list.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
        ]
        expected = {
            'resource_id': 'host-1',
            'reservations': [
                {'id': 'reservation-1',
                 'lease_id': 'lease-1'},
            ]
        }
        ret = self.fake_phys_plugin.get_allocations('host-1',
                                                    {'lease_id': 'lease-1'})

        # sort returned value to use assertListEqual
        ret['reservations'].sort(key=lambda x: x['id'])

        self.assertDictEqual(expected, ret)

    def test_get_allocations_with_reservation_id(self):
        # The reservation_id query restricts the host's reservation list.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
        ]
        expected = {
            'resource_id': 'host-1',
            'reservations': [
                {'id': 'reservation-1',
                 'lease_id': 'lease-1'},
            ]
        }
        ret = self.fake_phys_plugin.get_allocations(
            'host-1', {'reservation_id': 'reservation-1'})

        # sort returned value to use assertListEqual
        ret['reservations'].sort(key=lambda x: x['id'])

        self.assertDictEqual(expected, ret)

    def test_get_allocations_with_invalid_host(self):
        # An unknown host yields an empty reservations list, not an error.
        self.db_get_reserv_allocs = self.patch(
            self.db_utils, 'get_reservation_allocations_by_host_ids')

        # Expecting a list of (Reservation, Allocation)
        self.db_get_reserv_allocs.return_value = [
            ('reservation-1', 'lease-1', 'host-1'),
            ('reservation-1', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-2'),
            ('reservation-2', 'lease-1', 'host-3'),
            ('reservation-3', 'lease-2', 'host-1'),
        ]
        expected = {'resource_id': 'no-reserved-host', 'reservations': []}
        ret = self.fake_phys_plugin.get_allocations('no-reserved-host', {})

        self.assertDictEqual(expected, ret)

    def test_create_reservation_no_hosts_available(self):
        # No matching hosts -> NotEnoughHostsAvailable, and neither the
        # resource pool nor the host_reservation row is created.
        now = datetime.datetime.utcnow()
        values = {
            'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
            'min': 1,
            'max': 1,
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': '',
            'start_date': now,
            'end_date': now + datetime.timedelta(hours=1),
            'resource_type': plugin.RESOURCE_TYPE,
        }
        host_reservation_create = self.patch(self.db_api,
                                             'host_reservation_create')
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = []
        self.assertRaises(manager_exceptions.NotEnoughHostsAvailable,
                          self.fake_phys_plugin.reserve_resource,
                          u'f9894fcf-e2ed-41e9-8a4c-92fac332608e',
                          values)
        self.rp_create.assert_not_called()
        host_reservation_create.assert_not_called()

    def test_create_reservation_hosts_available(self):
        # A successful reservation stores the host_reservation row and one
        # allocation per matching host.
        values = {
            'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
            'min': 1,
            'max': 1,
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': '',
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00),
            'resource_type': plugin.RESOURCE_TYPE,
        }
        self.rp_create.return_value = mock.MagicMock(id=1)
        host_reservation_create = self.patch(self.db_api,
                                             'host_reservation_create')
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = ['host1', 'host2']
        host_allocation_create = self.patch(
            self.db_api,
            'host_allocation_create')
        self.fake_phys_plugin.reserve_resource(
            u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            values)
        host_values = {
            'reservation_id': u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            'aggregate_id': 1,
            'resource_properties': '',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'count_range': '1-1',
            'status': 'pending',
            'before_end': 'default'
        }
        host_reservation_create.assert_called_once_with(host_values)
        calls = [
            mock.call(
                {'compute_host_id':
                 'host1',
                 'reservation_id': u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
                 }),
            mock.call(
                {'compute_host_id':
                 'host2',
                 'reservation_id': u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
                 }),
        ]
        host_allocation_create.assert_has_calls(calls)

    @ddt.data("min", "max", "hypervisor_properties", "resource_properties")
    def test_create_reservation_with_missing_param(self, missing_param):
        # Each of the ddt-supplied keys is required; omitting any one of
        # them raises MissingParameter.
        values = {
            'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
            'min': 1,
            'max': 2,
            'before_end': 'default',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': '',
            'start_date': datetime.datetime(2017, 3, 1, 20, 00),
            'end_date': datetime.datetime(2017, 3, 2, 20, 00),
            'resource_type': plugin.RESOURCE_TYPE}
        del values[missing_param]
        self.assertRaises(
            manager_exceptions.MissingParameter,
            self.fake_phys_plugin.reserve_resource,
            u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            values)

    @ddt.data({"params": {'max': 0}},
              {"params": {'max': -1}},
              {"params": {'max': 'one'}},
              {"params": {'min': 0}},
              {"params": {'min': -1}},
              {"params": {'min': 'one'}},
              {"params": {'before_end': 'invalid'}})
    @ddt.unpack
    def test_create_reservation_with_invalid_param(self, params):
        # Non-positive or non-integer min/max values and unknown before_end
        # values are rejected at create time.
        values = {
            'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
            'min': 1,
            'max': 2,
            'before_end': 'default',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': '',
            'start_date': datetime.datetime(2017, 3, 1, 20, 00),
            'end_date': datetime.datetime(2017, 3, 2, 20, 00),
            'resource_type': plugin.RESOURCE_TYPE}
        for key, value in params.items():
            values[key] = value
        self.assertRaises(
            manager_exceptions.MalformedParameter,
            self.fake_phys_plugin.reserve_resource,
            u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            values)

    @ddt.data({"params": {'max': 0}},
              {"params": {'max': -1}},
              {"params": {'max': 'one'}},
              {"params": {'min': 0}},
              {"params": {'min': -1}},
              {"params": {'min': 'one'}})
    @ddt.unpack
    def test_update_reservation_with_invalid_param(self, params):
        # The same min/max validation applies on update.
        values = {
            'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
            'min': 1,
            'max': 2,
            'before_end': 'default',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': '',
            'start_date': datetime.datetime(2017, 3, 1, 20, 00),
            'end_date': datetime.datetime(2017, 3, 2, 20, 00),
            'resource_type': plugin.RESOURCE_TYPE}
        self.patch(self.db_api, 'reservation_get')
        self.patch(self.db_api, 'lease_get')
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'count_range': '1-1',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': ''
        }
        for key, value in params.items():
            values[key] = value
        self.assertRaises(
            manager_exceptions.MalformedParameter,
            self.fake_phys_plugin.update_reservation,
            u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            values)

    def test_create_update_reservation_with_invalid_range(self):
        # min greater than max is an InvalidRange on both create and update.
        values = {
            'lease_id': u'018c1b43-e69e-4aef-a543-09681539cf4c',
            'min': 2,
            'max': 1,
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': '',
            'start_date': datetime.datetime(2017, 3, 1, 20, 00),
            'end_date': datetime.datetime(2017, 3, 2, 20, 00),
            'resource_type': plugin.RESOURCE_TYPE,
        }
        self.patch(self.db_api, 'reservation_get')
        self.patch(self.db_api, 'lease_get')
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'count_range': '1-1',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': ''
        }
        self.assertRaises(
            manager_exceptions.InvalidRange,
            self.fake_phys_plugin.reserve_resource,
            u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            values)
        self.assertRaises(
            manager_exceptions.InvalidRange,
            self.fake_phys_plugin.update_reservation,
            u'441c1476-9f8f-4700-9f30-cd9b6fef3509',
            values)

    def test_update_reservation_shorten(self):
        # Shrinking the window inside the current lease dates needs no
        # reallocation, so host_reservation_get is never consulted.
        values = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 30),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00)
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id':
            u'10870923-6d56-45c9-b592-f788053f5baa',
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_not_called()

    def test_update_reservation_extend(self):
        # Extending into a period that is already free on the allocated host
        # keeps the existing allocation untouched.
        values = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 30)
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'count_range': '1-1',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [{'id': 'host1'}]
        # Only the reservation's own period is booked on host1, so the
        # extension does not conflict.
        get_reserved_periods = self.patch(self.db_utils,
                                          'get_reserved_periods')
        get_reserved_periods.return_value = [
            (datetime.datetime(2013, 12, 19, 20, 00),
             datetime.datetime(2013, 12, 19, 21, 00))
        ]
        host_allocation_create = self.patch(
            self.db_api,
            'host_allocation_create')
        host_allocation_destroy = self.patch(
            self.db_api,
            'host_allocation_destroy')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_allocation_create.assert_not_called()
        host_allocation_destroy.assert_not_called()

    def test_update_reservation_move_failure(self):
        # Moving an active reservation when no other host matches fails
        # with NotEnoughHostsAvailable.
        values = {
            'start_date': datetime.datetime(2013, 12, 20, 20, 00),
            'end_date': datetime.datetime(2013, 12, 20, 21, 30)
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'active'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00)
        }
        host_reservation_get = self.patch(
            self.db_api,
            'host_reservation_get')
        host_reservation_get.return_value = {
            'count_range': '1-1',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [{'id': 'host1'}]
        # The target window conflicts with another booking on host1.
        get_reserved_periods = self.patch(self.db_utils,
                                          'get_reserved_periods')
        get_reserved_periods.return_value = [
            (datetime.datetime(2013, 12, 20, 20, 30),
             datetime.datetime(2013, 12, 20, 21, 00))
        ]
        get_computehosts = self.patch(self.nova.ReservationPool,
                                      'get_computehosts')
        get_computehosts.return_value = ['host1']
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = []
        self.assertRaises(
            manager_exceptions.NotEnoughHostsAvailable,
            self.fake_phys_plugin.update_reservation,
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        reservation_get.assert_called()

    def test_update_reservation_move_overlap(self):
        # Moving to a window overlapping the current one on the same host
        # requires no reallocation.
        values = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 30),
            'end_date': datetime.datetime(2013, 12, 19, 21, 30)
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00)
        }
        host_reservation_get = self.patch(
            self.db_api,
            'host_reservation_get')
        host_reservation_get.return_value = {
            'count_range': '1-1',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [{'id': 'host1'}]
        # The only booked period is the overlap with this reservation itself.
        get_reserved_periods = self.patch(self.db_utils,
                                          'get_reserved_periods')
        get_reserved_periods.return_value = [
            (datetime.datetime(2013, 12, 19, 20, 30),
             datetime.datetime(2013, 12, 19, 21, 00))
        ]
        host_allocation_create = self.patch(
            self.db_api,
            'host_allocation_create')
        host_allocation_destroy = self.patch(
            self.db_api,
            'host_allocation_destroy')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_allocation_create.assert_not_called()
        host_allocation_destroy.assert_not_called()

    def test_update_reservation_move_realloc(self):
        # Moving to a window that conflicts on the current host reallocates
        # the reservation onto another matching host.
        values = {
            'start_date': datetime.datetime(2013, 12, 20, 20, 00),
            'end_date': datetime.datetime(2013, 12, 20, 21, 30)
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': u'10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2013, 12, 19, 20, 00),
            'end_date': datetime.datetime(2013, 12, 19, 21, 00)
        }
        host_reservation_get = self.patch(
            self.db_api,
            'host_reservation_get')
        host_reservation_get.return_value = {
            'aggregate_id': 1,
            'count_range': '1-1',
            'hypervisor_properties': '["=", "$memory_mb", "256"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api,
            'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': u'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [{'id': 'host1'},
                                                {'id': 'host2'}]
        host_allocation_create = self.patch(
            self.db_api,
            'host_allocation_create')
        host_allocation_destroy = self.patch(
            self.db_api,
            'host_allocation_destroy')
        # host1 is busy during the new window; host2 is the candidate.
        get_reserved_periods = self.patch(self.db_utils,
                                          'get_reserved_periods')
        get_reserved_periods.return_value = [
            (datetime.datetime(2013, 12, 20, 20, 30),
             datetime.datetime(2013, 12, 20, 21, 00))
        ]
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = ['host2']
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_called_with(
            u'91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
        host_allocation_destroy.assert_called_with(
            'dd305477-4df8-4547-87f6-69069ee546a6')
        host_allocation_create.assert_called_with(
            {
                'compute_host_id': 'host2',
                'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
            }
        )

    def test_update_reservation_min_increase_success(self):
        # Raising min allocates extra matching hosts and widens count_range.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'min': 3
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '2-3',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'},
            {'id': 'host3'}
        ]
        host_allocation_destroy = self.patch(self.db_api,
                                             'host_allocation_destroy')
        host_allocation_create = self.patch(self.db_api,
                                            'host_allocation_create')
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = ['host3']
        host_reservation_update = self.patch(self.db_api,
                                             'host_reservation_update')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
        # One additional host is needed (2 allocated, min now 3): '1-1'.
        matching_hosts.assert_called_with(
            '["=", "$memory_mb", "16384"]', '', '1-1',
            datetime.datetime(2017, 7, 12, 20, 00),
            datetime.datetime(2017, 7, 12, 21, 00)
        )
        host_allocation_destroy.assert_not_called()
        host_allocation_create.assert_called_with(
            {
                'compute_host_id': 'host3',
                'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
            }
        )
        host_reservation_update.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            {'count_range': '3-3'}
        )

    def test_update_reservation_min_increase_fail(self):
        # Raising min beyond the number of matching hosts fails.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'min': 3
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '2-3',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'}
        ]
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = []
        self.assertRaises(
            manager_exceptions.NotEnoughHostsAvailable,
            self.fake_phys_plugin.update_reservation,
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        matching_hosts.assert_called_with(
            '["=", "$memory_mb", "16384"]', '', '1-1',
            datetime.datetime(2017, 7, 12, 20, 00),
            datetime.datetime(2017, 7, 12, 21, 00)
        )

    def test_update_reservation_min_decrease(self):
        # Lowering min only narrows count_range; allocations are untouched.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'min': 1
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '2-2',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'}
        ]
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        host_allocation_destroy = self.patch(self.db_api,
                                             'host_allocation_destroy')
        host_allocation_create = self.patch(self.db_api,
                                            'host_allocation_create')
        host_reservation_update = self.patch(self.db_api,
                                             'host_reservation_update')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        matching_hosts.assert_not_called()
        host_allocation_destroy.assert_not_called()
        host_allocation_create.assert_not_called()
        host_reservation_update.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            {'count_range': '1-2'}
        )

    def test_update_reservation_max_increase_alloc(self):
        # Raising max allocates an extra host when one matches.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'max': 3
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '1-2',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'},
            {'id': 'host3'}
        ]
        host_allocation_destroy = self.patch(self.db_api,
                                             'host_allocation_destroy')
        host_allocation_create = self.patch(self.db_api,
                                            'host_allocation_create')
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = ['host3']
        host_reservation_update = self.patch(self.db_api,
                                             'host_reservation_update')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
        # Up to one extra optional host can be added: range '0-1'.
        matching_hosts.assert_called_with(
            '["=", "$memory_mb", "16384"]', '', '0-1',
            datetime.datetime(2017, 7, 12, 20, 00),
            datetime.datetime(2017, 7, 12, 21, 00)
        )
        host_allocation_destroy.assert_not_called()
        host_allocation_create.assert_called_with(
            {
                'compute_host_id': 'host3',
                'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
            }
        )
        host_reservation_update.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            {'count_range': '1-3'}
        )

    def test_update_active_reservation_max_increase_alloc(self):
        # For an active reservation, the newly-allocated host is also added
        # to the aggregate/pool immediately.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'max': 3
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id':
            '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'active'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '1-2',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': '',
            'reservation_id': u'706eb3bc-07ed-4383-be93-b32845ece672',
            'aggregate_id': 1,
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'},
            {'id': 'host3'}
        ]
        host_allocation_destroy = self.patch(self.db_api,
                                             'host_allocation_destroy')
        host_allocation_create = self.patch(self.db_api,
                                            'host_allocation_create')
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = ['host3']
        # The new host's service name is looked up to add it to the pool.
        host_get = self.patch(self.db_api, 'host_get')
        host_get.return_value = {'service_name': 'host3_hostname'}
        add_computehost = self.patch(
            self.nova.ReservationPool, 'add_computehost')
        host_reservation_update = self.patch(self.db_api,
                                             'host_reservation_update')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
        matching_hosts.assert_called_with(
            '["=", "$memory_mb", "16384"]', '', '0-1',
            datetime.datetime(2017, 7, 12, 20, 00),
            datetime.datetime(2017, 7, 12, 21, 00)
        )
        host_allocation_destroy.assert_not_called()
        host_allocation_create.assert_called_with(
            {
                'compute_host_id': 'host3',
                'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672'
            }
        )
        add_computehost.assert_called_with(1, ['host3_hostname'])
        host_reservation_update.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            {'count_range': '1-3'}
        )

    def test_update_reservation_max_increase_noalloc(self):
        # Raising max with no additional matching host still widens
        # count_range without touching allocations.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'max': 3
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '1-2',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'}
        ]
        matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts')
        matching_hosts.return_value = []
        host_reservation_update = self.patch(self.db_api,
                                             'host_reservation_update')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
        matching_hosts.assert_called_with(
            '["=", "$memory_mb", "16384"]', '', '0-1',
            datetime.datetime(2017, 7, 12, 20, 00),
            datetime.datetime(2017, 7, 12, 21, 00)
        )
        host_reservation_update.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            {'count_range': '1-3'}
        )

    def test_update_reservation_max_decrease(self):
        # Lowering max below the current allocation count destroys the
        # surplus allocation and narrows count_range.
        values = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00),
            'max': 1
        }
        reservation_get = self.patch(self.db_api, 'reservation_get')
        reservation_get.return_value = {
            'lease_id': '10870923-6d56-45c9-b592-f788053f5baa',
            'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'status': 'pending'
        }
        lease_get = self.patch(self.db_api, 'lease_get')
        lease_get.return_value = {
            'start_date': datetime.datetime(2017, 7, 12, 20, 00),
            'end_date': datetime.datetime(2017, 7, 12, 21, 00)
        }
        host_reservation_get = self.patch(self.db_api, 'host_reservation_get')
        host_reservation_get.return_value = {
            'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
            'count_range': '1-2',
            'hypervisor_properties': '["=", "$memory_mb", "16384"]',
            'resource_properties': ''
        }
        host_allocation_get_all = self.patch(
            self.db_api, 'host_allocation_get_all_by_values')
        host_allocation_get_all.return_value = [
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a6',
                'compute_host_id': 'host1'
            },
            {
                'id': 'dd305477-4df8-4547-87f6-69069ee546a7',
                'compute_host_id': 'host2'
            }
        ]
        host_get_all_by_queries = self.patch(self.db_api,
                                             'host_get_all_by_queries')
        host_get_all_by_queries.return_value = [
            {'id': 'host1'},
            {'id': 'host2'}
        ]
        host_allocation_destroy = self.patch(self.db_api,
                                             'host_allocation_destroy')
        host_reservation_update = self.patch(self.db_api,
                                             'host_reservation_update')
        self.fake_phys_plugin.update_reservation(
            '706eb3bc-07ed-4383-be93-b32845ece672',
            values)
        host_reservation_get.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b')
        host_allocation_destroy.assert_called_with(
            'dd305477-4df8-4547-87f6-69069ee546a6')
        host_reservation_update.assert_called_with(
            '91253650-cc34-4c4f-bbe8-c943aa7d0c9b',
{'count_range': '1-1'} ) def test_update_reservation_realloc_with_properties_change(self): values = { 'start_date': datetime.datetime(2017, 7, 12, 20, 00), 'end_date': datetime.datetime(2017, 7, 12, 21, 00), 'hypervisor_properties': '["=", "$memory_mb", "32768"]', } reservation_get = self.patch(self.db_api, 'reservation_get') reservation_get.return_value = { 'lease_id': '10870923-6d56-45c9-b592-f788053f5baa', 'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b', 'status': 'pending' } lease_get = self.patch(self.db_api, 'lease_get') lease_get.return_value = { 'start_date': datetime.datetime(2017, 7, 12, 20, 00), 'end_date': datetime.datetime(2017, 7, 12, 21, 00) } host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = { 'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b', 'count_range': '1-1', 'hypervisor_properties': '["=", "$memory_mb", "16384"]', 'resource_properties': '' } host_allocation_get_all = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_allocation_get_all.return_value = [ { 'id': 'dd305477-4df8-4547-87f6-69069ee546a6', 'compute_host_id': 'host1' } ] host_get_all_by_queries = self.patch(self.db_api, 'host_get_all_by_queries') host_get_all_by_queries.return_value = [{'id': 'host2'}] matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts') matching_hosts.return_value = ['host2'] host_allocation_create = self.patch(self.db_api, 'host_allocation_create') host_allocation_destroy = self.patch(self.db_api, 'host_allocation_destroy') host_reservation_update = self.patch(self.db_api, 'host_reservation_update') self.fake_phys_plugin.update_reservation( '706eb3bc-07ed-4383-be93-b32845ece672', values) host_reservation_get.assert_called_with( '91253650-cc34-4c4f-bbe8-c943aa7d0c9b') matching_hosts.assert_called_with( '["=", "$memory_mb", "32768"]', '', '1-1', datetime.datetime(2017, 7, 12, 20, 00), datetime.datetime(2017, 7, 12, 21, 00) ) host_allocation_create.assert_called_with( { 
'compute_host_id': 'host2', 'reservation_id': '706eb3bc-07ed-4383-be93-b32845ece672' } ) host_allocation_destroy.assert_called_with( 'dd305477-4df8-4547-87f6-69069ee546a6' ) host_reservation_update.assert_called_with( '91253650-cc34-4c4f-bbe8-c943aa7d0c9b', {'hypervisor_properties': '["=", "$memory_mb", "32768"]'} ) def test_update_reservation_no_requested_hosts_available(self): values = { 'start_date': datetime.datetime(2017, 7, 12, 20, 00), 'end_date': datetime.datetime(2017, 7, 12, 21, 00), 'resource_properties': '[">=", "$vcpus", "32768"]' } reservation_get = self.patch(self.db_api, 'reservation_get') reservation_get.return_value = { 'lease_id': '10870923-6d56-45c9-b592-f788053f5baa', 'resource_id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b', 'status': 'pending' } lease_get = self.patch(self.db_api, 'lease_get') lease_get.return_value = { 'start_date': datetime.datetime(2013, 12, 19, 20, 00), 'end_date': datetime.datetime(2013, 12, 19, 21, 00) } host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = { 'id': '91253650-cc34-4c4f-bbe8-c943aa7d0c9b', 'count_range': '1-1', 'hypervisor_properties': '["=", "$memory_mb", "16384"]', 'resource_properties': '' } host_allocation_get_all = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_allocation_get_all.return_value = [ { 'id': 'dd305477-4df8-4547-87f6-69069ee546a6', 'compute_host_id': 'host1' } ] host_get_all_by_queries = self.patch(self.db_api, 'host_get_all_by_queries') host_get_all_by_queries.return_value = [] matching_hosts = self.patch(self.fake_phys_plugin, '_matching_hosts') matching_hosts.return_value = [] self.assertRaises( manager_exceptions.NotEnoughHostsAvailable, self.fake_phys_plugin.update_reservation, '441c1476-9f8f-4700-9f30-cd9b6fef3509', values) def test_on_start(self): host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = { 'reservation_id': u'593e7028-c0d1-4d76-8642-2ffd890b324c', 
'aggregate_id': 1, } host_allocation_get_all_by_values = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_allocation_get_all_by_values.return_value = [ {'compute_host_id': 'host1'}, ] host_get = self.patch(self.db_api, 'host_get') host_get.return_value = {'service_name': 'host1_hostname'} add_computehost = self.patch( self.nova.ReservationPool, 'add_computehost') self.fake_phys_plugin.on_start(u'04de74e8-193a-49d2-9ab8-cba7b49e45e8') add_computehost.assert_called_with(1, ['host1_hostname']) def test_before_end_with_no_action(self): host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = {'before_end': ''} reservationpool = self.patch(self.nova, 'ReservationPool') self.fake_phys_plugin.before_end( u'04de74e8-193a-49d2-9ab8-cba7b49e45e8') reservationpool.assert_not_called() def test_before_end_with_snapshot(self): host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = { 'aggregate_id': 1, 'before_end': 'snapshot' } get_computehosts = self.patch(self.nova.ReservationPool, 'get_computehosts') get_computehosts.return_value = ['host'] list_servers = self.patch(self.ServerManager, 'list') list_servers.return_value = ['server1', 'server2'] create_image = self.patch(self.ServerManager, 'create_image') self.fake_phys_plugin.before_end( u'04de74e8-193a-49d2-9ab8-cba7b49e45e8') create_image.assert_any_call(server='server1') create_image.assert_any_call(server='server2') def test_on_end_with_instances(self): host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = { 'id': u'04de74e8-193a-49d2-9ab8-cba7b49e45e8', 'reservation_id': u'593e7028-c0d1-4d76-8642-2ffd890b324c', 'aggregate_id': 1 } host_reservation_update = self.patch( self.db_api, 'host_reservation_update') host_allocation_get_all_by_values = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_allocation_get_all_by_values.return_value = 
[ {'id': u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f', 'compute_host_id': u'cdae2a65-236f-475a-977d-f6ad82f828b7', }, ] host_allocation_destroy = self.patch( self.db_api, 'host_allocation_destroy') get_computehosts = self.patch(self.nova.ReservationPool, 'get_computehosts') get_computehosts.return_value = ['host'] list_servers = self.patch(self.ServerManager, 'list') list_servers.return_value = ['server1', 'server2'] delete_server = self.patch(self.ServerManager, 'delete') # Mock delete_server so the first call fails to find the instance. # This can happen when the user is deleting instances concurrently. delete_server.side_effect = mock.Mock( side_effect=[nova_exceptions.NotFound( 404, 'Instance server1 could not be found.'), None]) delete_pool = self.patch(self.nova.ReservationPool, 'delete') self.fake_phys_plugin.on_end(u'04de74e8-193a-49d2-9ab8-cba7b49e45e8') host_reservation_update.assert_called_with( u'04de74e8-193a-49d2-9ab8-cba7b49e45e8', {'status': 'completed'}) host_allocation_destroy.assert_called_with( u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f') list_servers.assert_called_with(search_opts={'host': 'host', 'all_tenants': 1}) delete_server.assert_any_call(server='server1') delete_server.assert_any_call(server='server2') delete_pool.assert_called_with(1) def test_on_end_without_instances(self): host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = { 'id': u'04de74e8-193a-49d2-9ab8-cba7b49e45e8', 'reservation_id': u'593e7028-c0d1-4d76-8642-2ffd890b324c', 'aggregate_id': 1 } host_reservation_update = self.patch( self.db_api, 'host_reservation_update') host_allocation_get_all_by_values = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_allocation_get_all_by_values.return_value = [ {'id': u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f', 'compute_host_id': u'cdae2a65-236f-475a-977d-f6ad82f828b7', }, ] host_allocation_destroy = self.patch( self.db_api, 'host_allocation_destroy') get_computehosts = 
self.patch(self.nova.ReservationPool, 'get_computehosts') get_computehosts.return_value = ['host'] list_servers = self.patch(self.ServerManager, 'list') list_servers.return_value = [] delete_server = self.patch(self.ServerManager, 'delete') delete_pool = self.patch(self.nova.ReservationPool, 'delete') self.fake_phys_plugin.on_end(u'04de74e8-193a-49d2-9ab8-cba7b49e45e8') host_reservation_update.assert_called_with( u'04de74e8-193a-49d2-9ab8-cba7b49e45e8', {'status': 'completed'}) host_allocation_destroy.assert_called_with( u'bfa9aa0b-8042-43eb-a4e6-4555838bf64f') delete_server.assert_not_called() delete_pool.assert_called_with(1) def test_heal_reservations_before_start_and_resources_changed(self): failed_host = {'id': '1'} dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'pending', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1', 'computehost_allocations': [{ 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' }] } get_reservations = self.patch(self.db_utils, 'get_reservations_by_host_ids') get_reservations.return_value = [dummy_reservation] reallocate = self.patch(self.fake_phys_plugin, '_reallocate') reallocate.return_value = True result = self.fake_phys_plugin.heal_reservations( [failed_host], datetime.datetime(2020, 1, 1, 12, 00), datetime.datetime(2020, 1, 1, 13, 00)) reallocate.assert_called_once_with( dummy_reservation['computehost_allocations'][0]) self.assertEqual({}, result) def test_heal_reservations_before_start_and_missing_resources(self): failed_host = {'id': '1'} dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'pending', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1', 'computehost_allocations': [{ 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' }] } get_reservations = self.patch(self.db_utils, 
'get_reservations_by_host_ids') get_reservations.return_value = [dummy_reservation] reallocate = self.patch(self.fake_phys_plugin, '_reallocate') reallocate.return_value = False result = self.fake_phys_plugin.heal_reservations( [failed_host], datetime.datetime(2020, 1, 1, 12, 00), datetime.datetime(2020, 1, 1, 13, 00)) reallocate.assert_called_once_with( dummy_reservation['computehost_allocations'][0]) self.assertEqual( {dummy_reservation['id']: {'missing_resources': True}}, result) def test_heal_active_reservations_and_resources_changed(self): failed_host = {'id': '1'} dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'active', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1', 'computehost_allocations': [{ 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' }] } get_reservations = self.patch(self.db_utils, 'get_reservations_by_host_ids') get_reservations.return_value = [dummy_reservation] reallocate = self.patch(self.fake_phys_plugin, '_reallocate') reallocate.return_value = True result = self.fake_phys_plugin.heal_reservations( [failed_host], datetime.datetime(2020, 1, 1, 12, 00), datetime.datetime(2020, 1, 1, 13, 00)) reallocate.assert_called_once_with( dummy_reservation['computehost_allocations'][0]) self.assertEqual( {dummy_reservation['id']: {'resources_changed': True}}, result) def test_heal_active_reservations_and_missing_resources(self): failed_host = {'id': '1'} dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'active', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1', 'computehost_allocations': [{ 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' }] } get_reservations = self.patch(self.db_utils, 'get_reservations_by_host_ids') get_reservations.return_value = [dummy_reservation] reallocate = 
self.patch(self.fake_phys_plugin, '_reallocate') reallocate.return_value = False result = self.fake_phys_plugin.heal_reservations( [failed_host], datetime.datetime(2020, 1, 1, 12, 00), datetime.datetime(2020, 1, 1, 13, 00)) reallocate.assert_called_once_with( dummy_reservation['computehost_allocations'][0]) self.assertEqual( {dummy_reservation['id']: {'missing_resources': True}}, result) def test_reallocate_before_start(self): failed_host = {'id': '1'} new_host = {'id': '2'} dummy_allocation = { 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' } dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'pending', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1' } dummy_host_reservation = { 'aggregate_id': 1 } dummy_lease = { 'name': 'lease-name', 'start_date': datetime.datetime(2020, 1, 1, 12, 00), 'end_date': datetime.datetime(2020, 1, 2, 12, 00), 'trust_id': 'trust-1' } reservation_get = self.patch(self.db_api, 'reservation_get') reservation_get.return_value = dummy_reservation host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = dummy_host_reservation lease_get = self.patch(self.db_api, 'lease_get') lease_get.return_value = dummy_lease matching_hosts = self.patch(host_plugin.PhysicalHostPlugin, '_matching_hosts') matching_hosts.return_value = [new_host['id']] alloc_update = self.patch(self.db_api, 'host_allocation_update') with mock.patch.object(datetime, 'datetime', mock.Mock(wraps=datetime.datetime)) as patched: patched.utcnow.return_value = datetime.datetime( 2020, 1, 1, 11, 00) result = self.fake_phys_plugin._reallocate(dummy_allocation) matching_hosts.assert_called_once_with( dummy_reservation['hypervisor_properties'], dummy_reservation['resource_properties'], '1-1', dummy_lease['start_date'], dummy_lease['end_date']) alloc_update.assert_called_once_with( dummy_allocation['id'], 
{'compute_host_id': new_host['id']}) self.assertEqual(True, result) def test_reallocate_active(self): failed_host = {'id': '1', 'service_name': 'compute-1'} new_host = {'id': '2', 'service_name': 'compute-2'} dummy_allocation = { 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' } dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'active', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1' } dummy_host_reservation = { 'aggregate_id': 1 } dummy_lease = { 'name': 'lease-name', 'start_date': datetime.datetime(2020, 1, 1, 12, 00), 'end_date': datetime.datetime(2020, 1, 2, 12, 00), 'trust_id': 'trust-1' } reservation_get = self.patch(self.db_api, 'reservation_get') reservation_get.return_value = dummy_reservation lease_get = self.patch(self.db_api, 'lease_get') lease_get.return_value = dummy_lease host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = dummy_host_reservation host_get = self.patch(self.db_api, 'host_get') host_get.side_effect = [failed_host, new_host] matching_hosts = self.patch(host_plugin.PhysicalHostPlugin, '_matching_hosts') matching_hosts.return_value = [new_host['id']] alloc_update = self.patch(self.db_api, 'host_allocation_update') with mock.patch.object(datetime, 'datetime', mock.Mock(wraps=datetime.datetime)) as patched: patched.utcnow.return_value = datetime.datetime( 2020, 1, 1, 13, 00) result = self.fake_phys_plugin._reallocate(dummy_allocation) self.remove_compute_host.assert_called_once_with( dummy_host_reservation['aggregate_id'], failed_host['service_name']) matching_hosts.assert_called_once_with( dummy_reservation['hypervisor_properties'], dummy_reservation['resource_properties'], '1-1', datetime.datetime(2020, 1, 1, 13, 00), dummy_lease['end_date']) alloc_update.assert_called_once_with( dummy_allocation['id'], {'compute_host_id': new_host['id']}) 
self.add_compute_host( dummy_host_reservation['aggregate_id'], new_host['service_name']) self.assertEqual(True, result) def test_reallocate_missing_resources(self): failed_host = {'id': '1'} dummy_allocation = { 'id': 'alloc-1', 'compute_host_id': failed_host['id'], 'reservation_id': 'rsrv-1' } dummy_reservation = { 'id': 'rsrv-1', 'resource_type': plugin.RESOURCE_TYPE, 'lease_id': 'lease-1', 'status': 'pending', 'hypervisor_properties': [], 'resource_properties': [], 'resource_id': 'resource-1' } dummy_host_reservation = { 'aggregate_id': 1 } dummy_lease = { 'name': 'lease-name', 'start_date': datetime.datetime(2020, 1, 1, 12, 00), 'end_date': datetime.datetime(2020, 1, 2, 12, 00), 'trust_id': 'trust-1' } reservation_get = self.patch(self.db_api, 'reservation_get') reservation_get.return_value = dummy_reservation host_reservation_get = self.patch(self.db_api, 'host_reservation_get') host_reservation_get.return_value = dummy_host_reservation lease_get = self.patch(self.db_api, 'lease_get') lease_get.return_value = dummy_lease matching_hosts = self.patch(host_plugin.PhysicalHostPlugin, '_matching_hosts') matching_hosts.return_value = [] alloc_destroy = self.patch(self.db_api, 'host_allocation_destroy') with mock.patch.object(datetime, 'datetime', mock.Mock(wraps=datetime.datetime)) as patched: patched.utcnow.return_value = datetime.datetime( 2020, 1, 1, 11, 00) result = self.fake_phys_plugin._reallocate(dummy_allocation) matching_hosts.assert_called_once_with( dummy_reservation['hypervisor_properties'], dummy_reservation['resource_properties'], '1-1', dummy_lease['start_date'], dummy_lease['end_date']) alloc_destroy.assert_called_once_with(dummy_allocation['id']) self.assertEqual(False, result) def test_matching_hosts_not_allocated_hosts(self): def host_allocation_get_all_by_values(**kwargs): if kwargs['compute_host_id'] == 'host1': return True host_get = self.patch( self.db_api, 'reservable_host_get_all_by_queries') host_get.return_value = [ {'id': 'host1'}, {'id': 
'host2'}, {'id': 'host3'}, ] host_get = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_get.side_effect = host_allocation_get_all_by_values host_get = self.patch( self.db_utils, 'get_free_periods') host_get.return_value = [ (datetime.datetime(2013, 12, 19, 20, 00), datetime.datetime(2013, 12, 19, 21, 00)), ] result = self.fake_phys_plugin._matching_hosts( '[]', '[]', '1-3', datetime.datetime(2013, 12, 19, 20, 00), datetime.datetime(2013, 12, 19, 21, 00)) self.assertEqual(['host2', 'host3'], result) def test_matching_hosts_allocated_hosts(self): def host_allocation_get_all_by_values(**kwargs): if kwargs['compute_host_id'] == 'host1': return True host_get = self.patch( self.db_api, 'reservable_host_get_all_by_queries') host_get.return_value = [ {'id': 'host1'}, {'id': 'host2'}, {'id': 'host3'}, ] host_get = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_get.side_effect = host_allocation_get_all_by_values host_get = self.patch( self.db_utils, 'get_free_periods') host_get.return_value = [ (datetime.datetime(2013, 12, 19, 20, 00), datetime.datetime(2013, 12, 19, 21, 00)), ] result = self.fake_phys_plugin._matching_hosts( '[]', '[]', '3-3', datetime.datetime(2013, 12, 19, 20, 00), datetime.datetime(2013, 12, 19, 21, 00)) self.assertEqual(['host1', 'host2', 'host3'], result) def test_matching_hosts_allocated_hosts_with_cleaning_time(self): def host_allocation_get_all_by_values(**kwargs): if kwargs['compute_host_id'] == 'host1': return True self.cfg.CONF.set_override('cleaning_time', '5') host_get = self.patch( self.db_api, 'reservable_host_get_all_by_queries') host_get.return_value = [ {'id': 'host1'}, {'id': 'host2'}, {'id': 'host3'}, ] host_get = self.patch( self.db_api, 'host_allocation_get_all_by_values') host_get.side_effect = host_allocation_get_all_by_values host_get = self.patch( self.db_utils, 'get_free_periods') host_get.return_value = [ (datetime.datetime(2013, 12, 19, 20, 00) - datetime.timedelta(minutes=5), 
datetime.datetime(2013, 12, 19, 21, 00) + datetime.timedelta(minutes=5)) ] result = self.fake_phys_plugin._matching_hosts( '[]', '[]', '3-3', datetime.datetime(2013, 12, 19, 20, 00), datetime.datetime(2013, 12, 19, 21, 00)) self.assertEqual(['host1', 'host2', 'host3'], result) def test_matching_hosts_not_matching(self): host_get = self.patch( self.db_api, 'reservable_host_get_all_by_queries') host_get.return_value = [] result = self.fake_phys_plugin._matching_hosts( '["=", "$memory_mb", "2048"]', '[]', '1-1', datetime.datetime(2013, 12, 19, 20, 00), datetime.datetime(2013, 12, 19, 21, 00)) self.assertEqual([], result) def test_check_params_with_valid_before_end(self): values = { 'min': 1, 'max': 2, 'resource_properties': '', 'hypervisor_properties': '', 'before_end': 'snapshot' } self.fake_phys_plugin._check_params(values) self.assertEqual(values['before_end'], 'snapshot') def test_check_params_with_invalid_before_end(self): values = { 'min': 1, 'max': 2, 'resource_properties': '', 'hypervisor_properties': '', 'before_end': 'invalid' } self.assertRaises(manager_exceptions.MalformedParameter, self.fake_phys_plugin._check_params, values) def test_check_params_without_before_end(self): self.cfg.CONF.set_override('before_end', '', group='physical:host') values = { 'min': 1, 'max': 2, 'resource_properties': '', 'hypervisor_properties': '' } self.fake_phys_plugin._check_params(values) self.assertEqual(values['before_end'], 'default') class PhysicalHostMonitorPluginTestCase(tests.TestCase): def setUp(self): super(PhysicalHostMonitorPluginTestCase, self).setUp() self.patch(nova_client, 'Client') self.host_monitor_plugin = host_plugin.PhysicalHostMonitorPlugin() def test_configuration(self): # reset the singleton at first host_plugin.PhysicalHostMonitorPlugin._instance = None self.cfg = self.useFixture(conf_fixture.Config(CONF)) self.cfg.config(os_admin_username='fake-user') self.cfg.config(os_admin_password='fake-passwd') 
self.cfg.config(os_admin_user_domain_name='fake-user-domain') self.cfg.config(os_admin_project_name='fake-pj-name') self.cfg.config(os_admin_project_domain_name='fake-pj-domain') self.host_monitor_plugin = host_plugin.PhysicalHostMonitorPlugin() self.assertEqual('fake-user', self.host_monitor_plugin.username) self.assertEqual("fake-passwd", self.host_monitor_plugin.password) self.assertEqual("fake-user-domain", self.host_monitor_plugin.user_domain_name) self.assertEqual("fake-pj-name", self.host_monitor_plugin.project_name) self.assertEqual("fake-pj-domain", self.host_monitor_plugin.project_domain_name) def test_notification_callback_disabled_true(self): failed_host = {'hypervisor_hostname': 'hypvsr1'} event_type = 'service.update' payload = { 'nova_object.namespace': 'nova', 'nova_object.name': 'ServiceStatusPayload', 'nova_object.version': '1.1', 'nova_object.data': { 'host': failed_host['hypervisor_hostname'], 'disabled': True, 'last_seen_up': '2012-10-29T13:42:05Z', 'binary': 'nova-compute', 'topic': 'compute', 'disabled_reason': None, 'report_count': 1, 'forced_down': False, 'version': 22, 'availability_zone': None, 'uuid': 'fa69c544-906b-4a6a-a9c6-c1f7a8078c73' } } host_get_all = self.patch(db_api, 'reservable_host_get_all_by_queries') host_get_all.return_value = [failed_host] handle_failures = self.patch(self.host_monitor_plugin, '_handle_failures') handle_failures.return_value = {'rsrv-1': {'missing_resources': True}} result = self.host_monitor_plugin.notification_callback(event_type, payload) host_get_all.assert_called_once_with( ['hypervisor_hostname == ' + payload['nova_object.data']['host']]) self.assertEqual({'rsrv-1': {'missing_resources': True}}, result) def test_notification_callback_no_failure(self): event_type = 'service.update' payload = { 'nova_object.namespace': 'nova', 'nova_object.name': 'ServiceStatusPayload', 'nova_object.version': '1.1', 'nova_object.data': { 'host': 'compute-1', 'disabled': False, 'last_seen_up': '2012-10-29T13:42:05Z', 
'binary': 'nova-compute', 'topic': 'compute', 'disabled_reason': None, 'report_count': 1, 'forced_down': False, 'version': 22, 'availability_zone': None, 'uuid': 'fa69c544-906b-4a6a-a9c6-c1f7a8078c73' } } host_get_all = self.patch(db_api, 'host_get_all_by_queries') host_get_all.return_value = [] handle_failures = self.patch(self.host_monitor_plugin, '_handle_failures') result = self.host_monitor_plugin.notification_callback(event_type, payload) host_get_all.assert_called_once_with( ['reservable == 0', 'hypervisor_hostname == ' + payload['nova_object.data']['host']]) handle_failures.assert_not_called() self.assertEqual({}, result) def test_notification_callback_recover(self): recovered_host = {'hypervisor_hostname': 'hypvsr1', 'id': 1} event_type = 'service.update' payload = { 'nova_object.namespace': 'nova', 'nova_object.name': 'ServiceStatusPayload', 'nova_object.version': '1.1', 'nova_object.data': { 'host': 'compute-1', 'disabled': False, 'last_seen_up': '2012-10-29T13:42:05Z', 'binary': 'nova-compute', 'topic': 'compute', 'disabled_reason': None, 'report_count': 1, 'forced_down': False, 'version': 22, 'availability_zone': None, 'uuid': 'fa69c544-906b-4a6a-a9c6-c1f7a8078c73' } } host_get_all = self.patch(db_api, 'host_get_all_by_queries') host_get_all.return_value = [recovered_host] handle_failures = self.patch(self.host_monitor_plugin, '_handle_failures') host_update = self.patch(db_api, 'host_update') result = self.host_monitor_plugin.notification_callback(event_type, payload) host_get_all.assert_called_once_with( ['reservable == 0', 'hypervisor_hostname == ' + payload['nova_object.data']['host']]) host_update.assert_called_once_with(recovered_host['id'], {'reservable': True}) handle_failures.assert_not_called() self.assertEqual({}, result) def test_poll_resource_failures_state_down(self): hosts = [ {'id': '1', 'hypervisor_hostname': 'hypvsr1', 'reservable': True}, {'id': '2', 'hypervisor_hostname': 'hypvsr2', 'reservable': True}, ] host_get_all = 
self.patch(db_api, 'host_get_all_by_filters') host_get_all.return_value = hosts hypervisors_list = self.patch( self.host_monitor_plugin.nova.hypervisors, 'list') hypervisors_list.return_value = [ mock.MagicMock(id=1, state='down', status='enabled'), mock.MagicMock(id=2, state='down', status='enabled')] result = self.host_monitor_plugin._poll_resource_failures() self.assertEqual((hosts, []), result) def test_poll_resource_failures_status_disabled(self): hosts = [ {'id': '1', 'hypervisor_hostname': 'hypvsr1', 'reservable': True}, {'id': '2', 'hypervisor_hostname': 'hypvsr2', 'reservable': True}, ] host_get_all = self.patch(db_api, 'host_get_all_by_filters') host_get_all.return_value = hosts hypervisors_list = self.patch( self.host_monitor_plugin.nova.hypervisors, 'list') hypervisors_list.return_value = [ mock.MagicMock(id=1, state='up', status='disabled'), mock.MagicMock(id=2, state='up', status='disabled')] result = self.host_monitor_plugin._poll_resource_failures() self.assertEqual((hosts, []), result) def test_poll_resource_failures_nothing(self): hosts = [ {'id': '1', 'hypervisor_hostname': 'hypvsr1', 'reservable': True}, {'id': '2', 'hypervisor_hostname': 'hypvsr2', 'reservable': True}, ] host_get_all = self.patch(db_api, 'host_get_all_by_filters') host_get_all.return_value = hosts hypervisors_list = self.patch( self.host_monitor_plugin.nova.hypervisors, 'list') hypervisors_list.return_value = [ mock.MagicMock(id=1, state='up', status='enabled'), mock.MagicMock(id=2, state='up', status='enabled')] result = self.host_monitor_plugin._poll_resource_failures() self.assertEqual(([], []), result) def test_poll_resource_failures_recover(self): hosts = [ {'id': '1', 'hypervisor_hostname': 'hypvsr1', 'reservable': False}, {'id': '2', 'hypervisor_hostname': 'hypvsr2', 'reservable': False}, ] host_get_all = self.patch(db_api, 'host_get_all_by_filters') host_get_all.return_value = hosts hypervisors_list = self.patch( self.host_monitor_plugin.nova.hypervisors, 'list') 
hypervisors_list.return_value = [ mock.MagicMock(id=1, state='up', status='enabled'), mock.MagicMock(id=2, state='up', status='enabled')] result = self.host_monitor_plugin._poll_resource_failures() self.assertEqual(([], hosts), result) def test_handle_failures(self): failed_hosts = [ {'id': '1', 'hypervisor_hostname': 'hypvsr1'} ] host_update = self.patch(db_api, 'host_update') heal = self.patch(self.host_monitor_plugin, 'heal') self.host_monitor_plugin._handle_failures(failed_hosts) host_update.assert_called_once_with(failed_hosts[0]['id'], {'reservable': False}) heal.assert_called_once() def test_heal(self): failed_hosts = [ {'id': '1', 'hypervisor_hostname': 'hypvsr1'} ] reservation_flags = { 'rsrv-1': {'missing_resources': True} } hosts_get = self.patch(db_api, 'unreservable_host_get_all_by_queries') hosts_get.return_value = failed_hosts get_healing_interval = self.patch(self.host_monitor_plugin, 'get_healing_interval') get_healing_interval.return_value = 60 healing_handler = mock.Mock() healing_handler.return_value = reservation_flags self.host_monitor_plugin.healing_handlers = [healing_handler] start_date = datetime.datetime(2020, 1, 1, 12, 00) with mock.patch.object(datetime, 'datetime', mock.Mock(wraps=datetime.datetime)) as patched: patched.utcnow.return_value = start_date result = self.host_monitor_plugin.heal() healing_handler.assert_called_once_with( failed_hosts, start_date, start_date + datetime.timedelta(minutes=60) ) self.assertEqual(reservation_flags, result)
stackforge/blazar
blazar/tests/plugins/oshosts/test_physical_host_plugin.py
Python
apache-2.0
103,122
<?php /** * BoxBilling * * @copyright BoxBilling, Inc (http://www.boxbilling.com) * @license Apache-2.0 * * Copyright BoxBilling, Inc * This source file is subject to the Apache-2.0 License that is bundled * with this source code in the file LICENSE */ namespace Box\Mod\Filemanager; class Service implements \Box\InjectionAwareInterface { protected $di = null; public function setDi($di) { $this->di = $di; } public function getDi() { return $this->di; } public function saveFile($path, $content = PHP_EOL) { $path = $this->_getPath($path); $bytes = $this->di['tools']->file_put_contents($content, $path); return ($bytes > 0); } public function create($path, $type) { $path = $this->_getPath($path); $res = false; switch ($type) { case 'dir': if (!$this->di['tools']->fileExists($path)) { $res = $this->di['tools']->mkdir($path, 0755); } else { throw new \Box_Exception('Directory already exists'); } break; case 'file': $res = $this->saveFile($path, ' '); break; default: throw new \Box_Exception('Unknown item type'); break; } return $res; } public function move($from, $to) { $from = $this->_getPath($from); $to = $this->_getPath($to) . DIRECTORY_SEPARATOR . basename($from); return $this->di['tools']->rename($from, $to); } public function getFiles($dir = DIRECTORY_SEPARATOR) { $dir = ($dir == DIRECTORY_SEPARATOR) ? DIRECTORY_SEPARATOR : (string)$dir; $dir = trim($dir, DIRECTORY_SEPARATOR); $dir = $this->_getPath($dir); $getdir = realpath($dir); if (empty($getdir)) { return array( 'filecount' => 0, 'files' => null, ); } $sd = @scandir($getdir); $sd = array_diff($sd, array('.', '..', '.svn', '.git')); $files = $dirs = array(); foreach ($sd as $file) { $path = $getdir . '/' . 
$file; if (is_file($path)) { $files[] = array('filename' => $file, 'type' => 'file', 'path' => $path, 'size' => filesize($path)); } else { $dirs[] = array('filename' => $file, 'type' => 'dir', 'path' => $path, 'size' => filesize($path)); } } $files = array_merge($dirs, $files); $out = array('files' => $files); $out['filecount'] = count($sd); return $out; } private function _getPath($path) { $_path = BB_PATH_ROOT . DIRECTORY_SEPARATOR; $path = str_replace($_path, '', $path); $path = trim($path, DIRECTORY_SEPARATOR); $path = str_replace('//', DIRECTORY_SEPARATOR, $_path . $path); return $path; } }
4Giedrius/boxbilling
src/bb-modules/Filemanager/Service.php
PHP
apache-2.0
3,051
__source__ = 'https://leetcode.com/problems/equal-tree-partition/discuss/'
# Time:  O()
# Space: O()
#
# Description: Leetcode # 663. Equal Tree Partition
#
# Given a binary tree with n nodes,
# your task is to check if it's possible to partition the tree to two trees
# which have the equal sum of values after removing exactly one edge on the original tree.
#
# Example 1:
# Input:
#     5
#    / \
#   10 10
#     /  \
#    2   3
#
# Output: True
# Explanation:
#     5
#    /
#   10
#
# Sum: 15
#
#    10
#   /  \
#  2    3
#
# Sum: 15
# Example 2:
# Input:
#     1
#    / \
#   2  10
#     /  \
#    2   20
#
# Output: False
# Explanation: You can't split the tree into two trees with equal sum after removing exactly one edge on the tree.
# Note:
# The range of tree node value is in the range of [-100000, 100000].
# 1 <= n <= 10000
#
# Companies
# Amazon
# Related Topics
# Tree
#
import unittest


# Placeholder: the Python solution has not been written yet; see the Java
# reference implementations stored in the `Java` string below.
class Solution(object):
    pass  # your function here


class TestMethods(unittest.TestCase):
    # Smoke test only — replace with real cases once Solution is implemented.
    def test_Local(self):
        self.assertEqual(1, 1)


if __name__ == '__main__':
    unittest.main()

# Reference Java solutions kept as a runtime string (not executed).
Java = '''
# Thought: https://leetcode.com/problems/equal-tree-partition/solution/

/**
 * Definition for a binary tree node.
 * public class TreeNode {
 *     int val;
 *     TreeNode left;
 *     TreeNode right;
 *     TreeNode(int x) { val = x; }
 * }
 */

# 7ms 98.08% class
class Solution {
    int judge=0;
    public boolean checkEqualTree(TreeNode root) {
        if(root==null) return false;
        int s=sum(root);
        if(s%2!=0) return false;
        check(root.left,s/2);
        check(root.right,s/2);
        if(judge==1) return true;
        return false;
    }

    private int sum(TreeNode root){
        if(root==null) return 0;
        return root.val+sum(root.left)+sum(root.right);
    }

    private int check(TreeNode root,int half){
        if(root==null) return 0;
        int s=root.val+check(root.left,half)+check(root.right,half);
        if(s==half) judge=1;
        return s;
    }
}

# hash map
# 14ms 31.49%
class Solution {
    public boolean checkEqualTree(TreeNode root) {
        Map<Integer, Integer> map = new HashMap<Integer, Integer>();
        int sum = getsum(root, map);
        if(sum == 0)return map.getOrDefault(sum, 0) > 1;
        return sum%2 == 0 && map.containsKey(sum/2);
    }

    public int getsum(TreeNode root, Map<Integer, Integer> map ){
        if(root == null)return 0;
        int cur = root.val + getsum(root.left, map) + getsum(root.right, map);
        map.put(cur, map.getOrDefault(cur,0) + 1);
        return cur;
    }
}
'''
JulyKikuAkita/PythonPrac
cs15211/EqualTreePartition.py
Python
apache-2.0
2,676
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package leap.orm.domain;

import leap.lang.*;
import leap.lang.annotation.Nullable;
import leap.lang.enums.Bool;
import leap.lang.expression.Expression;
import leap.lang.jdbc.JdbcType;
import leap.orm.generator.IdGenerator;

import java.util.regex.Pattern;

/**
 * Immutable description of a reusable ORM field "domain": a named bundle of
 * column metadata (type, length, nullability, defaults) plus insert/update/
 * filter expressions that can be applied to entity fields by name.
 *
 * All Boolean/Integer properties are boxed on purpose: {@code null} means
 * "not specified", letting callers fall back to their own defaults.
 */
public class Domain implements Sourced,Named {

    // Origin of this definition (e.g. config file or annotation), for diagnostics.
    private final Object      source;
    private final String      name;
    // Column name to use when the field does not declare one.
    private final String      defaultColumnName;
    // Column type/size metadata.
    private final JdbcType    type;
    private final Integer     length;
    private final Integer     precision;
    private final Integer     scale;
    private final Boolean     nullable;
    private final String      defaultValue;
    // Whether the field participates in INSERT/UPDATE and the expressions
    // evaluated to produce its value.
    private final Boolean     insert;
    private final Boolean     update;
    private final Expression  insertValue;
    private final Expression  updateValue;
    // Query capabilities and filter expressions.
    private final Boolean     filterable;
    private final Boolean     sortable;
    private final Boolean     filter;
    private final Expression  filterValue;
    private final Expression  filterIfValue;
    private final Float       sortOrder;
    // When true this domain is auto-applied to fields matching its name.
    private final boolean     autoMapping;
    private final IdGenerator idGenerator;

    /**
     * Creates a fully specified domain.
     *
     * @param name required, must be non-empty; all other parameters may be
     *             {@code null} to mean "unspecified"
     */
    public Domain(Object source, String name, String defaultColumnName, JdbcType type, Integer length, Integer precision, Integer scale,
                  Boolean nullable, String defaultValue, Boolean insert, Expression insertValue, Boolean update, Expression updateValue,
                  Boolean filterable, Boolean sortable,
                  Boolean filter, Expression filterValue,Expression filterIfValue,
                  Float sortOrder, boolean autoMapping,
                  IdGenerator idGenerator) {

        Args.notEmpty(name,"name");

        this.source = source;
        this.name = name;
        this.defaultColumnName = defaultColumnName;
        this.type = type;
        this.length = length;
        this.precision = precision;
        this.scale = scale;
        this.nullable = nullable;
        this.defaultValue = defaultValue;
        this.insert = insert;
        this.insertValue = insertValue;
        this.update = update;
        this.updateValue = updateValue;
        this.filterable = filterable;
        this.sortable = sortable;
        this.filter = filter;
        this.filterValue = filterValue;
        this.filterIfValue=filterIfValue;
        this.sortOrder = sortOrder;
        this.autoMapping = autoMapping;
        this.idGenerator = idGenerator;
    }

    @Override
    public Object getSource() {
        return source;
    }

    public String getName() {
        return name;
    }

    public String getDefaultColumnName() {
        return defaultColumnName;
    }

    public JdbcType getType() {
        return type;
    }

    public Integer getLength() {
        return length;
    }

    public Integer getPrecision() {
        return precision;
    }

    public Integer getScale() {
        return scale;
    }

    public Boolean getNullable() {
        return nullable;
    }

    public String getDefaultValue() {
        return defaultValue;
    }

    public Expression getInsertValue() {
        return insertValue;
    }

    public Boolean getInsert() {
        return insert;
    }

    public Boolean getUpdate() {
        return update;
    }

    public Expression getUpdateValue() {
        return updateValue;
    }

    public Boolean getFilterable() {
        return filterable;
    }

    public Boolean getSortable() {
        return sortable;
    }

    public Boolean getFilter() {
        return filter;
    }

    public Expression getFilterValue() {
        return filterValue;
    }

    public Float getSortOrder() {
        return sortOrder;
    }

    public boolean isAutoMapping() {
        return autoMapping;
    }

    public IdGenerator getIdGenerator() {
        return idGenerator;
    }

    @Override
    public String toString() {
        return "Domain : " + name;
    }

    public Expression getFilterIfValue() {
        return filterIfValue;
    }
}
leapframework/framework
data/orm/src/main/java/leap/orm/domain/Domain.java
Java
apache-2.0
4,481
package fw.db.connection.impl; import fw.common.util.CE; import fw.common.util.LE; import fw.db.connection.DBException; import fw.db.connection.RecordHandler; import java.sql.ResultSet; import java.sql.ResultSetMetaData; /** * 通用的EntityBean查询映射处理类<br/> * NOTE: 给定的Entity必须符合STREET规定的命名规则 * @author dzb * * @param <T> */ class RecordHandlerImpl<T> implements RecordHandler<T> { private final CE<T> ce; //private String[] propNames = nul public RecordHandlerImpl(Class<T> classOfT) { ce = CE.of(classOfT); //propNames =LE.getPropertyNames(classOfT); } public T mapping(ResultSet rs, int row) throws DBException { try { ResultSetMetaData rsm = rs.getMetaData(); String[] columnNames = new String[rsm.getColumnCount()]; for (int i=0; i<rsm.getColumnCount(); i++){ columnNames[i] = rsm.getColumnName(i+1).toLowerCase(); } T bean = ce.create(); for (String cn : columnNames) { Object v = rs.getObject(cn); String fn = DBUtils.convColumnNameToPropertyName(cn); // TODO think about the comment block check logic // Field field = ce.getField(fn); // v = LE.coerce(v, field.getType()); LE.setPropertyValue(bean, fn, v); } return bean; } catch (Throwable e) { throw new DBException(e); } } }
dzb/fw-kit
src/main/java/fw/db/connection/impl/RecordHandlerImpl.java
Java
apache-2.0
1,457
/*
 * java API for the SatoChip Bitcoin Hardware Wallet
 * (c) 2015 by Toporin - 16DMCk4WUaHofchAhpMaQS4UPm4urcy2dN
 * Sources available on https://github.com/Toporin
 *
 * Copyright 2015 by Toporin (https://github.com/Toporin)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.satochip.satochipclient;

import javax.smartcardio.CommandAPDU;
import javax.smartcardio.ResponseAPDU;

/**
 * A CardConnectorException is thrown when an APDU command returns a response
 * different from 0x9000.
 *
 * Only the command header (CLA/INS/P1/P2) and the response status word are
 * retained; APDU payloads are deliberately stripped because they may contain
 * sensitive key material.
 */
public class CardConnectorException extends Exception{

    /** Command that failed, reduced to its header (no data field). */
    private CommandAPDU c;
    /** Response reduced to its SW1/SW2 status bytes. */
    private ResponseAPDU r;

    /**
     * Creates a new exception with a detail message and the APDU exchange
     * that failed.
     *
     * @param msg Detail message
     * @param c   command that was sent
     * @param r   response that was received
     */
    public CardConnectorException(String msg, CommandAPDU c, ResponseAPDU r) {
        super(msg);
        //only for debug purpose as it may contains sensitive data!
        //this.c= c;
        //this.r= r;
        // safer to remove sensitive information
        this.c= new CommandAPDU(c.getCLA(), c.getINS(), c.getP1(), c.getP2(), null);
        byte[] sw12=new byte[2];
        sw12[0]=(byte)r.getSW1();
        sw12[1]=(byte)r.getSW2();
        this.r= new ResponseAPDU(sw12);
    }

    /**
     * Creates a new exception with a detail message but no APDU context.
     *
     * Fix: this was previously an auto-generated stub that unconditionally
     * threw {@code UnsupportedOperationException}, making the constructor
     * unusable. It now builds a normal exception; command/response accessors
     * return neutral values (0 / null) when no APDU context is available.
     *
     * @param msg Detail message
     */
    CardConnectorException(String msg) {
        super(msg);
        this.c = null;
        this.r = null;
    }

    /**
     * Returns the SW12 code associated with this exception, or 0 when the
     * exception was created without a response.
     *
     * @return SW12
     */
    public short getSW12() {
        return r == null ? 0 : (short)r.getSW();
    }

    /**
     * Returns the INS byte of the command associated with this exception,
     * or 0 when the exception was created without a command.
     *
     * @return INS
     */
    public short getIns() {
        return c == null ? 0 : (short)c.getINS();
    }

    /** @return sanitized response (status word only), or null if unavailable */
    public ResponseAPDU getResponse(){
        return r;
    }

    /** @return sanitized command (header only), or null if unavailable */
    public CommandAPDU getCommand(){
        return c;
    }
}
Toporin/SatoChipClient
src/main/java/org/satochip/satochipclient/CardConnectorException.java
Java
apache-2.0
2,634
package com.ybook.app.pinnedheaderlistview; import android.util.SparseArray; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import com.ybook.app.pinnedheaderlistview.PinnedHeaderListView.PinnedSectionedHeaderAdapter; public abstract class SectionedBaseAdapter extends BaseAdapter implements PinnedSectionedHeaderAdapter { private static int HEADER_VIEW_TYPE = 0; private static int ITEM_VIEW_TYPE = 0; /** * Holds the calculated values of @{link getPositionInSectionForPosition} */ private SparseArray<Integer> mSectionPositionCache; /** * Holds the calculated values of @{link getSectionForPosition} */ private SparseArray<Integer> mSectionCache; /** * Holds the calculated values of @{link getCountForSection} */ private SparseArray<Integer> mSectionCountCache; /** * Caches the item count */ private int mCount; /** * Caches the section count */ private int mSectionCount; public SectionedBaseAdapter() { super(); mSectionCache = new SparseArray<Integer>(); mSectionPositionCache = new SparseArray<Integer>(); mSectionCountCache = new SparseArray<Integer>(); mCount = -1; mSectionCount = -1; } @Override public void notifyDataSetChanged() { mSectionCache.clear(); mSectionPositionCache.clear(); mSectionCountCache.clear(); mCount = -1; mSectionCount = -1; super.notifyDataSetChanged(); } @Override public void notifyDataSetInvalidated() { mSectionCache.clear(); mSectionPositionCache.clear(); mSectionCountCache.clear(); mCount = -1; mSectionCount = -1; super.notifyDataSetInvalidated(); } @Override public final int getCount() { if (mCount >= 0) { return mCount; } int count = 0; for (int i = 0; i < internalGetSectionCount(); i++) { count += internalGetCountForSection(i); count++; // for the header view } mCount = count; return count; } @Override public final Object getItem(int position) { return getItem(getSectionForPosition(position), getPositionInSectionForPosition(position)); } @Override public final long getItemId(int position) { return 
getItemId(getSectionForPosition(position), getPositionInSectionForPosition(position)); } @Override public final View getView(int position, View convertView, ViewGroup parent) { if (isSectionHeader(position)) { return getSectionHeaderView(getSectionForPosition(position), convertView, parent); } return getItemView(getSectionForPosition(position), getPositionInSectionForPosition(position), convertView, parent); } @Override public final int getItemViewType(int position) { if (isSectionHeader(position)) { return getItemViewTypeCount() + getSectionHeaderViewType(getSectionForPosition(position)); } return getItemViewType(getSectionForPosition(position), getPositionInSectionForPosition(position)); } @Override public final int getViewTypeCount() { return getItemViewTypeCount() + getSectionHeaderViewTypeCount(); } public final int getSectionForPosition(int position) { // first try to retrieve values from cache Integer cachedSection = mSectionCache.get(position); if (cachedSection != null) { return cachedSection; } int sectionStart = 0; for (int i = 0; i < internalGetSectionCount(); i++) { int sectionCount = internalGetCountForSection(i); int sectionEnd = sectionStart + sectionCount + 1; if (position >= sectionStart && position < sectionEnd) { mSectionCache.put(position, i); return i; } sectionStart = sectionEnd; } return 0; } public int getPositionInSectionForPosition(int position) { // first try to retrieve values from cache Integer cachedPosition = mSectionPositionCache.get(position); if (cachedPosition != null) { return cachedPosition; } int sectionStart = 0; for (int i = 0; i < internalGetSectionCount(); i++) { int sectionCount = internalGetCountForSection(i); int sectionEnd = sectionStart + sectionCount + 1; if (position >= sectionStart && position < sectionEnd) { int positionInSection = position - sectionStart - 1; mSectionPositionCache.put(position, positionInSection); return positionInSection; } sectionStart = sectionEnd; } return 0; } public final boolean 
isSectionHeader(int position) { int sectionStart = 0; for (int i = 0; i < internalGetSectionCount(); i++) { if (position == sectionStart) { return true; } else if (position < sectionStart) { return false; } sectionStart += internalGetCountForSection(i) + 1; } return false; } public int getItemViewType(int section, int position) { return ITEM_VIEW_TYPE; } public int getItemViewTypeCount() { return 1; } public int getSectionHeaderViewType(int section) { return HEADER_VIEW_TYPE; } public int getSectionHeaderViewTypeCount() { return 1; } public abstract Object getItem(int section, int position); public abstract long getItemId(int section, int position); public abstract int getSectionCount(); public abstract int getCountForSection(int section); public abstract View getItemView(int section, int position, View convertView, ViewGroup parent); public abstract View getSectionHeaderView(int section, View convertView, ViewGroup parent); private int internalGetCountForSection(int section) { Integer cachedSectionCount = mSectionCountCache.get(section); if (cachedSectionCount != null) { return cachedSectionCount; } int sectionCount = getCountForSection(section); mSectionCountCache.put(section, sectionCount); return sectionCount; } private int internalGetSectionCount() { if (mSectionCount >= 0) { return mSectionCount; } mSectionCount = getSectionCount(); return mSectionCount; } }
a642500/Ybook
app/src/main/java/com/ybook/app/pinnedheaderlistview/SectionedBaseAdapter.java
Java
apache-2.0
6,634
/* * Copyright 2013 Maksim Kisilyov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.luxoft.p4ctm; import java.io.*; import java.util.Properties; public class CTMBuilderOptions { private String server; private String user; private String password; private String fileSpec; private String requirementIdRegex; private String typeOfChangeRegex; private String acceptedTypesOfChangeRegex; private String clDescriptionsOverridePath; private String requirementsInputFile; private String requirementsInputRegex; private String requirementsFilterFile; private String matrixOutputFile; public CTMBuilderOptions() { this.server = "perforce:1666"; this.user = "anonymous"; this.password = ""; this.fileSpec = "//depot/..."; this.requirementIdRegex = "(?m)^\\[[xX]\\] TTS/Ref : (.*?)\\s*?$"; this.typeOfChangeRegex = "(?m)^\\[[xX]\\] (?:BF|FE|OP|QI|IN|OT) = (.*?)\\s*?$"; this.acceptedTypesOfChangeRegex = "BugFix|Feature"; this.clDescriptionsOverridePath = null; this.requirementsInputFile = null; this.requirementsInputRegex = "(?ms)CTRS ID: (?<rid>.*?)$.*?Object Heading: (?<name>.*?)$.*?Object Text: (?<desc>.*?)$"; this.requirementsFilterFile = null; this.matrixOutputFile = "TM.xls"; } public CTMBuilderOptions(String server, String user, String password, String fileSpec, String requirementIdSearchPattern, String typeOfChangeRegex, String acceptedTypesOfChangeRegex, String CLDescriptionsInputPath, String requirementsInputFile, String requirementsInputRegex, String requirementsFilterFile, 
String matrixOutputFile) { this.server = server; this.user = user; this.password = password; this.fileSpec = fileSpec; this.requirementIdRegex = requirementIdSearchPattern; this.typeOfChangeRegex = typeOfChangeRegex; this.acceptedTypesOfChangeRegex = acceptedTypesOfChangeRegex; this.clDescriptionsOverridePath = CLDescriptionsInputPath; this.requirementsInputFile = requirementsInputFile; this.requirementsInputRegex = requirementsInputRegex; this.requirementsFilterFile = requirementsFilterFile; this.matrixOutputFile = matrixOutputFile; } public void loadFromXML(File input) throws IOException { new XMLIO().readXML(input); } public void storeToXML(File output) throws IOException { new XMLIO().writeXML(output); } public String getServer() { return server; } public String getUser() { return user; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getFileSpec() { return fileSpec; } public String getRequirementIdRegex() { return requirementIdRegex; } public String getTypeOfChangeRegex() { return typeOfChangeRegex; } public String getAcceptedTypesOfChangeRegex() { return acceptedTypesOfChangeRegex; } public String getCLDescriptionsOverridePath() { return clDescriptionsOverridePath; } public String getRequirementsInputFile() { return requirementsInputFile; } public String getRequirementsInputRegex() { return requirementsInputRegex; } public String getRequirementsFilterFile() { return requirementsFilterFile; } public String getMatrixOutputFile() { return matrixOutputFile; } private interface Archive { String fill(String key, String value); } private class XMLIO extends Properties { private void serialize(Archive archive) { server = archive.fill("server", server); user = archive.fill("user", user); fileSpec = archive.fill("file spec", fileSpec); requirementIdRegex = archive.fill("requirement id regex", requirementIdRegex); typeOfChangeRegex = archive.fill("type of change regex", 
typeOfChangeRegex); acceptedTypesOfChangeRegex = archive.fill("accepted types of change regex", acceptedTypesOfChangeRegex); clDescriptionsOverridePath = archive.fill("cl descriptions override path", clDescriptionsOverridePath); requirementsInputFile = archive.fill("requirements input file", requirementsInputFile); requirementsInputRegex = archive.fill("requirements input regex", requirementsInputRegex); requirementsFilterFile = archive.fill("requirements filter file", requirementsFilterFile); matrixOutputFile = archive.fill("output file", matrixOutputFile); } public void readXML(File file) throws IOException { try (InputStream inputStream = new FileInputStream(file)) { loadFromXML(inputStream); serialize(new InputArchive()); } } public void writeXML(File file) throws IOException { try (OutputStream outputStream = new FileOutputStream(file)) { serialize(new OutputArchive()); storeToXML(outputStream, null); } } private class InputArchive implements Archive { @Override public String fill(String key, String value) { return getProperty(key); } } private class OutputArchive implements Archive { @Override public String fill(String key, String value) { if (key == null || value == null) { return null; } setProperty(key, value); return value; } } } }
MKisilyov/P4CTM
src/com/luxoft/p4ctm/CTMBuilderOptions.java
Java
apache-2.0
6,603
<?php /** * @author: MUlt1mate * Date: 30.03.13 * Time: 15:40 */ require_once __DIR__ . '/../vendor/autoload.php'; require_once __DIR__ . '/Main_controller.php'; function tload($class_name) { if (file_exists(__DIR__ . '/' . $class_name . '.php')) { require_once __DIR__ . '/' . $class_name . '.php'; return; } if (file_exists(__DIR__ . '/AppModels/' . $class_name . '.php')) { require_once __DIR__ . '/AppModels/' . $class_name . '.php'; return; } } spl_autoload_register('tload');
MUlt1mate/timetable_narhoz
include/class_loader.php
PHP
apache-2.0
536
// Copyright 2018 The Terraformer Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package commercetools import ( "context" "github.com/GoogleCloudPlatform/terraformer/providers/commercetools/connectivity" "github.com/GoogleCloudPlatform/terraformer/terraformutils" "github.com/labd/commercetools-go-sdk/commercetools" ) type StoreGenerator struct { CommercetoolsService } // InitResources generates Terraform Resources from Commercetools API func (g *StoreGenerator) InitResources() error { cfg := connectivity.Config{ ClientID: g.GetArgs()["client_id"].(string), ClientSecret: g.GetArgs()["client_secret"].(string), ClientScope: g.GetArgs()["client_scope"].(string), TokenURL: g.GetArgs()["token_url"].(string) + "/oauth/token", BaseURL: g.GetArgs()["base_url"].(string), } client := cfg.NewClient() stores, err := client.StoreQuery(context.Background(), &commercetools.QueryInput{}) if err != nil { return err } for _, store := range stores.Results { g.Resources = append(g.Resources, terraformutils.NewResource( store.ID, store.Key, "commercetools_store", "commercetools", map[string]string{}, []string{}, map[string]interface{}{}, )) } return nil }
GoogleCloudPlatform/terraformer
providers/commercetools/store.go
GO
apache-2.0
1,745
using System; using System.Collections.Generic; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.ComponentModel; using System.ComponentModel.Composition; using Microsoft.VisualStudio.Language.Intellisense; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.Text.Classification; using Microsoft.VisualStudio.Utilities; using Microsoft.VisualStudio.Text.Editor; using Microsoft.VisualStudio.Editor; using Microsoft.VisualStudio.Shell; namespace Hyperstore.CodeAnalysis.Editor.Completion { [Export(typeof(IIntellisenseControllerProvider))] [ContentType(ContentTypeAndFileExtensionDefinition.ContentTypeName)] [Name("Hyperstore Completion Controller")] [Order(Before = "Default Completion Controller")] [TextViewRole(PredefinedTextViewRoles.Editable)] internal class CompletionControllerProvider : IIntellisenseControllerProvider { [Import] private ICompletionBroker CompletionBrokerMapService { get; set; } public IIntellisenseController TryCreateIntellisenseController(ITextView textView, IList<ITextBuffer> subjectBuffers) { // Create the completion controller and add it to the view properties var completionController = new CompletionController(subjectBuffers, textView, this.CompletionBrokerMapService); textView.Properties.AddProperty(completionController.GetType(), completionController); return completionController; } } }
Hyperstore/Hyperstore.CodeAnalysis
Hyperstore.CodeAnalysis.Editor/Completion/CompletionProvider.cs
C#
apache-2.0
1,553
var BinaryWriter = require('./BinaryWriter'); function LeaderboardPosition(position) { this.place = position } module.exports = LeaderboardPosition; LeaderboardPosition.prototype.build = function() { var buf = new BinaryWriter(); buf.writeUInt8(0x30); buf.writeUInt16(this.place); return buf.toBuffer(); };
proxiemind/MultiOgar-Edited
src/packet/LeaderboardPosition.js
JavaScript
apache-2.0
330
import sys class Encoding(object): @staticmethod def normalize(value): """ Normalize value :param value: The value :return: The processed value """ # Python 2 vs Python 3 if sys.version_info < (3, 0): return Encoding.to_ascii(value) else: return Encoding.to_unicode(value) @staticmethod def to_ascii(value): """ To ascii :param value: The value :return: The processed value """ # Dict if isinstance(value, dict): processed_value = {} for key in value: if Encoding._is_unicode(key): processed_key = key.encode('ascii') else: processed_key = key processed_value[processed_key] = Encoding.to_ascii(value[key]) # List elif isinstance(value, list): processed_value = [] for value in value: processed_value.append(Encoding.to_ascii(value)) # Unicode elif Encoding._is_unicode(value): processed_value = value.encode('ascii') else: processed_value = value return processed_value @staticmethod def to_unicode(value): """ To unicode :param value: The value :return: The processed value """ # Dict if isinstance(value, dict): processed_value = {} for key in value: if Encoding._is_ascii(key): processed_key = key.decode('utf-8') else: processed_key = key processed_value[processed_key] = Encoding.to_unicode(value[key]) # List elif isinstance(value, list): processed_value = [] for value in value: processed_value.append(Encoding.to_unicode(value)) # Unicode elif Encoding._is_ascii(value): processed_value = value.decode('utf-8') else: processed_value = value return processed_value @staticmethod def _is_ascii(value): """ Check if ascii :param value: The value :return: Ascii or not """ # Python 2 vs Python 3 if sys.version_info < (3, 0): return isinstance(value, str) else: return isinstance(value, bytes) @staticmethod def _is_unicode(value): """ Check if unicode :param value: The value :return: Ascii or not """ # Python 2 vs Python 3 if sys.version_info < (3, 0): return isinstance(value, unicode) else: return isinstance(value, str)
LowieHuyghe/script-core
scriptcore/encoding/encoding.py
Python
apache-2.0
2,861
package ir.ops; import ir.visitor.IrVisitor; public class ArrayAccess implements Expression { private Expression reference; private DataType type; private Expression index; public ArrayAccess(Expression reference, DataType type, Expression index) { this.reference = reference; this.type = type; this.index = index; } @Override public void accept(IrVisitor visitor) { visitor.visit(this); } public Expression getReference() { return reference; } public DataType getType() { return type; } public Expression getIndex() { return index; } @Override public String toString() { return reference.toString() + "[" + index.toString() + "]"; } }
AndrewHancock/MiniJava-compiler
src/main/java/ir/ops/ArrayAccess.java
Java
apache-2.0
691
<?php include_once('top.php');?> <body> <div id="wrapper"> <!-- Navigation --> <nav class="navbar navbar-default navbar-static-top" role="navigation" style="margin-bottom: 0"> <?php include_once('header.php');?> <?php include_once('leftsidebar.php');?> </nav> <div id="page-wrapper"> <div class="row"> <div class="col-lg-12"> <h1 class="page-header">View Visa</h1> </div> <!-- /.col-lg-12 --> </div> <!-- /.row --> <div class="row"> <div class="col-lg-12 alert alert-success hide" id="message_status"></div> <div class="col-lg-12"> <div class="panel panel-default"> <div class="panel-heading"> View Visa </div> <!-- /.panel-heading --> <div class="panel-body"> <div class="dataTable_wrapper"> <table class="table table-striped table-bordered table-hover" id="dataTables-example"> <thead> <tr> <th>Sur Name</th> <th>Given Name</th> <th>Passport No</th> <th>Visa Status</th> <th>Passport Image</th> <th>Action</th> <th>View VISA Doc</th> </tr> </thead> <tbody> <?php $SQL = "SELECT * FROM tblumrah WHERE user_id = ".(int)$_SESSION["client_id"]." 
ORDER BY id DESC"; $result = MySQLQuery($SQL); while($row = mysql_fetch_array($result)) { // ,MYSQL_ASSOC $visa_status = $row['visa_status']; $inprocess_selected = ""; $aproved_selected = ""; $rejected_selected = ""; if($visa_status == 1) $visa_status = "In Process"; else if($visa_status == 2) $visa_status = "Approved"; else if($visa_status == 3) $visa_status = "Rejected"; //$inprocess_selected = ""; ?> <tr class="odd gradeX" id="DelID_<?php echo $row['id'];?>"> <td class="left"><?php echo $row['sur_name'];?></td> <td class="left"><?php echo $row['given_name'];?></td> <td class="left" id="Trans_<?php echo $row['id'];?>"><?php echo $row['passport_no'];?></td> <td class="left" style="padding-top: 0;padding-bottom: 0;"><?php echo $visa_status; ?></td> <td class="left"><a href="" id="<?php echo $row['id'];?>" data-toggle="modal" data-target=".bs-example-modal-lg2" class="show_passport">View Passport Image</a></td> <td class="left"><a href="" id="<?php echo $row['id'];?>" data-toggle="modal" data-target=".bs-example-modal-lg" class="show_dialog">View All Details</a></td> <td class="left"><a href="" id="<?php echo $row['id'];?>" data-toggle="modal" data-target=".bs-example-modal-lg5" class="show_visa_docs">Visa Docs</a></td> </tr> <?php } ?> </tbody> </table> </div> <!-- /.table-responsive --> </div> <!-- /.panel-body --> </div> <!-- /.panel --> </div> <!-- /.col-lg-12 --> </div> <!-- /.row --> <!-- /.row --> <!-- /.row --> <!-- /.row --> </div> <!-- /#page-wrapper --> </div> <!-- /#wrapper --> <!-- Modal --> <div class="modal fade bs-example-modal-lg5" tabindex="-1" role="dialog" aria-labelledby="myLargeModalLabel"> <div class="modal-dialog modal-lg" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button> <h4 class="modal-title" id="myModalLabel">View Visa Docs Image</h4> </div> <div class="modal-body" align="center" 
id="visa_docs_image"> <img src="" width="600"> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> </div> </div> </div> </div> <div class="modal fade bs-example-modal-lg2" tabindex="-1" role="dialog" aria-labelledby="myLargeModalLabel"> <div class="modal-dialog modal-lg" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button> <h4 class="modal-title" id="myModalLabel">View Passport Image</h4> </div> <div class="modal-body" align="center" id="passport_image"> <img src="" width="600"> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> </div> </div> </div> </div> <div class="modal fade bs-example-modal-lg" id="exampleModal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button> <h4 class="modal-title" id="myModalLabel">View All Details</h4> </div> <div class="modal-body" align="center" id="visa_details"> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> </div> </div> </div> </div> <div aria-hidden="true" aria-labelledby="myModalLabel" role="dialog" tabindex="-1" id="myModal" class="modal fade" style="display: none;"> <div class="modal-dialog"> <div class="modal-content"> <div class="modal-body">Do you want to delete this record?</div> <div class="modal-footer"> <button class="btn btn-primary" type="button" id="DeleteRecord">Delete</button> <button data-dismiss="modal" class="btn btn-default" type="button">Close</button> </div> </div> <!-- /.modal-content --> </div> <!-- /.modal-dialog --> </div> <input 
type="hidden" id="currentID" value="" /> <?php include_once('jquery.php');?> <!-- Page-Level Demo Scripts - Tables - Use for reference --> <script> $(document).ready(function() { $('#dataTables-example').DataTable({ responsive: true }); //Show dialog with image $(".show_dialog").on('click', function(){ var current_id = $(this).attr('id'); //console.log(current_id); var action = "GetVisaDetail"; jQuery.ajax({ type: "POST", url: "action.php", data: {current_id: current_id, action: action}, cache: false, success: function(response) { console.log(response); $("#visa_details").html(response); } }); }); //Show dialog with passport image $(".show_passport").on('click', function(){ var current_id = $(this).attr('id'); //console.log(current_id); var action = "GetPassportImageName"; jQuery.ajax({ type: "POST", url: "action.php", data: {current_id: current_id, action: action}, cache: false, success: function(response) { console.log(response); var src1 = 'images/user_passport/'+response+''; $("#passport_image img").attr("src", src1); //$("#bank_slip_image") } }); }); //Show dialog with visa docs $(".show_visa_docs").on('click', function(){ var current_id = $(this).attr('id'); //console.log(current_id); var action = "GetVisaDocsImageName"; jQuery.ajax({ type: "POST", url: "action.php", data: {current_id: current_id, action: action}, cache: false, success: function(response) { //console.log(response); var src1 = 'images/visa_docs/'+response+''; $("#visa_docs_image img").attr("src", src1); //$("#bank_slip_image") } }); }); // Update payment status jQuery(document).on('change','.get_visa_status',function(e){ var current_id = $(this).attr('id'); var array = current_id.split('_'); current_id = array[3]; //console.log(current_id); return false; var selected_value = $(this).val(); var Trans_id = $("#Trans_"+current_id).html(); var selected_text = ""; if(selected_value == 1) selected_text = "In process"; else if(selected_value == 2) selected_text = "Aproved"; else if(selected_value == 
3) selected_text = "Rejected"; var action = "UpdateVisaStatus"; jQuery.ajax({ type: "POST", url: "action.php", data: {current_id: current_id, action: action, selected_value: selected_value}, cache: false, success: function(response) { // update_staff var obj = eval( "(" + response + ")" ) ; if(obj == "2") { $("#visa_status_dropdown_"+current_id).prop('disabled', 'disabled'); } else { $("#message_status").html(''); $("#message_status").html('Now this Passport No '+Trans_id+' visa status is '+selected_text+'!'); $("#message_status").removeClass('hide'); } } }); }); }); </script> </body> </html>
jhassan/unique
unique/view_visa.php
PHP
apache-2.0
12,112
<?php namespace BankId\Merchant\Library\Schemas\saml\protocol; /** * Class representing ArtifactResolveType * * * XSD Type: ArtifactResolveType */ class ArtifactResolveType extends RequestAbstractType { /** * @property string $artifact */ private $artifact = null; /** * Gets as artifact * * @return string */ public function getArtifact() { return $this->artifact; } /** * Sets a new artifact * * @param string $artifact * @return self */ public function setArtifact($artifact) { $this->artifact = $artifact; return $this; } }
notarynodes/idintt
hackathon-module/idintt-php-module/library/Schemas/saml/protocol/ArtifactResolveType.php
PHP
apache-2.0
704
/* FluorineFx open source library Copyright (C) 2007 Zoltan Csibi, zoltan@TheSilentGroup.com, FluorineFx.com This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ using System; using System.Collections; using FluorineFx.Collections; using FluorineFx.Util; #if !(NET_1_1) using System.Collections.Generic; using FluorineFx.Collections.Generic; #endif namespace FluorineFx.Messaging.Api { /// <summary> /// ServiceContainer implementation. /// This type supports the Fluorine infrastructure and is not intended to be used directly from your code. /// </summary> class ServiceContainer : IServiceContainer { #if !(NET_1_1) private Dictionary<Type, object> _services = new Dictionary<Type, object>(); #else private Hashtable _services = new Hashtable(); #endif private IServiceProvider _parentProvider; public ServiceContainer():this(null) { } public ServiceContainer(IServiceProvider parentProvider) { _parentProvider = parentProvider; } public IServiceContainer Container { get { IServiceContainer service = null; if (_parentProvider != null) { service = (IServiceContainer)_parentProvider.GetService(typeof(IServiceContainer)); } return service; } set { _parentProvider = value; } } /// <summary> /// Gets an object that can be used to synchronize access. 
/// </summary> public object SyncRoot { get { return ((ICollection)_services).SyncRoot; } } #region IServiceContainer Members public void AddService(Type serviceType, object service) { AddService(serviceType, service, false); } public void AddService(Type serviceType, object service, bool promote) { ValidationUtils.ArgumentNotNull(serviceType, "serviceType"); ValidationUtils.ArgumentNotNull(service, "service"); lock (this.SyncRoot) { if (promote) { IServiceContainer container = this.Container; if (container != null) { container.AddService(serviceType, service, promote); return; } } if (_services.ContainsKey(serviceType)) throw new ArgumentException(string.Format("Service {0} already exists", serviceType.FullName)); _services[serviceType] = service; } } public void RemoveService(Type serviceType) { RemoveService(serviceType, false); } public void RemoveService(Type serviceType, bool promote) { ValidationUtils.ArgumentNotNull(serviceType, "serviceType"); lock (this.SyncRoot) { if (promote) { IServiceContainer container = this.Container; if (container != null) { container.RemoveService(serviceType, promote); return; } } if (_services.ContainsKey(serviceType)) { IService service = _services[serviceType] as IService; if (service != null) service.Shutdown(); } _services.Remove(serviceType); } } #endregion #region IServiceProvider Members public object GetService(Type serviceType) { ValidationUtils.ArgumentNotNull(serviceType, "serviceType"); object service = null; lock (this.SyncRoot) { if( _services.ContainsKey(serviceType) ) service = _services[serviceType]; if (service == null && _parentProvider != null) { service = _parentProvider.GetService(serviceType); } } return service; } #endregion internal void Shutdown() { lock (this.SyncRoot) { foreach (object serviceInstance in _services.Values) { IService service = serviceInstance as IService; if (service != null) service.Shutdown(); } _services.Clear(); _services = null; _parentProvider = null; } } } }
gspark/PmsAssistant
FluorineFx/Messaging/Api/ServiceContainer.cs
C#
apache-2.0
5,560
/* Copyright 2016 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ // Tagger transition system. // // This transition system has one type of actions: // - The SHIFT action pushes the next input token to the stack and // advances to the next input token, assigning a part-of-speech tag to the // token that was shifted. // // The transition system operates with parser actions encoded as integers: // - A SHIFT action is encoded as number starting from 0. #include <string> #include "parser_features.h" #include "parser_state.h" #include "parser_transitions.h" #include "sentence_features.h" #include "shared_store.h" #include "task_context.h" #include "term_frequency_map.h" #include "utils.h" #include "tensorflow/core/lib/strings/strcat.h" namespace syntaxnet { class TaggerTransitionState : public ParserTransitionState { public: explicit TaggerTransitionState(const TermFrequencyMap *tag_map, const TagToCategoryMap *tag_to_category) : tag_map_(tag_map), tag_to_category_(tag_to_category) {} explicit TaggerTransitionState(const TaggerTransitionState *state) : TaggerTransitionState(state->tag_map_, state->tag_to_category_) { tag_ = state->tag_; gold_tag_ = state->gold_tag_; } // Clones the transition state by returning a new object. ParserTransitionState *Clone() const override { return new TaggerTransitionState(this); } // Reads gold tags for each token. 
void Init(ParserState *state) override { tag_.resize(state->sentence().token_size(), -1); gold_tag_.resize(state->sentence().token_size(), -1); for (int pos = 0; pos < state->sentence().token_size(); ++pos) { int tag = tag_map_->LookupIndex(state->GetToken(pos).tag(), -1); gold_tag_[pos] = tag; } } // Returns the tag assigned to a given token. int Tag(int index) const { DCHECK_GE(index, 0); DCHECK_LT(index, tag_.size()); return index == -1 ? -1 : tag_[index]; } // Sets this tag on the token at index. void SetTag(int index, int tag) { DCHECK_GE(index, 0); DCHECK_LT(index, tag_.size()); tag_[index] = tag; } // Returns the gold tag for a given token. int GoldTag(int index) const { DCHECK_GE(index, -1); DCHECK_LT(index, gold_tag_.size()); return index == -1 ? -1 : gold_tag_[index]; } // Returns the string representation of a POS tag, or an empty string // if the tag is invalid. string TagAsString(int tag) const { if (tag >= 0 && tag < tag_map_->Size()) { return tag_map_->GetTerm(tag); } return ""; } // Adds transition state specific annotations to the document. void AddParseToDocument(const ParserState &state, bool rewrite_root_labels, Sentence *sentence) const override { for (size_t i = 0; i < tag_.size(); ++i) { Token *token = sentence->mutable_token(i); token->set_tag(TagAsString(Tag(i))); if (tag_to_category_) { token->set_category(tag_to_category_->GetCategory(token->tag())); } } } // Whether a parsed token should be considered correct for evaluation. bool IsTokenCorrect(const ParserState &state, int index) const override { return GoldTag(index) == Tag(index); } // Returns a human readable string representation of this state. 
string ToString(const ParserState &state) const override { string str; for (int i = state.StackSize(); i > 0; --i) { const string &word = state.GetToken(state.Stack(i - 1)).word(); if (i != state.StackSize() - 1) str.append(" "); tensorflow::strings::StrAppend( &str, word, "[", TagAsString(Tag(state.StackSize() - i)), "]"); } for (int i = state.Next(); i < state.NumTokens(); ++i) { tensorflow::strings::StrAppend(&str, " ", state.GetToken(i).word()); } return str; } private: // Currently assigned POS tags for each token in this sentence. std::vector<int> tag_; // Gold POS tags from the input document. std::vector<int> gold_tag_; // Tag map used for conversions between integer and string representations // part of speech tags. Not owned. const TermFrequencyMap *tag_map_ = nullptr; // Tag to category map. Not owned. const TagToCategoryMap *tag_to_category_ = nullptr; TF_DISALLOW_COPY_AND_ASSIGN(TaggerTransitionState); }; class TaggerTransitionSystem : public ParserTransitionSystem { public: ~TaggerTransitionSystem() override { SharedStore::Release(tag_map_); } // Determines tag map location. void Setup(TaskContext *context) override { input_tag_map_ = context->GetInput("tag-map", "text", ""); join_category_to_pos_ = context->GetBoolParameter("join_category_to_pos"); if (!join_category_to_pos_) { input_tag_to_category_ = context->GetInput("tag-to-category", "text", ""); } } // Reads tag map and tag to category map. void Init(TaskContext *context) override { const string tag_map_path = TaskContext::InputFile(*input_tag_map_); tag_map_ = SharedStoreUtils::GetWithDefaultName<TermFrequencyMap>( tag_map_path, 0, 0); if (!join_category_to_pos_) { const string tag_to_category_path = TaskContext::InputFile(*input_tag_to_category_); tag_to_category_ = SharedStoreUtils::GetWithDefaultName<TagToCategoryMap>( tag_to_category_path); } } // The SHIFT action uses the same value as the corresponding action type. 
static ParserAction ShiftAction(int tag) { return tag; } // The tagger transition system doesn't look at the dependency tree, so it // allows non-projective trees. bool AllowsNonProjective() const override { return true; } // Returns the number of action types. int NumActionTypes() const override { return 1; } // Returns the number of possible actions. int NumActions(int num_labels) const override { return tag_map_->Size(); } // The default action for a given state is assigning the most frequent tag. ParserAction GetDefaultAction(const ParserState &state) const override { return ShiftAction(0); } // Returns the next gold action for a given state according to the // underlying annotated sentence. ParserAction GetNextGoldAction(const ParserState &state) const override { if (!state.EndOfInput()) { return ShiftAction(TransitionState(state).GoldTag(state.Next())); } return ShiftAction(0); } // Checks if the action is allowed in a given parser state. bool IsAllowedAction(ParserAction action, const ParserState &state) const override { return !state.EndOfInput(); } // Makes a shift by pushing the next input token on the stack and moving to // the next position. void PerformActionWithoutHistory(ParserAction action, ParserState *state) const override { DCHECK(!state->EndOfInput()); if (!state->EndOfInput()) { MutableTransitionState(state)->SetTag(state->Next(), action); state->Push(state->Next()); state->Advance(); } } // We are in a final state when we reached the end of the input and the stack // is empty. bool IsFinalState(const ParserState &state) const override { return state.EndOfInput(); } // Returns a string representation of a parser action. string ActionAsString(ParserAction action, const ParserState &state) const override { return tensorflow::strings::StrCat("SHIFT(", tag_map_->GetTerm(action), ")"); } // No state is deterministic in this transition system. 
bool IsDeterministicState(const ParserState &state) const override { return false; } // Returns a new transition state to be used to enhance the parser state. ParserTransitionState *NewTransitionState(bool training_mode) const override { return new TaggerTransitionState(tag_map_, tag_to_category_); } // Downcasts the const ParserTransitionState in ParserState to a const // TaggerTransitionState. static const TaggerTransitionState &TransitionState( const ParserState &state) { return *static_cast<const TaggerTransitionState *>( state.transition_state()); } // Downcasts the ParserTransitionState in ParserState to an // TaggerTransitionState. static TaggerTransitionState *MutableTransitionState(ParserState *state) { return static_cast<TaggerTransitionState *>( state->mutable_transition_state()); } // Input for the tag map. Not owned. TaskInput *input_tag_map_ = nullptr; // Tag map used for conversions between integer and string representations // part of speech tags. Owned through SharedStore. const TermFrequencyMap *tag_map_ = nullptr; // Input for the tag to category map. Not owned. TaskInput *input_tag_to_category_ = nullptr; // Tag to category map. Owned through SharedStore. const TagToCategoryMap *tag_to_category_ = nullptr; bool join_category_to_pos_ = false; }; REGISTER_TRANSITION_SYSTEM("tagger", TaggerTransitionSystem); // Feature function for retrieving the tag assigned to a token by the tagger // transition system. class PredictedTagFeatureFunction : public BasicParserSentenceFeatureFunction<Tag> { public: PredictedTagFeatureFunction() {} // Gets the TaggerTransitionState from the parser state and reads the assigned // tag at the focus index. Returns -1 if the focus is not within the sentence. 
FeatureValue Compute(const WorkspaceSet &workspaces, const ParserState &state, int focus, const FeatureVector *result) const override { if (focus < 0 || focus >= state.sentence().token_size()) return -1; return static_cast<const TaggerTransitionState *>(state.transition_state()) ->Tag(focus); } private: TF_DISALLOW_COPY_AND_ASSIGN(PredictedTagFeatureFunction); }; REGISTER_PARSER_IDX_FEATURE_FUNCTION("pred-tag", PredictedTagFeatureFunction); } // namespace syntaxnet
unsiloai/syntaxnet-ops-hack
tensorflow/core/syntaxnet/tagger_transitions.cc
C++
apache-2.0
10,523
/* ** Copyright (C) 2003-2010 Institute for Systems Biology ** Seattle, Washington, USA. ** ** This library is free software; you can redistribute it and/or ** modify it under the terms of the GNU Lesser General Public ** License as published by the Free Software Foundation; either ** version 2.1 of the License, or (at your option) any later version. ** ** This library is distributed in the hope that it will be useful, ** but WITHOUT ANY WARRANTY; without even the implied warranty of ** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ** Lesser General Public License for more details. ** ** You should have received a copy of the GNU Lesser General Public ** License along with this library; if not, write to the Free Software ** Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.systemsbiology.addama.commons.httpclient.support; import org.apache.commons.httpclient.HttpMethod; /** * @author hrovira */ public final class IsExpectedStatusCodeResponseCallback implements ResponseCallback { private final int expectedStatusCode; public IsExpectedStatusCodeResponseCallback(int expectedStatusCode) { this.expectedStatusCode = expectedStatusCode; } public Object onResponse(int statusCode, HttpMethod method) throws HttpClientResponseException { return (statusCode == expectedStatusCode); } }
hrovira/addama-googlecode
commons/httpclient-support/src/main/java/org/systemsbiology/addama/commons/httpclient/support/IsExpectedStatusCodeResponseCallback.java
Java
apache-2.0
1,491
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from xml.parsers.expat import ParserCreate class DefaultSaxHandler(object): def start_element(self, name, attrs): print('sax:start_element: %s, attrs: %s' % (name, str(attrs))) def end_element(self, name): print('sax:end_element: %s' % name) def char_data(self, text): print('sax:char_data: %s' % text) xml = r'''<?xml version="1.0"?> <ol> <li><a href="/python">Python</a></li> <li><a href="/ruby">Ruby</a></li> </ol> ''' handler = DefaultSaxHandler() parser = ParserCreate() parser.StartElementHandler = handler.start_element parser.EndElementHandler = handler.end_element parser.CharacterDataHandler = handler.char_data parser.Parse(xml)
whyDK37/py_bootstrap
samples/commonlib/use_sax.py
Python
apache-2.0
739
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Data; namespace BLL { public partial class Task { DAL.Task pr = new DAL.Task(); public DataTable GetTaskList(string strWhere) { return pr.GetTaskList(strWhere); } public bool AddTaskList(Model.Task model) { return pr.AddTaskList(model); } public bool UpdateTaskList(Model.Task model) { return pr.UpdateTaskList(model); } } }
yongjianzheng/Schedule
BLL/Task.cs
C#
apache-2.0
596
package org.mk300.marshal.minimum.test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.util.Date; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.atomic.AtomicReferenceArray; import org.apache.commons.io.HexDump; import org.junit.Test; import org.mk300.marshal.minimum.MinimumMarshaller; @SuppressWarnings({"rawtypes", "unchecked"}) public class LinkedBlockingDequeTest { @Test public void testLinkedBlockingDeque() throws Exception { LinkedBlockingDeque data = new LinkedBlockingDeque(10); data.add(new Date(0)); data.add(new Date(3)); data.add(new Date(4)); data.add(new Date(5)); data.add(new Date(6)); data.add(new Date(7)); testAndPrintHexAndCheck(data); } // LinkedBlockingDeque は equalsメソッドを実装していない・・・ private void testAndPrintHexAndCheck(LinkedBlockingDeque<Date> target) throws Exception{ try { byte[] bytes = MinimumMarshaller.marshal(target); System.out.println(target.getClass().getSimpleName() + " binary size is " + bytes.length); ByteArrayOutputStream os = new ByteArrayOutputStream(); HexDump.dump(bytes, 0, os, 0); System.out.println(os.toString()); System.out.println(""); LinkedBlockingDeque<Date> o = (LinkedBlockingDeque<Date>)MinimumMarshaller.unmarshal(bytes); // 正確に復元されていることの検証 if( o.size() != target.size()) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } if( o.remainingCapacity() != target.remainingCapacity()) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } Date[] desr = o.toArray(new Date[0]); Date[] origin = target.toArray(new Date[0]); for(int i=0; i<desr.length ; i++) { if(desr[i] == null && origin[i] == null) { continue; } if(desr[i] == null || origin[i] == null) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } if( ! desr[i].equals(origin[i])) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } } } finally { } // おまけ 普通のByteArray*Streamも使えるか? 
try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); MinimumMarshaller.marshal(target, baos); byte[] bytes = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(bytes); LinkedBlockingDeque<Date> o = (LinkedBlockingDeque<Date>)MinimumMarshaller.unmarshal(bais); // 正確に復元されていることの検証 if( o.size() != target.size()) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } if( o.remainingCapacity() != target.remainingCapacity()) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } Date[] desr = o.toArray(new Date[0]); Date[] origin = target.toArray(new Date[0]); for(int i=0; i<desr.length ; i++) { if(desr[i] == null && origin[i] == null) { continue; } if(desr[i] == null || origin[i] == null) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } if( ! desr[i].equals(origin[i])) { throw new RuntimeException("オブジェクトが異なります。target=" + target + ", desr=" + o); } } } finally { } } }
mkobayas/minimum-marshaller
src/test/java/org/mk300/marshal/minimum/test/LinkedBlockingDequeTest.java
Java
apache-2.0
3,613
/* * Copyright 2013-2014 UOL - Universo Online Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package br.com.uol.runas; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.context.annotation.ComponentScan; @ComponentScan @EnableAutoConfiguration public class MainApplication { public static void main(String[] args) { System.getProperties().put("server.port", 8195); SpringApplication.run(MainApplication.class, args); } }
uolcombr/runas
src/main/java/br/com/uol/runas/MainApplication.java
Java
apache-2.0
1,072
# Copyright [2017] [name of copyright owner]
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
# Author : Alvaro Roman Royo (alvaro.varo98@gmail.com)

import http.server
import http.client
import json
import socketserver


class testHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
    """Tiny OpenFDA browser: serves a form page and proxies drug-event queries.

    Fixes over the original:
      * every HTTPSConnection is now closed (they were leaked before);
      * the request/decode/parse sequence lived in three copies and is now
        a single helper, ``_fetch_events``.
    """

    OPENFDA_API_URL = "api.fda.gov"
    OPENFDA_API_EVENT = "/drug/event.json"
    # Kept for backward compatibility; not referenced by this handler.
    OPENFDA_API_LYRICA = '?search=patient.drug.medicinalproduct:"LYRICA"&limit=10'

    def _fetch_events(self, query):
        """GET ``OPENFDA_API_EVENT + query`` from OpenFDA and return parsed JSON.

        ``query`` is the raw query string (starting with '?').  The HTTPS
        connection is always closed, even when the request fails.
        """
        conn = http.client.HTTPSConnection(self.OPENFDA_API_URL)
        try:
            conn.request("GET", self.OPENFDA_API_EVENT + query)
            r1 = conn.getresponse()
            print(r1.status, r1.reason)
            data = r1.read().decode('utf8')
            return json.loads(data)
        finally:
            conn.close()  # BUGFIX: connections were never closed before

    def get_main_page(self):
        """Return the static HTML form page (four query forms)."""
        html = '''
            <html>
                <head>
                    <title>OpenFDA app</title>
                </head>
                <body>
                    <h1>OpenFDA Client</h1>
                    <form method='get' action='receivedrug'>
                        <input type='submit' value='Enviar a OpenFDA'>
                        </input>
                    </form>
                    <form method='get' action='searchmed'>
                        <input type='text' name='drug'></input>
                        <input type='submit' value='Buscar Medicamento'></input>
                    </form>
                    <form method='get' action='receivecompany'>
                        <input type='submit' value='Find companies'></input>
                    </form>
                    <form method='get' action='searchcom'>
                        <input type='text' name='drug'></input>
                        <input type='submit' value='Buscar medicinalproduct'></input>
                    </form>
                </body>
            </html>
        '''
        return html

    def get_med(self, drug):
        """Fetch up to 10 events whose medicinalproduct matches ``drug``.

        NOTE(review): ``drug`` is interpolated unescaped into the query
        string; values containing spaces or '&' will break the request.
        """
        return self._fetch_events(
            '?search=patient.drug.medicinalproduct:' + drug + '&limit=10')

    def get_medicinalproduct(self, com_num):
        """Fetch up to 10 events whose companynumb matches ``com_num``."""
        return self._fetch_events('?search=companynumb:' + com_num + '&limit=10')

    def get_event(self):
        """Fetch the first 10 drug events (no search filter)."""
        return self._fetch_events('?limit=10')

    def get_drug(self, events):
        """Extract the first drug's medicinalproduct from each event result."""
        medicamentos = []
        for event in events['results']:
            medicamentos += [event['patient']['drug'][0]['medicinalproduct']]
        return medicamentos

    def get_com_num(self, events):
        """Extract the companynumb field from each event result."""
        com_num = []
        for event in events['results']:
            com_num += [event['companynumb']]
        return com_num

    def drug_page(self, medicamentos):
        """Render a list of strings as an HTML <ul> page."""
        s = ''
        for drug in medicamentos:
            s += "<li>" + drug + "</li>"
        html = '''
            <html>
                <head></head>
                <body>
                    <ul>
                        %s
                    </ul>
                </body>
            </html>''' % (s)
        return html

    def do_GET(self):
        """Route GET requests to the page builders above.

        NOTE(review): a 200 + HTML headers are sent unconditionally, so
        unknown paths get an empty 200 response rather than a 404 —
        preserved from the original behavior.
        """
        print(self.path)
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        if self.path == '/':
            html = self.get_main_page()
            self.wfile.write(bytes(html, 'utf8'))
        elif self.path == '/receivedrug?':
            events = self.get_event()
            medicamentos = self.get_drug(events)
            html = self.drug_page(medicamentos)
            self.wfile.write(bytes(html, 'utf8'))
        elif self.path == '/receivecompany?':
            events = self.get_event()
            com_num = self.get_com_num(events)
            html = self.drug_page(com_num)
            self.wfile.write(bytes(html, 'utf8'))
        elif 'searchmed' in self.path:
            # Path looks like /searchmed?drug=NAME; take everything after '='.
            drug = self.path.split('=')[1]
            print(drug)
            events = self.get_med(drug)
            com_num = self.get_com_num(events)
            html = self.drug_page(com_num)
            self.wfile.write(bytes(html, 'utf8'))
        elif 'searchcom' in self.path:
            com_num = self.path.split('=')[1]
            print(com_num)
            events = self.get_medicinalproduct(com_num)
            medicinalproduct = self.get_drug(events)
            html = self.drug_page(medicinalproduct)
            self.wfile.write(bytes(html, 'utf8'))
        return
varoroyo/Alvaro-Roman
web.py
Python
apache-2.0
5,333
define( ({ loadingInfo: "Carregando...", emptyInfo: "Nenhum item a exibir", loadFailInfo: "Falha ao carregar dados!" }) );
andrescabrera/gwt-dojo-toolkit
src/gwt/dojo/gridx/public/dojo/gridx/nls/pt/Body.js
JavaScript
apache-2.0
127
package net.nopattern.cordova.brightcoveplayer; /** * Created by peterchin on 6/9/16. */ public enum Cmd { LOAD, LOADED, DISABLE, ENABLE, PAUSE, PLAY, HIDE, SHOW, SEEK, RATE, RESUME, REPOSITION };
MedStudy/brightcove-player-plugin
src/android/Cmd.java
Java
apache-2.0
248
/**
 * Copyright 2010 CosmoCode GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.cosmocode.palava.model.business;

import java.util.Locale;
import java.util.Set;

import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.MappedSuperclass;
import javax.persistence.Transient;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.validator.EmailValidator;
import org.apache.commons.validator.UrlValidator;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;

import de.cosmocode.commons.Locales;
import de.cosmocode.commons.Patterns;
import de.cosmocode.commons.TrimMode;

/**
 * Abstract base implementation of the {@link AddressBase} interface.
 *
 * <p>All setters trim their input via {@link TrimMode#NULL} (blank input
 * becomes {@code null}); {@code setCountryCode}, {@code setEmail} and
 * {@code setWebsite} additionally validate the trimmed value and throw
 * {@link IllegalArgumentException} on invalid input.</p>
 *
 * @author Willi Schoenborn
 */
@Embeddable
@MappedSuperclass
public abstract class AbstractAddress implements AddressBase {

    private static final EmailValidator EMAIL_VALIDATOR = EmailValidator.getInstance();
    private static final UrlValidator URL_VALIDATOR = new UrlValidator();

    // ISO country codes whose postal convention puts the street number
    // before the street name (see getLocalizedAddress()).
    private static final Set<String> INVERSE_ADDRESS_COUNTRIES = ImmutableSet.of(
        Locale.US.getCountry(),
        Locale.UK.getCountry(),
        Locale.CANADA.getCountry(),
        Locales.AUSTRALIA.getCountry(),
        Locale.FRANCE.getCountry(),
        Locales.NEW_ZEALAND.getCountry()
    );

    private String street;

    @Column(name = "street_number")
    private String streetNumber;

    private String additional;

    @Column(name = "postal_code")
    private String postalCode;

    private String district;

    @Column(name = "city_name")
    private String cityName;

    private String state;

    @Column(name = "country_code")
    private String countryCode;

    private Double latitude;

    private Double longitude;

    private String phone;

    @Column(name = "mobile_phone")
    private String mobilePhone;

    private String fax;

    @Column(unique = true)
    private String email;

    private String website;

    // Not persisted: a live view over latitude/longitude (see InternalLocation).
    @Transient
    private final transient Location location = new InternalLocation();

    @Override
    public String getStreet() {
        return street;
    }

    @Override
    public void setStreet(String street) {
        this.street = TrimMode.NULL.apply(street);
    }

    @Override
    public String getStreetNumber() {
        return streetNumber;
    }

    @Override
    public void setStreetNumber(String streetNumber) {
        this.streetNumber = TrimMode.NULL.apply(streetNumber);
    }

    @Override
    public String getLocalizedAddress() {
        // Country-dependent ordering of street and number.
        if (INVERSE_ADDRESS_COUNTRIES.contains(countryCode)) {
            return getAddressInverse();
        } else {
            return getAddress();
        }
    }

    // "street number" ordering; note: null fields render as "null".
    private String getAddress() {
        return String.format("%s %s", street, streetNumber).trim();
    }

    // "number street" ordering for INVERSE_ADDRESS_COUNTRIES.
    private String getAddressInverse() {
        return String.format("%s %s", streetNumber, street).trim();
    }

    @Override
    public String getAdditional() {
        return additional;
    }

    @Override
    public void setAdditional(String additional) {
        this.additional = TrimMode.NULL.apply(additional);
    }

    @Override
    public String getPostalCode() {
        return postalCode;
    }

    @Override
    public void setPostalCode(String postalCode) {
        this.postalCode = TrimMode.NULL.apply(postalCode);
    }

    @Override
    public String getDistrict() {
        return district;
    }

    @Override
    public void setDistrict(String district) {
        this.district = TrimMode.NULL.apply(district);
    }

    @Override
    public String getCityName() {
        return cityName;
    }

    @Override
    public void setCityName(String cityName) {
        this.cityName = TrimMode.NULL.apply(cityName);
    }

    @Override
    public String getState() {
        return state;
    }

    @Override
    public void setState(String state) {
        this.state = TrimMode.NULL.apply(state);
    }

    @Override
    public String getCountryCode() {
        // Defensive upper-casing on read as well as on write.
        return countryCode == null ? null : countryCode.toUpperCase();
    }

    @Override
    public void setCountryCode(String code) {
        this.countryCode = StringUtils.upperCase(TrimMode.NULL.apply(code));
        if (countryCode == null) return;
        // Non-null codes must be ISO 3166-1 alpha-2 (two upper-case letters).
        Preconditions.checkArgument(Patterns.ISO_3166_1_ALPHA_2.matcher(countryCode).matches(),
            "%s does not match %s", countryCode, Patterns.ISO_3166_1_ALPHA_2.pattern()
        );
    }

    @Override
    public Location getLocation() {
        return location;
    }

    /**
     * Internal implementation of the {@link Location} interface which
     * owns a reference to the enclosing class and is able to directly manipulate the
     * corresponding values.
     *
     * @author Willi Schoenborn
     */
    private final class InternalLocation extends AbstractLocation {

        @Override
        public Double getLatitude() {
            return latitude;
        }

        @Override
        public void setLatitude(Double latitude) {
            AbstractAddress.this.latitude = latitude;
        }

        @Override
        public Double getLongitude() {
            return longitude;
        }

        @Override
        public void setLongitude(Double longitude) {
            AbstractAddress.this.longitude = longitude;
        }

    }

    @Override
    public void setLocation(Location location) {
        // Copies the coordinates; does not keep a reference to the argument.
        Preconditions.checkNotNull(location, "Location");
        this.latitude = location.getLatitude();
        this.longitude = location.getLongitude();
    }

    @Override
    public boolean hasLocation() {
        return latitude != null && longitude != null;
    }

    @Override
    public String getPhone() {
        return phone;
    }

    @Override
    public void setPhone(String phone) {
        this.phone = TrimMode.NULL.apply(phone);
    }

    @Override
    public String getMobilePhone() {
        return mobilePhone;
    }

    @Override
    public void setMobilePhone(String mobilePhone) {
        this.mobilePhone = TrimMode.NULL.apply(mobilePhone);
    }

    @Override
    public String getFax() {
        return fax;
    }

    @Override
    public void setFax(String fax) {
        this.fax = TrimMode.NULL.apply(fax);
    }

    @Override
    public String getEmail() {
        return email;
    }

    @Override
    public void setEmail(String e) {
        this.email = TrimMode.NULL.apply(e);
        if (email == null) return;
        Preconditions.checkArgument(EMAIL_VALIDATOR.isValid(email),
            "%s is not a valid email", email);
    }

    @Override
    public String getWebsite() {
        return website;
    }

    @Override
    public void setWebsite(String w) {
        this.website = TrimMode.NULL.apply(w);
        if (website == null) return;
        Preconditions.checkArgument(URL_VALIDATOR.isValid(website),
            "%s is not a valid website", website);
    }

}
palava/palava-model
src/main/java/de/cosmocode/palava/model/business/AbstractAddress.java
Java
apache-2.0
7,669
/******************************************************************************* * Copyright (C) 2017 Push Technology Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.pushtechnology.adapters.rest.model.v13; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.NonNull; import lombok.ToString; import lombok.Value; /** * Endpoint configuration. Version 13. * <p> * Description of a REST endpoint to poll. * * @author Push Technology Limited */ @Value @Builder @AllArgsConstructor @ToString(of = "name") public class EndpointConfig { /** * The name of the endpoint. */ @NonNull String name; /** * The URL of the endpoint. */ @NonNull String url; /** * The topic path to map the endpoint to. It is relative to the service * topic path root. */ @NonNull String topicPath; /** * The type of content produced by the endpoint. * <p> * Supports the values: * <ul> * <li>auto</li> * <li>json</li> * <li>application/json</li> * <li>text/json</li> * <li>string</li> * <li>text/plain</li> * <li>binary</li> * <li>application/octet-stream</li> * </ul> */ @NonNull String produces; }
pushtechnology/diffusion-rest-adapter
configuration-model/src/main/java/com/pushtechnology/adapters/rest/model/v13/EndpointConfig.java
Java
apache-2.0
1,884
// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/phi/kernels/batch_norm_kernel.h"

#include "paddle/phi/backends/cpu/cpu_context.h"
#include "paddle/phi/core/kernel_registry.h"
#include "paddle/phi/kernels/funcs/eigen/common.h"

#include "paddle/fluid/framework/tensor_util.h"

namespace phi {

// Eigen map aliases: views over raw tensor memory, no copies made.
template <typename T>
using EigenArrayMap =
    Eigen::Map<Eigen::Array<T, Eigen::Dynamic, Eigen::Dynamic>>;
template <typename T>
using ConstEigenArrayMap =
    Eigen::Map<const Eigen::Array<T, Eigen::Dynamic, Eigen::Dynamic>>;
template <typename T>
using EigenVectorArrayMap = Eigen::Map<Eigen::Array<T, Eigen::Dynamic, 1>>;
template <typename T>
using ConstEigenVectorArrayMap =
    Eigen::Map<const Eigen::Array<T, Eigen::Dynamic, 1>>;

// CPU forward batch normalization.
//
// In training mode (!global_stats) batch mean/variance are computed over
// N*sample_size elements per channel, written to saved_mean/saved_variance,
// and folded into the running statistics with `momentum`.  In inference
// mode (is_test/use_global_stats) the provided mean/variance are used.
// Normalization itself is applied as y = x * new_scale + new_bias where
// new_scale = scale / sqrt(var + epsilon).
// NOTE(review): fuse_with_relu and reserve_space are accepted but not used
// in this CPU implementation.
template <typename T, typename Context>
void BatchNormKernel(const Context& ctx,
                     const DenseTensor& x,
                     const DenseTensor& scale,
                     const DenseTensor& bias,
                     const DenseTensor& mean,
                     const DenseTensor& variance,
                     float momentum,
                     float epsilon,
                     const std::string& data_layout_str,
                     bool is_test,
                     bool use_global_stats,
                     bool trainable_statistics,
                     bool fuse_with_relu,
                     DenseTensor* y,
                     DenseTensor* mean_out,
                     DenseTensor* variance_out,
                     DenseTensor* saved_mean,
                     DenseTensor* saved_variance,
                     DenseTensor* reserve_space) {
  bool test_mode = is_test && (!trainable_statistics);
  bool global_stats = test_mode || use_global_stats;

  auto data_layout = paddle::framework::StringToDataLayout(data_layout_str);

  const auto& x_dims = x.dims();
  // Rank must be in [2, 5]: (N, C[, D][, H][, W]) or channels-last variants.
  PADDLE_ENFORCE_GE(
      x_dims.size(),
      2,
      phi::errors::InvalidArgument(
          "The size of input X's dimensions should be larger than 1."
          "But received: the size of input X's dimensions is [%d]",
          x_dims.size()));
  PADDLE_ENFORCE_LE(
      x_dims.size(),
      5,
      phi::errors::InvalidArgument(
          "The size of input X's dimensions should be less than 6."
          "But received: the size of input X's dimensionss is [%d]",
          x_dims.size()));
  const int N = x_dims[0];
  const int C = (data_layout == DataLayout::kNCHW ? x_dims[1]
                                                  : x_dims[x_dims.size() - 1]);
  // Spatial size per (n, c) pair.
  const int sample_size = x.numel() / N / C;

  // alloc memory
  ctx.template Alloc<T>(y);
  ctx.template Alloc<T>(mean_out);
  ctx.template Alloc<T>(variance_out);
  ctx.template Alloc<T>(saved_mean);
  ctx.template Alloc<T>(saved_variance);

  // input dimension is 2 and the format is NCHW. The input can be regarded
  // as NHWC format
  if (x_dims.size() == 2 && data_layout == DataLayout::kNCHW) {
    data_layout = DataLayout::kNHWC;
  }

  if (!global_stats) {
    // saved_xx is use just in this batch of data
    EigenVectorArrayMap<T> saved_mean_e(ctx.template Alloc<T>(saved_mean), C);
    EigenVectorArrayMap<T> saved_variance_e(
        ctx.template Alloc<T>(saved_variance), C);
    saved_mean_e.setZero();
    saved_variance_e.setZero();

    EigenVectorArrayMap<T> running_mean_arr(ctx.template Alloc<T>(mean_out),
                                            C);
    EigenVectorArrayMap<T> running_var_arr(ctx.template Alloc<T>(variance_out),
                                           C);

    if ((N * sample_size) == 1) {
      // Only 1 element in normalization dimension,
      // we skip the batch norm calculation, let y = x.
      paddle::framework::TensorCopy(x, ctx.GetPlace(), y);
      return;
    }

    switch (data_layout) {
      case DataLayout::kNCHW: {
        // Columns of x_arr are the sample_size values of one (n, c) pair;
        // nc % C recovers the channel index.
        ConstEigenArrayMap<T> x_arr(x.data<T>(), sample_size, N * C);
        for (int nc = 0; nc < N * C; ++nc) {
          saved_mean_e(nc % C) += x_arr.col(nc).sum();
        }
        saved_mean_e /= N * sample_size;
        for (int nc = 0; nc < N * C; ++nc) {
          saved_variance_e(nc % C) +=
              (x_arr.col(nc) - saved_mean_e(nc % C)).matrix().squaredNorm();
        }
        saved_variance_e /= N * sample_size;
        break;
      }
      case DataLayout::kNHWC: {
        // Columns of x_arr are C-vectors; accumulate channel-wise.
        ConstEigenArrayMap<T> x_arr(x.data<T>(), C, N * sample_size);
        for (int i = 0; i < N * sample_size; ++i) {
          saved_mean_e += x_arr.col(i);
        }
        saved_mean_e /= N * sample_size;
        for (int i = 0; i < N * sample_size; ++i) {
          saved_variance_e +=
              (x_arr.col(i) - saved_mean_e) * (x_arr.col(i) - saved_mean_e);
        }
        saved_variance_e /= N * sample_size;
        break;
      }
      default:
        PADDLE_THROW(phi::errors::InvalidArgument("Unknown storage order: %s",
                                                  data_layout_str));
    }

    // if MomentumTensor is set, use MomentumTensor value, momentum
    // is only used in this training branch
    running_mean_arr =
        running_mean_arr * momentum + saved_mean_e * (1. - momentum);
    running_var_arr =
        running_var_arr * momentum + saved_variance_e * (1. - momentum);
  }

  // use SavedMean and SavedVariance to do normalize
  Eigen::Array<T, Eigen::Dynamic, 1> inv_std(C);
  if (global_stats) {
    ConstEigenVectorArrayMap<T> var_arr(variance.data<T>(), C);
    inv_std = (var_arr + epsilon).sqrt().inverse();
  } else {
    EigenVectorArrayMap<T> saved_inv_std(saved_variance->data<T>(), C);
    // inverse SavedVariance first, gradient will use it too.
    // NOTE: this overwrites saved_variance in place with 1/sqrt(var+eps).
    saved_inv_std = (saved_inv_std + epsilon).inverse().sqrt();
    inv_std = saved_inv_std;
  }
  ConstEigenVectorArrayMap<T> mean_arr(
      global_stats ? mean.data<T>() : saved_mean->data<T>(), C);

  //   ((x - est_mean) * (inv_var) * scale + bias
  //   formula transform ====>
  //   (x * inv_var * scale) + (bias - est_mean * inv_var * scale)
  ConstEigenVectorArrayMap<T> scale_arr(scale.data<T>(), C);
  ConstEigenVectorArrayMap<T> bias_arr(bias.data<T>(), C);
  Eigen::Array<T, Eigen::Dynamic, 1> new_scale = inv_std * scale_arr;
  Eigen::Array<T, Eigen::Dynamic, 1> new_bias =
      bias_arr - mean_arr * inv_std * scale_arr;

  switch (data_layout) {
    case DataLayout::kNCHW: {
      EigenArrayMap<T> y_arr(ctx.template Alloc<T>(y), sample_size, N * C);
      ConstEigenArrayMap<T> x_arr(x.data<T>(), sample_size, N * C);
      for (int nc = 0; nc < N * C; ++nc) {
        y_arr.col(nc) = x_arr.col(nc) * new_scale(nc % C) + new_bias(nc % C);
      }
      break;
    }
    case DataLayout::kNHWC: {
      // Single vectorized expression: broadcast new_scale/new_bias over
      // every C-column.
      EigenArrayMap<T>(ctx.template Alloc<T>(y), C, N * sample_size) =
          (ConstEigenArrayMap<T>(x.data<T>(), C, N * sample_size).colwise() *
           new_scale)
              .colwise() +
          new_bias;
      break;
    }
    default:
      PADDLE_THROW(phi::errors::InvalidArgument("Unknown storage order: %d",
                                                data_layout));
  }
}

}  // namespace phi

PD_REGISTER_KERNEL(
    batch_norm, CPU, ALL_LAYOUT, phi::BatchNormKernel, float, double) {}
PaddlePaddle/Paddle
paddle/phi/kernels/cpu/batch_norm_kernel.cc
C++
apache-2.0
7,683
/*
 * Copyright 2015 - FOCONIS AG
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express o
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package org.openntf.domino.commons;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

/**
 * Class to compute several hashes. (mainly MD5, maybe extended to SHA-1 and others)
 *
 * Fixes over the previous revision:
 * - checksum(Serializable, ...) now closes the ObjectOutputStream BEFORE
 *   reading the byte array; ObjectOutputStream buffers block data, so the
 *   old code could digest an incomplete serialized form.
 * - the md5(...) convenience overloads preserve the NoSuchAlgorithmException
 *   as the cause of the thrown IllegalStateException.
 *
 * @author Roland Praml, FOCONIS AG
 */
public enum Hash {
	;

	/** Algorithm name used by the md5(...) convenience overloads. */
	private static final String MD5 = "MD5";

	/**
	 * Checksum (various variants). The algorithm to be used has to be passed as parameter.
	 *
	 * @param whose the bytes to digest
	 * @param algorithmName a {@link MessageDigest} algorithm name, e.g. "MD5" or "SHA-256"
	 * @return the digest as a lower-case hex string (leading zeros are not padded,
	 *         because BigInteger.toString(16) drops them)
	 * @throws NoSuchAlgorithmException if the algorithm is not available
	 */
	public static String checksum(final byte[] whose, final String algorithmName) throws NoSuchAlgorithmException {
		MessageDigest algorithm = MessageDigest.getInstance(algorithmName);
		algorithm.reset();
		algorithm.update(whose);
		return finishChecksum(algorithm);
	}

	/** Converts the finished digest to a hex string (unsigned, leading zeros dropped). */
	private static String finishChecksum(final MessageDigest algorithm) {
		BigInteger bi = new BigInteger(1, algorithm.digest());
		return bi.toString(16);
	}

	/** Checksum of the UTF-8 encoding of a string. */
	public static String checksum(final String whose, final String alg) throws NoSuchAlgorithmException {
		return checksum(whose.getBytes(Strings.UTF_8_CHARSET), alg);
	}

	/**
	 * Checksum of a file's contents, read in 32 KiB chunks.
	 *
	 * @throws IOException if the file cannot be read
	 * @throws NoSuchAlgorithmException if the algorithm is not available
	 */
	public static String checksum(final File whose, final String alg) throws IOException, NoSuchAlgorithmException {
		byte[] buffer = new byte[32768];
		MessageDigest algorithm = MessageDigest.getInstance(alg);
		algorithm.reset();
		FileInputStream fis = new FileInputStream(whose);
		int nread;
		try {
			while ((nread = fis.read(buffer)) > 0)
				algorithm.update(buffer, 0, nread);
			return finishChecksum(algorithm);
		} finally {
			fis.close();
		}
	}

	/**
	 * Checksum of the Java serialized form of an object.
	 *
	 * @throws RuntimeException wrapping any unexpected IOException from in-memory serialization
	 */
	public static String checksum(final Serializable whose, final String algorithm) throws NoSuchAlgorithmException {
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		try {
			ObjectOutputStream out = new ObjectOutputStream(baos);
			try {
				out.writeObject(whose);
			} finally {
				out.close();
			}
			// BUGFIX: digest only after close() has flushed the stream's
			// buffered block data into baos.
			return checksum(baos.toByteArray(), algorithm);
		} catch (IOException ioex) {
			throw new RuntimeException("Unexpected IOException", ioex);
		}
	}

	/**
	 * Same variants for MD5.
	 */
	public static String md5(final byte[] whose) {
		try {
			return checksum(whose, MD5);
		} catch (NoSuchAlgorithmException e) {
			// Every JRE is required to ship MD5; keep the cause for diagnostics.
			throw new IllegalStateException("No MD5 algorithm present, why that?", e);
		}
	}

	public static String md5(final String whose) {
		try {
			return checksum(whose, MD5);
		} catch (NoSuchAlgorithmException e) {
			throw new IllegalStateException("No MD5 algorithm present, why that?", e);
		}
	}

	public static String md5(final File whose) throws IOException {
		try {
			return checksum(whose, MD5);
		} catch (NoSuchAlgorithmException e) {
			throw new IllegalStateException("No MD5 algorithm present, why that?", e);
		}
	}

	public static String md5(final Serializable whose) {
		try {
			return checksum(whose, MD5);
		} catch (NoSuchAlgorithmException e) {
			throw new IllegalStateException("No MD5 algorithm present, why that?", e);
		}
	}
}
rPraml/org.openntf.domino
domino/commons/src/main/java/org/openntf/domino/commons/Hash.java
Java
apache-2.0
3,653
<?php /** * INJI * * @author Alexey Krupskiy <admin@inji.ru> * @link http://inji.ru/ * @copyright 2017 Alexey Krupskiy * @license https://github.com/injitools/Inji/blob/master/LICENSE */ class Search extends Module { }
injitools/cms-Inji
system/modules/Search/Search.php
PHP
apache-2.0
227
/*
 * Minio Go Library for Amazon S3 Compatible Cloud Storage (C) 2015, 2016 Minio, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package minio

import (
	"bytes"
	"crypto/md5"
	"crypto/sha256"
	"encoding/hex"
	"encoding/xml"
	"fmt"
	"hash"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"os"
	"sort"
	"strconv"
	"strings"
)

// Comprehensive put object operation involving multipart resumable uploads.
//
// Following code handles these types of readers.
//
//  - *os.File
//  - *minio.Object
//  - Any reader which has a method 'ReadAt()'
//
// If we exhaust all the known types, code proceeds to use stream as
// is where each part is re-downloaded, checksummed and verified
// before upload.
func (c Client) putObjectMultipart(bucketName, objectName string, reader io.Reader, size int64, contentType string, progress io.Reader) (n int64, err error) {
	// Only dispatch to the ReadAt-based paths when size is known and large
	// enough to need more than one part.
	if size > 0 && size > minPartSize {
		// Verify if reader is *os.File, then use file system functionalities.
		if isFile(reader) {
			return c.putObjectMultipartFromFile(bucketName, objectName, reader.(*os.File), size, contentType, progress)
		}
		// Verify if reader is *minio.Object or io.ReaderAt.
		// NOTE: Verification of object is kept for a specific purpose
		// while it is going to be duck typed similar to io.ReaderAt.
		// It is to indicate that *minio.Object implements io.ReaderAt.
		// and such a functionality is used in the subsequent code
		// path.
		if isObject(reader) || isReadAt(reader) {
			return c.putObjectMultipartFromReadAt(bucketName, objectName, reader.(io.ReaderAt), size, contentType, progress)
		}
	}
	// For any other data size and reader type we do generic multipart
	// approach by staging data in temporary files and uploading them.
	return c.putObjectMultipartStream(bucketName, objectName, reader, size, contentType, progress)
}

// putObjectStream uploads files bigger than 5MiB, and also supports
// special case where size is unknown i.e '-1'.
func (c Client) putObjectMultipartStream(bucketName, objectName string, reader io.Reader, size int64, contentType string, progress io.Reader) (n int64, err error) {
	// Input validation.
	if err := isValidBucketName(bucketName); err != nil {
		return 0, err
	}
	if err := isValidObjectName(objectName); err != nil {
		return 0, err
	}

	// Total data read and written to server. should be equal to 'size' at the end of the call.
	var totalUploadedSize int64

	// Complete multipart upload.
	var complMultipartUpload completeMultipartUpload

	// A map of all previously uploaded parts.
	var partsInfo = make(map[int]objectPart)

	// getUploadID for an object, initiates a new multipart request
	// if it cannot find any previously partially uploaded object.
	uploadID, isNew, err := c.getUploadID(bucketName, objectName, contentType)
	if err != nil {
		return 0, err
	}

	// If This session is a continuation of a previous session fetch all
	// previously uploaded parts info and as a special case only fetch partsInfo
	// for only known upload size.
	if !isNew {
		// Fetch previously uploaded parts and maximum part size.
		partsInfo, err = c.listObjectParts(bucketName, objectName, uploadID)
		if err != nil {
			return 0, err
		}
	}

	// Calculate the optimal parts info for a given size.
	totalPartsCount, partSize, _, err := optimalPartInfo(size)
	if err != nil {
		return 0, err
	}

	// Part number always starts with '1'.
	partNumber := 1

	// Initialize a temporary buffer. Reused (Reset) for every part to
	// avoid reallocating the staging area.
	tmpBuffer := new(bytes.Buffer)

	for partNumber <= totalPartsCount {
		// Choose hash algorithms to be calculated by hashCopyN, avoid sha256
		// with non-v4 signature request or HTTPS connection
		hashSums := make(map[string][]byte)
		hashAlgos := make(map[string]hash.Hash)
		hashAlgos["md5"] = md5.New()
		if c.signature.isV4() && !c.secure {
			hashAlgos["sha256"] = sha256.New()
		}

		// Calculates hash sums while copying partSize bytes into tmpBuffer.
		prtSize, rErr := hashCopyN(hashAlgos, hashSums, tmpBuffer, reader, partSize)
		if rErr != nil {
			if rErr != io.EOF {
				return 0, rErr
			}
		}

		// NOTE: this declaration shadows the function parameter `reader`
		// for the remainder of the loop body; the part body is read from
		// tmpBuffer (wrapped with the progress hook), not from the source.
		var reader io.Reader
		// Update progress reader appropriately to the latest offset
		// as we read from the source.
		reader = newHook(tmpBuffer, progress)

		part, ok := partsInfo[partNumber]

		// Verify if part should be uploaded.
		if !ok || shouldUploadPart(objectPart{
			ETag:       hex.EncodeToString(hashSums["md5"]),
			PartNumber: partNumber,
			Size:       prtSize,
		}, uploadPartReq{PartNum: partNumber, Part: &part}) {
			// Proceed to upload the part.
			var objPart objectPart
			objPart, err = c.uploadPart(bucketName, objectName, uploadID, reader, partNumber, hashSums["md5"], hashSums["sha256"], prtSize)
			if err != nil {
				// Reset the temporary buffer upon any error.
				tmpBuffer.Reset()
				return totalUploadedSize, err
			}
			// Save successfully uploaded part metadata.
			partsInfo[partNumber] = objPart
		} else {
			// Update the progress reader for the skipped part.
			if progress != nil {
				if _, err = io.CopyN(ioutil.Discard, progress, prtSize); err != nil {
					return totalUploadedSize, err
				}
			}
		}

		// Reset the temporary buffer.
		tmpBuffer.Reset()

		// Save successfully uploaded size.
		totalUploadedSize += prtSize

		// Increment part number.
		partNumber++

		// For unknown size, Read EOF we break away.
		// We do not have to upload till totalPartsCount.
		if size < 0 && rErr == io.EOF {
			break
		}
	}

	// Verify if we uploaded all the data.
	if size > 0 {
		if totalUploadedSize != size {
			return totalUploadedSize, ErrUnexpectedEOF(totalUploadedSize, size, bucketName, objectName)
		}
	}

	// Loop over total uploaded parts to save them in
	// Parts array before completing the multipart request.
	for i := 1; i < partNumber; i++ {
		part, ok := partsInfo[i]
		if !ok {
			return 0, ErrInvalidArgument(fmt.Sprintf("Missing part number %d", i))
		}
		complMultipartUpload.Parts = append(complMultipartUpload.Parts, completePart{
			ETag:       part.ETag,
			PartNumber: part.PartNumber,
		})
	}

	// Sort all completed parts.
	sort.Sort(completedParts(complMultipartUpload.Parts))
	_, err = c.completeMultipartUpload(bucketName, objectName, uploadID, complMultipartUpload)
	if err != nil {
		return totalUploadedSize, err
	}

	// Return final size.
	return totalUploadedSize, nil
}

// initiateMultipartUpload - Initiates a multipart upload and returns an upload ID.
func (c Client) initiateMultipartUpload(bucketName, objectName, contentType string) (initiateMultipartUploadResult, error) {
	// Input validation.
	if err := isValidBucketName(bucketName); err != nil {
		return initiateMultipartUploadResult{}, err
	}
	if err := isValidObjectName(objectName); err != nil {
		return initiateMultipartUploadResult{}, err
	}

	// Initialize url queries.
	urlValues := make(url.Values)
	urlValues.Set("uploads", "")

	if contentType == "" {
		contentType = "application/octet-stream"
	}

	// Set ContentType header.
	customHeader := make(http.Header)
	customHeader.Set("Content-Type", contentType)

	reqMetadata := requestMetadata{
		bucketName:   bucketName,
		objectName:   objectName,
		queryValues:  urlValues,
		customHeader: customHeader,
	}

	// Execute POST on an objectName to initiate multipart upload.
	resp, err := c.executeMethod("POST", reqMetadata)
	defer closeResponse(resp)
	if err != nil {
		return initiateMultipartUploadResult{}, err
	}
	if resp != nil {
		if resp.StatusCode != http.StatusOK {
			return initiateMultipartUploadResult{}, httpRespToErrorResponse(resp, bucketName, objectName)
		}
	}
	// Decode xml for new multipart upload.
	initiateMultipartUploadResult := initiateMultipartUploadResult{}
	err = xmlDecoder(resp.Body, &initiateMultipartUploadResult)
	if err != nil {
		return initiateMultipartUploadResult, err
	}
	return initiateMultipartUploadResult, nil
}

// uploadPart - Uploads a part in a multipart upload.
func (c Client) uploadPart(bucketName, objectName, uploadID string, reader io.Reader, partNumber int, md5Sum, sha256Sum []byte, size int64) (objectPart, error) {
	// Input validation.
	if err := isValidBucketName(bucketName); err != nil {
		return objectPart{}, err
	}
	if err := isValidObjectName(objectName); err != nil {
		return objectPart{}, err
	}
	if size > maxPartSize {
		return objectPart{}, ErrEntityTooLarge(size, maxPartSize, bucketName, objectName)
	}
	if size <= -1 {
		return objectPart{}, ErrEntityTooSmall(size, bucketName, objectName)
	}
	if partNumber <= 0 {
		return objectPart{}, ErrInvalidArgument("Part number cannot be negative or equal to zero.")
	}
	if uploadID == "" {
		return objectPart{}, ErrInvalidArgument("UploadID cannot be empty.")
	}

	// Get resources properly escaped and lined up before using them in http request.
	urlValues := make(url.Values)
	// Set part number.
	urlValues.Set("partNumber", strconv.Itoa(partNumber))
	// Set upload id.
	urlValues.Set("uploadId", uploadID)

	reqMetadata := requestMetadata{
		bucketName:         bucketName,
		objectName:         objectName,
		queryValues:        urlValues,
		contentBody:        reader,
		contentLength:      size,
		contentMD5Bytes:    md5Sum,
		contentSHA256Bytes: sha256Sum,
	}

	// Execute PUT on each part.
	resp, err := c.executeMethod("PUT", reqMetadata)
	defer closeResponse(resp)
	if err != nil {
		return objectPart{}, err
	}
	if resp != nil {
		if resp.StatusCode != http.StatusOK {
			return objectPart{}, httpRespToErrorResponse(resp, bucketName, objectName)
		}
	}
	// Once successfully uploaded, return completed part.
	objPart := objectPart{}
	objPart.Size = size
	objPart.PartNumber = partNumber
	// Trim off the odd double quotes from ETag in the beginning and end.
	objPart.ETag = strings.TrimPrefix(resp.Header.Get("ETag"), "\"")
	objPart.ETag = strings.TrimSuffix(objPart.ETag, "\"")
	return objPart, nil
}

// completeMultipartUpload - Completes a multipart upload by assembling previously uploaded parts.
func (c Client) completeMultipartUpload(bucketName, objectName, uploadID string, complete completeMultipartUpload) (completeMultipartUploadResult, error) {
	// Input validation.
	if err := isValidBucketName(bucketName); err != nil {
		return completeMultipartUploadResult{}, err
	}
	if err := isValidObjectName(objectName); err != nil {
		return completeMultipartUploadResult{}, err
	}

	// Initialize url queries.
	urlValues := make(url.Values)
	urlValues.Set("uploadId", uploadID)

	// Marshal complete multipart body.
	completeMultipartUploadBytes, err := xml.Marshal(complete)
	if err != nil {
		return completeMultipartUploadResult{}, err
	}

	// Instantiate all the complete multipart buffer.
	completeMultipartUploadBuffer := bytes.NewReader(completeMultipartUploadBytes)
	reqMetadata := requestMetadata{
		bucketName:         bucketName,
		objectName:         objectName,
		queryValues:        urlValues,
		contentBody:        completeMultipartUploadBuffer,
		contentLength:      int64(len(completeMultipartUploadBytes)),
		contentSHA256Bytes: sum256(completeMultipartUploadBytes),
	}

	// Execute POST to complete multipart upload for an objectName.
	resp, err := c.executeMethod("POST", reqMetadata)
	defer closeResponse(resp)
	if err != nil {
		return completeMultipartUploadResult{}, err
	}
	if resp != nil {
		if resp.StatusCode != http.StatusOK {
			return completeMultipartUploadResult{}, httpRespToErrorResponse(resp, bucketName, objectName)
		}
	}

	// Read resp.Body into a []bytes to parse for Error response inside the body
	var b []byte
	b, err = ioutil.ReadAll(resp.Body)
	if err != nil {
		return completeMultipartUploadResult{}, err
	}
	// Decode completed multipart upload response on success.
	completeMultipartUploadResult := completeMultipartUploadResult{}
	err = xmlDecoder(bytes.NewReader(b), &completeMultipartUploadResult)
	if err != nil {
		// xml parsing failure due to presence an ill-formed xml fragment
		return completeMultipartUploadResult, err
	} else if completeMultipartUploadResult.Bucket == "" {
		// xml's Decode method ignores well-formed xml that don't apply to the type of value supplied.
		// In this case, it would leave completeMultipartUploadResult with the corresponding zero-values
		// of the members.

		// Decode completed multipart upload response on failure
		completeMultipartUploadErr := ErrorResponse{}
		err = xmlDecoder(bytes.NewReader(b), &completeMultipartUploadErr)
		if err != nil {
			// xml parsing failure due to presence an ill-formed xml fragment
			return completeMultipartUploadResult, err
		}
		return completeMultipartUploadResult, completeMultipartUploadErr
	}
	return completeMultipartUploadResult, nil
}
mgit-at/arti
vendor/github.com/minio/minio-go/api-put-object-multipart.go
GO
apache-2.0
13,084
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.inspector.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.inspector.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * DurationRange JSON Unmarshaller
 * <p>
 * Generated code: walks the JSON token stream one object level deep
 * (targetDepth) and populates minSeconds/maxSeconds on a DurationRange.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DurationRangeJsonUnmarshaller implements Unmarshaller<DurationRange, JsonUnmarshallerContext> {

    public DurationRange unmarshall(JsonUnmarshallerContext context) throws Exception {
        DurationRange durationRange = new DurationRange();

        // Remember the depth at entry; fields are matched one level deeper.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // JSON null for the whole object maps to a null result.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("minSeconds", targetDepth)) {
                    context.nextToken();
                    durationRange.setMinSeconds(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("maxSeconds", targetDepth)) {
                    context.nextToken();
                    durationRange.setMaxSeconds(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out to the depth we entered at.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return durationRange;
    }

    // Lazily-created singleton; SDK unmarshallers are stateless.
    private static DurationRangeJsonUnmarshaller instance;

    public static DurationRangeJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DurationRangeJsonUnmarshaller();
        return instance;
    }
}
jentfoo/aws-sdk-java
aws-java-sdk-inspector/src/main/java/com/amazonaws/services/inspector/model/transform/DurationRangeJsonUnmarshaller.java
Java
apache-2.0
2,962
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package kieker.tools.slastic.metamodel.componentAssembly;

import kieker.tools.slastic.metamodel.core.FQNamedEntity;
import kieker.tools.slastic.metamodel.typeRepository.ConnectorType;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Assembly Connector</b></em>'.
 *
 * EMF-generated abstract model interface (see the {@code @model abstract="true"} tag):
 * an assembly-level connector that is typed by a {@link ConnectorType} from the type
 * repository and inherits its fully-qualified naming from {@link FQNamedEntity}.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * <ul>
 *   <li>{@link kieker.tools.slastic.metamodel.componentAssembly.AssemblyConnector#getConnectorType <em>Connector Type</em>}</li>
 * </ul>
 * </p>
 *
 * @see kieker.tools.slastic.metamodel.componentAssembly.ComponentAssemblyPackage#getAssemblyConnector()
 * @model abstract="true"
 * @generated
 */
public interface AssemblyConnector extends FQNamedEntity {
	/**
	 * Returns the value of the '<em><b>Connector Type</b></em>' reference.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The connector type from the type repository that this assembly connector
	 * instantiates. The reference is declared {@code required="true"} in the model,
	 * so a well-formed instance always has a non-null connector type.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Connector Type</em>' reference.
	 * @see #setConnectorType(ConnectorType)
	 * @see kieker.tools.slastic.metamodel.componentAssembly.ComponentAssemblyPackage#getAssemblyConnector_ConnectorType()
	 * @model required="true" ordered="false"
	 * @generated
	 */
	ConnectorType getConnectorType();

	/**
	 * Sets the value of the '{@link kieker.tools.slastic.metamodel.componentAssembly.AssemblyConnector#getConnectorType <em>Connector Type</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Connector Type</em>' reference.
	 * @see #getConnectorType()
	 * @generated
	 */
	void setConnectorType(ConnectorType value);

} // AssemblyConnector
SLAsticSPE/slastic
src-gen/kieker/tools/slastic/metamodel/componentAssembly/AssemblyConnector.java
Java
apache-2.0
1,803
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.features.project.intellij;

import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.features.project.intellij.model.ModuleIndexEntry;
import com.facebook.buck.util.xml.XmlDomParser;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/** Responsible for parsing an existing modules.xml file */
public class IntellijModulesListParser {

  // Fixed: loggers are constants; was a mutable (non-final) static field.
  private static final Logger LOG = Logger.get(IntellijModulesListParser.class);

  // Hardcoded prefix the modules.xml template prepends to module file paths.
  // Was duplicated as an inline literal at two call sites.
  private static final String PROJECT_DIR_PREFIX = "$PROJECT_DIR$";

  /**
   * Parses an IntelliJ {@code modules.xml} document and returns one entry per
   * {@code <module>} element that carries a {@code filepath} attribute.
   *
   * @param modulesFile modules.xml input stream
   * @return A list of module entries as specified by the modules.xml file
   * @throws IOException if the stream cannot be read
   * @throws HumanReadableException if the XML is malformed or the XPath query fails
   */
  public ImmutableSet<ModuleIndexEntry> getAllModules(InputStream modulesFile) throws IOException {
    final Document doc;
    try {
      doc = XmlDomParser.parse(modulesFile);
    } catch (SAXException e) {
      LOG.error("Cannot read modules.xml file", e);
      throw new HumanReadableException("Could not update 'modules.xml' file because it is malformed", e);
    }
    final Builder<ModuleIndexEntry> builder = ImmutableSet.builder();
    try {
      XPath xpath = XPathFactory.newInstance().newXPath();
      final NodeList moduleList =
          (NodeList) xpath.compile("/project/component/modules/module").evaluate(doc, XPathConstants.NODESET);
      for (int i = 0; i < moduleList.getLength(); i++) {
        final Element moduleEntry = (Element) moduleList.item(i);
        // Entries without a filepath cannot be indexed; skip them silently.
        if (!moduleEntry.hasAttribute("filepath")) {
          continue;
        }
        String filepath = moduleEntry.getAttribute("filepath");
        String fileurl = moduleEntry.getAttribute("fileurl");
        String filepathWithoutProjectPrefix;
        // The template has a hardcoded $PROJECT_DIR$/ prefix, so we need to strip that out
        // of the value we pass to ST. NOTE(review): the "+ 1" assumes a separator character
        // immediately follows the prefix; a bare "$PROJECT_DIR$" value would throw — confirm
        // the template guarantees the trailing slash.
        if (filepath.startsWith(PROJECT_DIR_PREFIX)) {
          filepathWithoutProjectPrefix = filepath.substring(PROJECT_DIR_PREFIX.length() + 1);
        } else {
          filepathWithoutProjectPrefix = filepath;
        }
        builder.add(
            ModuleIndexEntry.builder()
                .setFilePath(Paths.get(filepathWithoutProjectPrefix))
                .setFileUrl(fileurl)
                // "group" is optional in modules.xml; absent groups are recorded as null.
                .setGroup(moduleEntry.hasAttribute("group") ? moduleEntry.getAttribute("group") : null)
                .build());
      }
    } catch (XPathExpressionException e) {
      throw new HumanReadableException("Illegal xpath expression.", e);
    }
    return builder.build();
  }
}
zpao/buck
src/com/facebook/buck/features/project/intellij/IntellijModulesListParser.java
Java
apache-2.0
3,637
//-----------------------------------------------------------------------------
//
// (C) Brandon Valosek, 2011 <bvalosek@gmail.com>
//
//-----------------------------------------------------------------------------

// Modified by Willi Ye to work with big.LITTLE

package com.bvalosek.cpuspy;

import android.app.Application;
import android.content.Context;

import com.swapnil133609.zeuscontrols.utils.Utils;

import java.util.HashMap;
import java.util.Map;

/**
 * main application class
 *
 * Holds the per-core {@link CpuStateMonitor} and persists/restores the user's
 * frequency-state time offsets in shared preferences, serialized as
 * "freq duration,freq duration,..." under a per-core key.
 */
public class CpuSpyApp extends Application {

    /** Per-core preference key ("offsets" + core number) for the serialized offsets. */
    private final String PREF_OFFSETS;

    /**
     * the long-living object used to monitor the system frequency states
     */
    private final CpuStateMonitor _monitor;

    /**
     * @param core index of the CPU core this application instance monitors
     */
    public CpuSpyApp(int core) {
        PREF_OFFSETS = "offsets" + core;
        _monitor = new CpuStateMonitor(core);
    }

    /**
     * On application start, load the saved offsets and stash the current kernel
     * version string
     */
    @Override
    public void onCreate() {
        super.onCreate();
        loadOffsets(getApplicationContext());
    }

    /**
     * @return the internal CpuStateMonitor object
     */
    public CpuStateMonitor getCpuStateMonitor() {
        return _monitor;
    }

    /**
     * Load the saved string of offsets from preferences and put it into the
     * state monitor
     */
    public void loadOffsets(Context context) {
        String prefs = Utils.getString(PREF_OFFSETS, "", context);
        if (prefs.length() < 1) return;

        // Fixed comment: entries are separated by commas, and each entry's
        // frequency/duration pair by a space (matches saveOffsets' format).
        // NOTE(review): a malformed entry (no space) would throw here — assumes
        // the preference was only ever written by saveOffsets.
        Map<Integer, Long> offsets = new HashMap<>();
        String[] sOffsets = prefs.split(",");
        for (String offset : sOffsets) {
            String[] parts = offset.split(" ");
            offsets.put(Utils.stringToInt(parts[0]), Utils.stringToLong(parts[1]));
        }

        _monitor.setOffsets(offsets);
    }

    /**
     * Save the state-time offsets as a string e.g. "100 24,200 251,500 124" etc
     */
    public void saveOffsets(Context context) {
        // Fixed: build via StringBuilder instead of repeated String concatenation
        // in a loop (O(n^2) allocations); output format is unchanged.
        StringBuilder str = new StringBuilder();
        for (Map.Entry<Integer, Long> entry : _monitor.getOffsets().entrySet()) {
            str.append(entry.getKey()).append(' ').append(entry.getValue()).append(',');
        }

        Utils.saveString(PREF_OFFSETS, str.toString(), context);
    }
}
Swapnil133609/Zeus-Controls
app/src/main/java/com/bvalosek/cpuspy/CpuSpyApp.java
Java
apache-2.0
2,362
// Registers the 'x-foo' custom element via the legacy (Polymer 0.x) global
// Polymer() registration call. NOTE(review): no prototype object is passed,
// so the element gets default behavior only — presumably its template and
// styling come from the accompanying HTML import; confirm against the
// workbench host.html this script was extracted from.
Polymer('x-foo')
jarrodek/feed-reader
paper/lib_new/polymer/workbench/styling/host.html.0.js
JavaScript
apache-2.0
16
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kendra.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Defines the mapping between a field in the Confluence data source to an Amazon Kendra index field.
 * </p>
 * <p>
 * You must first create the index field using the <code>UpdateIndex</code> API.
 * </p>
 * <p>
 * NOTE(review): generated, mutable POJO following the AWS SDK model template: plain setters plus fluent
 * {@code with*} variants that return {@code this}. All fields are nullable String values; not thread-safe.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kendra-2019-02-03/ConfluenceSpaceToIndexFieldMapping"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ConfluenceSpaceToIndexFieldMapping implements Serializable, Cloneable, StructuredPojo {

    /** The name of the field in the data source (see {@code ConfluenceSpaceFieldName} for valid values). */
    private String dataSourceFieldName;
    /**
     * The format for date fields in the data source. Required if the field named by
     * {@code dataSourceFieldName} is a date field; an exception is thrown otherwise.
     */
    private String dateFieldFormat;
    /**
     * The name of the index field to map to the Confluence data source field. The index field type must
     * match the Confluence field type.
     */
    private String indexFieldName;

    /**
     * @param dataSourceFieldName the name of the field in the data source
     * @see ConfluenceSpaceFieldName
     */
    public void setDataSourceFieldName(String dataSourceFieldName) {
        this.dataSourceFieldName = dataSourceFieldName;
    }

    /**
     * @return the name of the field in the data source
     * @see ConfluenceSpaceFieldName
     */
    public String getDataSourceFieldName() {
        return this.dataSourceFieldName;
    }

    /**
     * Fluent setter.
     *
     * @param dataSourceFieldName the name of the field in the data source
     * @return this object, for method-call chaining
     * @see ConfluenceSpaceFieldName
     */
    public ConfluenceSpaceToIndexFieldMapping withDataSourceFieldName(String dataSourceFieldName) {
        setDataSourceFieldName(dataSourceFieldName);
        return this;
    }

    /**
     * Fluent setter accepting the enum form; stored as the enum's string value.
     *
     * @param dataSourceFieldName the name of the field in the data source
     * @return this object, for method-call chaining
     * @see ConfluenceSpaceFieldName
     */
    public ConfluenceSpaceToIndexFieldMapping withDataSourceFieldName(ConfluenceSpaceFieldName dataSourceFieldName) {
        this.dataSourceFieldName = dataSourceFieldName.toString();
        return this;
    }

    /**
     * @param dateFieldFormat the format for date fields in the data source; required when the mapped
     *        field is a date field, otherwise an exception is thrown
     */
    public void setDateFieldFormat(String dateFieldFormat) {
        this.dateFieldFormat = dateFieldFormat;
    }

    /**
     * @return the format for date fields in the data source
     */
    public String getDateFieldFormat() {
        return this.dateFieldFormat;
    }

    /**
     * Fluent setter.
     *
     * @param dateFieldFormat the format for date fields in the data source
     * @return this object, for method-call chaining
     */
    public ConfluenceSpaceToIndexFieldMapping withDateFieldFormat(String dateFieldFormat) {
        setDateFieldFormat(dateFieldFormat);
        return this;
    }

    /**
     * @param indexFieldName the name of the index field to map to the Confluence data source field; the
     *        index field type must match the Confluence field type
     */
    public void setIndexFieldName(String indexFieldName) {
        this.indexFieldName = indexFieldName;
    }

    /**
     * @return the name of the index field to map to the Confluence data source field
     */
    public String getIndexFieldName() {
        return this.indexFieldName;
    }

    /**
     * Fluent setter.
     *
     * @param indexFieldName the name of the index field to map to the Confluence data source field
     * @return this object, for method-call chaining
     */
    public ConfluenceSpaceToIndexFieldMapping withIndexFieldName(String indexFieldName) {
        setIndexFieldName(indexFieldName);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDataSourceFieldName() != null)
            sb.append("DataSourceFieldName: ").append(getDataSourceFieldName()).append(",");
        if (getDateFieldFormat() != null)
            sb.append("DateFieldFormat: ").append(getDateFieldFormat()).append(",");
        if (getIndexFieldName() != null)
            sb.append("IndexFieldName: ").append(getIndexFieldName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ConfluenceSpaceToIndexFieldMapping == false)
            return false;
        ConfluenceSpaceToIndexFieldMapping other = (ConfluenceSpaceToIndexFieldMapping) obj;
        // Generated null-safe comparison: the XOR detects "exactly one side null" per field.
        if (other.getDataSourceFieldName() == null ^ this.getDataSourceFieldName() == null)
            return false;
        if (other.getDataSourceFieldName() != null && other.getDataSourceFieldName().equals(this.getDataSourceFieldName()) == false)
            return false;
        if (other.getDateFieldFormat() == null ^ this.getDateFieldFormat() == null)
            return false;
        if (other.getDateFieldFormat() != null && other.getDateFieldFormat().equals(this.getDateFieldFormat()) == false)
            return false;
        if (other.getIndexFieldName() == null ^ this.getIndexFieldName() == null)
            return false;
        if (other.getIndexFieldName() != null && other.getIndexFieldName().equals(this.getIndexFieldName()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getDataSourceFieldName() == null) ? 0 : getDataSourceFieldName().hashCode());
        hashCode = prime * hashCode + ((getDateFieldFormat() == null) ? 0 : getDateFieldFormat().hashCode());
        hashCode = prime * hashCode + ((getIndexFieldName() == null) ? 0 : getIndexFieldName().hashCode());
        return hashCode;
    }

    @Override
    public ConfluenceSpaceToIndexFieldMapping clone() {
        try {
            // Shallow copy is sufficient: all fields are immutable Strings.
            return (ConfluenceSpaceToIndexFieldMapping) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.kendra.model.transform.ConfluenceSpaceToIndexFieldMappingMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
aws/aws-sdk-java
aws-java-sdk-kendra/src/main/java/com/amazonaws/services/kendra/model/ConfluenceSpaceToIndexFieldMapping.java
Java
apache-2.0
10,473
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.shield.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * A summary of information about the attack.
 * </p>
 * <p>
 * NOTE(review): generated, mutable POJO following the AWS SDK model template: plain setters plus fluent
 * {@code with*} variants that return {@code this}. The collection setter takes a defensive copy; the
 * getter returns the live internal list. Not thread-safe.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/shield-2016-06-02/SummarizedAttackVector" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SummarizedAttackVector implements Serializable, Cloneable, StructuredPojo {

    /** The attack type, for example, SNMP reflection or SYN flood. */
    private String vectorType;
    /** The list of counters that describe the details of the attack; null until set. */
    private java.util.List<SummarizedCounter> vectorCounters;

    /**
     * @param vectorType the attack type, for example, SNMP reflection or SYN flood
     */
    public void setVectorType(String vectorType) {
        this.vectorType = vectorType;
    }

    /**
     * @return the attack type, for example, SNMP reflection or SYN flood
     */
    public String getVectorType() {
        return this.vectorType;
    }

    /**
     * Fluent setter.
     *
     * @param vectorType the attack type, for example, SNMP reflection or SYN flood
     * @return this object, for method-call chaining
     */
    public SummarizedAttackVector withVectorType(String vectorType) {
        setVectorType(vectorType);
        return this;
    }

    /**
     * @return the list of counters that describe the details of the attack (the internal list, not a copy;
     *         may be null)
     */
    public java.util.List<SummarizedCounter> getVectorCounters() {
        return vectorCounters;
    }

    /**
     * @param vectorCounters the list of counters that describe the details of the attack; a null argument
     *        clears the field, otherwise the collection is defensively copied
     */
    public void setVectorCounters(java.util.Collection<SummarizedCounter> vectorCounters) {
        if (vectorCounters == null) {
            this.vectorCounters = null;
            return;
        }

        this.vectorCounters = new java.util.ArrayList<SummarizedCounter>(vectorCounters);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setVectorCounters(java.util.Collection)} or {@link #withVectorCounters(java.util.Collection)} if you want
     * to override the existing values.
     * </p>
     *
     * @param vectorCounters the counters to append
     * @return this object, for method-call chaining
     */
    public SummarizedAttackVector withVectorCounters(SummarizedCounter... vectorCounters) {
        if (this.vectorCounters == null) {
            setVectorCounters(new java.util.ArrayList<SummarizedCounter>(vectorCounters.length));
        }
        for (SummarizedCounter ele : vectorCounters) {
            this.vectorCounters.add(ele);
        }
        return this;
    }

    /**
     * Fluent setter; replaces (does not append to) the current list via
     * {@link #setVectorCounters(java.util.Collection)}.
     *
     * @param vectorCounters the list of counters that describe the details of the attack
     * @return this object, for method-call chaining
     */
    public SummarizedAttackVector withVectorCounters(java.util.Collection<SummarizedCounter> vectorCounters) {
        setVectorCounters(vectorCounters);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getVectorType() != null)
            sb.append("VectorType: ").append(getVectorType()).append(",");
        if (getVectorCounters() != null)
            sb.append("VectorCounters: ").append(getVectorCounters());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof SummarizedAttackVector == false)
            return false;
        SummarizedAttackVector other = (SummarizedAttackVector) obj;
        // Generated null-safe comparison: the XOR detects "exactly one side null" per field.
        if (other.getVectorType() == null ^ this.getVectorType() == null)
            return false;
        if (other.getVectorType() != null && other.getVectorType().equals(this.getVectorType()) == false)
            return false;
        if (other.getVectorCounters() == null ^ this.getVectorCounters() == null)
            return false;
        if (other.getVectorCounters() != null && other.getVectorCounters().equals(this.getVectorCounters()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getVectorType() == null) ? 0 : getVectorType().hashCode());
        hashCode = prime * hashCode + ((getVectorCounters() == null) ? 0 : getVectorCounters().hashCode());
        return hashCode;
    }

    @Override
    public SummarizedAttackVector clone() {
        try {
            // Shallow copy: the counters list reference is shared with the original.
            return (SummarizedAttackVector) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.shield.model.transform.SummarizedAttackVectorMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
aws/aws-sdk-java
aws-java-sdk-shield/src/main/java/com/amazonaws/services/shield/model/SummarizedAttackVector.java
Java
apache-2.0
7,238
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.rest; import java.util.Map; import java.util.Set; import org.apache.camel.Component; import org.apache.camel.Consumer; import org.apache.camel.ExchangePattern; import org.apache.camel.NoFactoryAvailableException; import org.apache.camel.NoSuchBeanException; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.spi.FactoryFinder; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.RestConfiguration; import org.apache.camel.spi.RestConsumerFactory; import org.apache.camel.spi.RestProducerFactory; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.DefaultEndpoint; import org.apache.camel.util.HostUtils; import org.apache.camel.util.ObjectHelper; import static org.apache.camel.support.RestProducerFactoryHelper.setupComponent; /** * The rest component is used for either hosting REST services (consumer) or calling external REST services (producer). 
*/ @UriEndpoint(firstVersion = "2.14.0", scheme = "rest", title = "REST", syntax = "rest:method:path:uriTemplate", label = "core,rest", lenientProperties = true) public class RestEndpoint extends DefaultEndpoint { public static final String[] DEFAULT_REST_CONSUMER_COMPONENTS = new String[]{"coap", "netty-http", "netty4-http", "jetty", "restlet", "servlet", "spark-java", "undertow"}; public static final String[] DEFAULT_REST_PRODUCER_COMPONENTS = new String[]{"http", "http4", "netty4-http", "jetty", "restlet", "undertow"}; public static final String DEFAULT_API_COMPONENT_NAME = "swagger"; public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/rest/"; @UriPath(label = "common", enums = "get,post,put,delete,patch,head,trace,connect,options") @Metadata(required = true) private String method; @UriPath(label = "common") @Metadata(required = true) private String path; @UriPath(label = "common") private String uriTemplate; @UriParam(label = "common") private String consumes; @UriParam(label = "common") private String produces; @UriParam(label = "common") private String componentName; @UriParam(label = "common") private String inType; @UriParam(label = "common") private String outType; @UriParam(label = "common") private String routeId; @UriParam(label = "consumer") private String description; @UriParam(label = "producer") private String apiDoc; @UriParam(label = "producer") private String host; @UriParam(label = "producer", multiValue = true) private String queryParameters; @UriParam(label = "producer", enums = "auto,off,json,xml,json_xml") private RestConfiguration.RestBindingMode bindingMode; private Map<String, Object> parameters; public RestEndpoint(String endpointUri, RestComponent component) { super(endpointUri, component); setExchangePattern(ExchangePattern.InOut); } @Override public RestComponent getComponent() { return (RestComponent) super.getComponent(); } public String getMethod() { return method; } /** * HTTP method to use. 
*/ public void setMethod(String method) { this.method = method; } public String getPath() { return path; } /** * The base path */ public void setPath(String path) { this.path = path; } public String getUriTemplate() { return uriTemplate; } /** * The uri template */ public void setUriTemplate(String uriTemplate) { this.uriTemplate = uriTemplate; } public String getConsumes() { return consumes; } /** * Media type such as: 'text/xml', or 'application/json' this REST service accepts. * By default we accept all kinds of types. */ public void setConsumes(String consumes) { this.consumes = consumes; } public String getProduces() { return produces; } /** * Media type such as: 'text/xml', or 'application/json' this REST service returns. */ public void setProduces(String produces) { this.produces = produces; } public String getComponentName() { return componentName; } /** * The Camel Rest component to use for the REST transport, such as restlet, spark-rest. * If no component has been explicit configured, then Camel will lookup if there is a Camel component * that integrates with the Rest DSL, or if a org.apache.camel.spi.RestConsumerFactory is registered in the registry. * If either one is found, then that is being used. 
*/ public void setComponentName(String componentName) { this.componentName = componentName; } public String getInType() { return inType; } /** * To declare the incoming POJO binding type as a FQN class name */ public void setInType(String inType) { this.inType = inType; } public String getOutType() { return outType; } /** * To declare the outgoing POJO binding type as a FQN class name */ public void setOutType(String outType) { this.outType = outType; } public String getRouteId() { return routeId; } /** * Name of the route this REST services creates */ public void setRouteId(String routeId) { this.routeId = routeId; } public String getDescription() { return description; } /** * Human description to document this REST service */ public void setDescription(String description) { this.description = description; } public Map<String, Object> getParameters() { return parameters; } /** * Additional parameters to configure the consumer of the REST transport for this REST service */ public void setParameters(Map<String, Object> parameters) { this.parameters = parameters; } public String getApiDoc() { return apiDoc; } /** * The swagger api doc resource to use. * The resource is loaded from classpath by default and must be in JSon format. */ public void setApiDoc(String apiDoc) { this.apiDoc = apiDoc; } public String getHost() { return host; } /** * Host and port of HTTP service to use (override host in swagger schema) */ public void setHost(String host) { this.host = host; } public String getQueryParameters() { return queryParameters; } /** * Query parameters for the HTTP service to call */ public void setQueryParameters(String queryParameters) { this.queryParameters = queryParameters; } public RestConfiguration.RestBindingMode getBindingMode() { return bindingMode; } /** * Configures the binding mode for the producer. 
If set to anything * other than 'off' the producer will try to convert the body of * the incoming message from inType to the json or xml, and the * response from json or xml to outType. */ public void setBindingMode(RestConfiguration.RestBindingMode bindingMode) { this.bindingMode = bindingMode; } public void setBindingMode(String bindingMode) { this.bindingMode = RestConfiguration.RestBindingMode.valueOf(bindingMode.toLowerCase()); } @Override public Producer createProducer() throws Exception { if (ObjectHelper.isEmpty(host)) { // hostname must be provided throw new IllegalArgumentException("Hostname must be configured on either restConfiguration" + " or in the rest endpoint uri as a query parameter with name host, eg rest:" + method + ":" + path + "?host=someserver"); } RestProducerFactory apiDocFactory = null; RestProducerFactory factory = null; if (apiDoc != null) { log.debug("Discovering camel-swagger-java on classpath for using api-doc: {}", apiDoc); // lookup on classpath using factory finder to automatic find it (just add camel-swagger-java to classpath etc) try { FactoryFinder finder = getCamelContext().getFactoryFinder(RESOURCE_PATH); Object instance = finder.newInstance(DEFAULT_API_COMPONENT_NAME); if (instance instanceof RestProducerFactory) { // this factory from camel-swagger-java will facade the http component in use apiDocFactory = (RestProducerFactory) instance; } parameters.put("apiDoc", apiDoc); } catch (NoFactoryAvailableException e) { throw new IllegalStateException("Cannot find camel-swagger-java on classpath to use with api-doc: " + apiDoc); } } String cname = getComponentName(); if (cname != null) { Object comp = getCamelContext().getRegistry().lookupByName(getComponentName()); if (comp instanceof RestProducerFactory) { factory = (RestProducerFactory) comp; } else { comp = setupComponent(getComponentName(), getCamelContext(), (Map<String, Object>) parameters.get("component")); if (comp instanceof RestProducerFactory) { factory = 
(RestProducerFactory) comp; } } if (factory == null) { if (comp != null) { throw new IllegalArgumentException("Component " + getComponentName() + " is not a RestProducerFactory"); } else { throw new NoSuchBeanException(getComponentName(), RestProducerFactory.class.getName()); } } cname = getComponentName(); } // try all components if (factory == null) { for (String name : getCamelContext().getComponentNames()) { Component comp = setupComponent(name, getCamelContext(), (Map<String, Object>) parameters.get("component")); if (comp instanceof RestProducerFactory) { factory = (RestProducerFactory) comp; cname = name; break; } } } parameters.put("componentName", cname); // lookup in registry if (factory == null) { Set<RestProducerFactory> factories = getCamelContext().getRegistry().findByType(RestProducerFactory.class); if (factories != null && factories.size() == 1) { factory = factories.iterator().next(); } } // no explicit factory found then try to see if we can find any of the default rest consumer components // and there must only be exactly one so we safely can pick this one if (factory == null) { RestProducerFactory found = null; String foundName = null; for (String name : DEFAULT_REST_PRODUCER_COMPONENTS) { Object comp = setupComponent(getComponentName(), getCamelContext(), (Map<String, Object>) parameters.get("component")); if (comp instanceof RestProducerFactory) { if (found == null) { found = (RestProducerFactory) comp; foundName = name; } else { throw new IllegalArgumentException("Multiple RestProducerFactory found on classpath. 
Configure explicit which component to use"); } } } if (found != null) { log.debug("Auto discovered {} as RestProducerFactory", foundName); factory = found; } } if (factory != null) { log.debug("Using RestProducerFactory: {}", factory); RestConfiguration config = getCamelContext().getRestConfiguration(cname, true); Producer producer; if (apiDocFactory != null) { // wrap the factory using the api doc factory which will use the factory parameters.put("restProducerFactory", factory); producer = apiDocFactory.createProducer(getCamelContext(), host, method, path, uriTemplate, queryParameters, consumes, produces, config, parameters); } else { producer = factory.createProducer(getCamelContext(), host, method, path, uriTemplate, queryParameters, consumes, produces, config, parameters); } RestProducer answer = new RestProducer(this, producer, config); answer.setOutType(outType); answer.setType(inType); answer.setBindingMode(bindingMode); return answer; } else { throw new IllegalStateException("Cannot find RestProducerFactory in Registry or as a Component to use"); } } @Override public Consumer createConsumer(Processor processor) throws Exception { RestConsumerFactory factory = null; String cname = null; if (getComponentName() != null) { Object comp = getCamelContext().getRegistry().lookupByName(getComponentName()); if (comp instanceof RestConsumerFactory) { factory = (RestConsumerFactory) comp; } else { comp = getCamelContext().getComponent(getComponentName()); if (comp instanceof RestConsumerFactory) { factory = (RestConsumerFactory) comp; } } if (factory == null) { if (comp != null) { throw new IllegalArgumentException("Component " + getComponentName() + " is not a RestConsumerFactory"); } else { throw new NoSuchBeanException(getComponentName(), RestConsumerFactory.class.getName()); } } cname = getComponentName(); } // try all components if (factory == null) { for (String name : getCamelContext().getComponentNames()) { Component comp = getCamelContext().getComponent(name); 
if (comp instanceof RestConsumerFactory) { factory = (RestConsumerFactory) comp; cname = name; break; } } } // lookup in registry if (factory == null) { Set<RestConsumerFactory> factories = getCamelContext().getRegistry().findByType(RestConsumerFactory.class); if (factories != null && factories.size() == 1) { factory = factories.iterator().next(); } } // no explicit factory found then try to see if we can find any of the default rest consumer components // and there must only be exactly one so we safely can pick this one if (factory == null) { RestConsumerFactory found = null; String foundName = null; for (String name : DEFAULT_REST_CONSUMER_COMPONENTS) { Object comp = getCamelContext().getComponent(name, true); if (comp instanceof RestConsumerFactory) { if (found == null) { found = (RestConsumerFactory) comp; foundName = name; } else { throw new IllegalArgumentException("Multiple RestConsumerFactory found on classpath. Configure explicit which component to use"); } } } if (found != null) { log.debug("Auto discovered {} as RestConsumerFactory", foundName); factory = found; } } if (factory != null) { // if no explicit port/host configured, then use port from rest configuration String scheme = "http"; String host = ""; int port = 80; RestConfiguration config = getCamelContext().getRestConfiguration(cname, true); if (config.getScheme() != null) { scheme = config.getScheme(); } if (config.getHost() != null) { host = config.getHost(); } int num = config.getPort(); if (num > 0) { port = num; } // if no explicit hostname set then resolve the hostname if (ObjectHelper.isEmpty(host)) { if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.allLocalIp) { host = "0.0.0.0"; } else if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.localHostName) { host = HostUtils.getLocalHostName(); } else if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.localIp) { host = HostUtils.getLocalIp(); } } // calculate the url to 
the rest service String path = getPath(); if (!path.startsWith("/")) { path = "/" + path; } // there may be an optional context path configured to help Camel calculate the correct urls for the REST services // this may be needed when using camel-servlet where we cannot get the actual context-path or port number of the servlet engine // during init of the servlet String contextPath = config.getContextPath(); if (contextPath != null) { if (!contextPath.startsWith("/")) { path = "/" + contextPath + path; } else { path = contextPath + path; } } String baseUrl = scheme + "://" + host + (port != 80 ? ":" + port : "") + path; String url = baseUrl; if (uriTemplate != null) { // make sure to avoid double slashes if (uriTemplate.startsWith("/")) { url = url + uriTemplate; } else { url = url + "/" + uriTemplate; } } Consumer consumer = factory.createConsumer(getCamelContext(), processor, getMethod(), getPath(), getUriTemplate(), getConsumes(), getProduces(), config, getParameters()); configureConsumer(consumer); // add to rest registry so we can keep track of them, we will remove from the registry when the consumer is removed // the rest registry will automatic keep track when the consumer is removed, // and un-register the REST service from the registry getCamelContext().getRestRegistry().addRestService(consumer, url, baseUrl, getPath(), getUriTemplate(), getMethod(), getConsumes(), getProduces(), getInType(), getOutType(), getRouteId(), getDescription()); return consumer; } else { throw new IllegalStateException("Cannot find RestConsumerFactory in Registry or as a Component to use"); } } @Override public boolean isSingleton() { return true; } @Override public boolean isLenientProperties() { return true; } }
punkhorn/camel-upstream
components/camel-rest/src/main/java/org/apache/camel/component/rest/RestEndpoint.java
Java
apache-2.0
20,335
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.

using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text;
using System.Web.Mvc.Properties;

namespace System.Web.Mvc {

   // Value provider backed by a NameValueCollection (e.g. form or query-string data).
   // Values are materialized lazily via placeholders so that ASP.NET request validation
   // is not triggered until a specific value is actually read.
   [SuppressMessage("StyleCop.CSharp.NamingRules", "SA1305:FieldNamesMustNotUseHungarianNotation", Target = "jQueryToMvcRequestNormalizationRequired", Justification = "jQuery is usually spelled like this. Hence suppressing this message.")]
   public class NameValueCollectionValueProvider : IValueProvider, IEnumerableValueProvider {

      PrefixContainer? _prefixContainer;
      NameValueCollection _collection;
      CultureInfo _culture;
      // When true, keys are rewritten from jQuery bracket syntax to MVC dot syntax.
      bool _jQueryToMvcRequestNormalizationRequired;

      Dictionary<string, ValueProviderResultPlaceholder>? _values = null;

      // Lazily-built key -> placeholder map; building it only reads keys, never values.
      private Dictionary<string, ValueProviderResultPlaceholder> Values =>
         _values ??= InitializeCollectionValues();

      private PrefixContainer PrefixContainer =>
         // Race condition on initialization has no side effects
         _prefixContainer ??= new PrefixContainer(Values.Keys);

      public NameValueCollectionValueProvider(NameValueCollection collection, CultureInfo culture)
         : this(collection, culture, jQueryToMvcRequestNormalizationRequired: false) { }

      /// <summary>
      /// Initializes Name Value collection provider.
      /// </summary>
      /// <param name="collection">Key value collection from request.</param>
      /// <param name="culture">Culture with which the values are to be used.</param>
      /// <param name="jQueryToMvcRequestNormalizationRequired">jQuery POST when sending complex Javascript
      /// objects to server does not encode in the way understandable by MVC. This flag should be set
      /// if the request should be normalized to MVC form - https://aspnetwebstack.codeplex.com/workitem/1564.
      /// </param>
      [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "j", Justification = "jQuery is not accepted as a valid variable name in this class")]
      public NameValueCollectionValueProvider(
            NameValueCollection collection, CultureInfo culture, bool jQueryToMvcRequestNormalizationRequired) {

         if (collection is null) throw new ArgumentNullException(nameof(collection));

         _collection = collection;
         _culture = culture;
         _jQueryToMvcRequestNormalizationRequired = jQueryToMvcRequestNormalizationRequired;
      }

      public virtual bool ContainsPrefix(string prefix) =>
         PrefixContainer.ContainsPrefix(prefix);

      /// <summary>
      /// Returns the result for <paramref name="key"/>, or null when the key is absent.
      /// Reading the result triggers request validation for that entry only.
      /// </summary>
      public virtual ValueProviderResult? GetValue(string key) {

         if (key is null) throw new ArgumentNullException(nameof(key));

         ValueProviderResultPlaceholder placeholder;
         Values.TryGetValue(key, out placeholder);

         if (placeholder is null) {
            return null;
         } else {
            return placeholder.ValidatedResult;
         }
      }

      public virtual IDictionary<string, string> GetKeysFromPrefix(string prefix) =>
         PrefixContainer.GetKeysFromPrefix(prefix);

      // Builds the key -> placeholder map. Keys are optionally normalized from jQuery
      // to MVC syntax; the ORIGINAL key is kept inside the placeholder because that is
      // the key the underlying collection knows.
      Dictionary<string, ValueProviderResultPlaceholder> InitializeCollectionValues() {

         var tempValues = new Dictionary<string, ValueProviderResultPlaceholder>(StringComparer.OrdinalIgnoreCase);

         // Need to read keys from the unvalidated collection, as M.W.I's granular request validation is a bit touchy
         // and validated entries at the time the key or value is looked at. For example, GetKey() will throw if the
         // value fails request validation, even though the value's not being looked at (M.W.I can't tell the difference).

         foreach (string key in _collection) {
            if (key != null) {

               string normalizedKey = key;

               if (_jQueryToMvcRequestNormalizationRequired) {
                  normalizedKey = NormalizeJQueryToMvc(key);
               }

               // need to look up values lazily, as eagerly looking at the collection might trigger validation
               tempValues[normalizedKey] = new ValueProviderResultPlaceholder(key, _collection, _culture);
            }
         }

         return tempValues;
      }

      // This code is borrowed from WebAPI FormDataCollectionExtensions.cs
      // This is a helper method to use Model Binding over a JQuery syntax.
      // Normalize from JQuery to MVC keys. The model binding infrastructure uses MVC keys
      // x[] --> x
      // [] --> ""
      // x[12] --> x[12]
      // x[field] --> x.field, where field is not a number

      static string NormalizeJQueryToMvc(string key) {

         if (key is null) {
            return String.Empty;
         }

         StringBuilder? sb = null;
         int i = 0;

         while (true) {

            int indexOpen = key.IndexOf('[', i);

            if (indexOpen < 0) {

               // Fast path, no normalization needed.
               // This skips the string conversion and allocating the string builder.
               if (i == 0) {
                  return key;
               }

               sb ??= new StringBuilder();
               sb.Append(key, i, key.Length - i);
               break; // no more brackets
            }

            sb ??= new StringBuilder();
            sb.Append(key, i, indexOpen - i); // everything up to "["

            // Find closing bracket.
            int indexClose = key.IndexOf(']', indexOpen);

            if (indexClose == -1) {
               throw new ArgumentException(MvcResources.JQuerySyntaxMissingClosingBracket, nameof(key));
            }

            if (indexClose == indexOpen + 1) {
               // Empty bracket. Signifies array. Just remove.
            } else {
               if (Char.IsDigit(key[indexOpen + 1])) {
                  // array index. Leave unchanged.
                  sb.Append(key, indexOpen, indexClose - indexOpen + 1);
               } else {
                  // Field name. Convert to dot notation.
                  sb.Append('.');
                  sb.Append(key, indexOpen + 1, indexClose - indexOpen - 1);
               }
            }

            i = indexClose + 1;

            if (i >= key.Length) {
               break; // end of string
            }
         }

         return sb.ToString();
      }

      // Placeholder that can store a validated (in relation to request validation) or unvalidated
      // ValueProviderResult for a given key. The result is computed at most once, on first access.
      sealed class ValueProviderResultPlaceholder {

         ValueProviderResult? _validatedResult;

         string _key;
         NameValueCollection _validatedCollection;
         CultureInfo _culture;

         // Lazily computed; this is the point where request validation may fire.
         public ValueProviderResult ValidatedResult =>
            _validatedResult ??= GetResultFromCollection(_key, _validatedCollection, _culture);

         public ValueProviderResultPlaceholder(string key, NameValueCollection validatedCollection, CultureInfo culture) {
            _key = key;
            _validatedCollection = validatedCollection;
            _culture = culture;
         }

         static ValueProviderResult GetResultFromCollection(string key, NameValueCollection collection, CultureInfo culture) {
            string[]? rawValue = collection.GetValues(key);
            string attemptedValue = collection[key];
            return new ValueProviderResult(rawValue, attemptedValue, culture);
         }
      }
   }
}
maxtoroq/XCST-a
src/Xcst.AspNet/Framework/ModelBinding/NameValueCollectionValueProvider.cs
C#
apache-2.0
7,720
/**
 * Copyright (c) 2013-2021 Nikita Koksharov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.redisson.spring.data.connection;

import org.reactivestreams.Publisher;
import org.redisson.api.StreamMessageId;
import org.redisson.client.codec.ByteArrayCodec;
import org.redisson.client.codec.StringCodec;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.client.protocol.RedisStrictCommand;
import org.redisson.reactive.CommandReactiveExecutor;
import org.springframework.data.redis.connection.ReactiveRedisConnection;
import org.springframework.data.redis.connection.ReactiveStreamCommands;
import org.springframework.data.redis.connection.stream.ByteBufferRecord;
import org.springframework.data.redis.connection.stream.RecordId;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.connection.stream.StreamRecords;
import org.springframework.util.Assert;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.nio.ByteBuffer;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Spring Data Redis {@link ReactiveStreamCommands} implementation backed by
 * Redisson's reactive command executor. Each method translates a Spring
 * command object into the corresponding Redis Stream command (XADD, XACK,
 * XRANGE, ...), preserving the exact argument order the server expects.
 *
 * @author Nikita Koksharov
 *
 */
public class RedissonReactiveStreamCommands extends RedissonBaseReactive implements ReactiveStreamCommands {

    RedissonReactiveStreamCommands(CommandReactiveExecutor executorService) {
        super(executorService);
    }

    // Converts record ids to their plain string form for use as command arguments.
    private static List<String> toStringList(List<RecordId> recordIds) {
        return recordIds.stream().map(RecordId::getValue).collect(Collectors.toList());
    }

    @Override
    public Flux<ReactiveRedisConnection.NumericResponse<AcknowledgeCommand, Long>> xAck(Publisher<AcknowledgeCommand> publisher) {
        return execute(publisher, command -> {

            Assert.notNull(command.getKey(), "Key must not be null!");
            Assert.notNull(command.getGroup(), "Group must not be null!");
            Assert.notNull(command.getRecordIds(), "recordIds must not be null!");

            // XACK <key> <group> <id...>
            List<Object> params = new ArrayList<>();
            byte[] k = toByteArray(command.getKey());
            params.add(k);
            params.add(command.getGroup());
            params.addAll(toStringList(command.getRecordIds()));

            Mono<Long> m = write(k, StringCodec.INSTANCE, RedisCommands.XACK, params.toArray());
            return m.map(v -> new ReactiveRedisConnection.NumericResponse<>(command, v));
        });
    }

    @Override
    public Flux<ReactiveRedisConnection.CommandResponse<AddStreamRecord, RecordId>> xAdd(Publisher<AddStreamRecord> publisher) {
        return execute(publisher, command -> {

            Assert.notNull(command.getKey(), "Key must not be null!");
            Assert.notNull(command.getBody(), "Body must not be null!");

            byte[] k = toByteArray(command.getKey());

            List<Object> params = new LinkedList<>();
            params.add(k);

            // Use the explicit id when the caller supplied one, otherwise let
            // the server generate it ("*").
            if (!command.getRecord().getId().shouldBeAutoGenerated()) {
                params.add(command.getRecord().getId().getValue());
            } else {
                params.add("*");
            }

            // field/value pairs of the record body
            for (Map.Entry<ByteBuffer, ByteBuffer> entry : command.getBody().entrySet()) {
                params.add(toByteArray(entry.getKey()));
                params.add(toByteArray(entry.getValue()));
            }

            Mono<StreamMessageId> m = write(k, StringCodec.INSTANCE, RedisCommands.XADD, params.toArray());
            return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, RecordId.of(v.toString())));
        });
    }

    @Override
    public Flux<ReactiveRedisConnection.CommandResponse<DeleteCommand, Long>> xDel(Publisher<DeleteCommand> publisher) {
        return execute(publisher, command -> {
            Assert.notNull(command.getKey(), "Key must not be null!");
            Assert.notNull(command.getRecordIds(), "recordIds must not be null!");

            byte[] k = toByteArray(command.getKey());
            List<Object> params = new ArrayList<>();
            params.add(k);
            params.addAll(toStringList(command.getRecordIds()));

            Mono<Long> m = write(k, StringCodec.INSTANCE, RedisCommands.XDEL, params.toArray());
            return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, v));
        });
    }

    @Override
    public Flux<ReactiveRedisConnection.NumericResponse<ReactiveRedisConnection.KeyCommand, Long>> xLen(Publisher<ReactiveRedisConnection.KeyCommand> publisher) {
        return execute(publisher, command -> {
            Assert.notNull(command.getKey(), "Key must not be null!");

            byte[] k = toByteArray(command.getKey());
            Mono<Long> m = write(k, StringCodec.INSTANCE, RedisCommands.XLEN, k);
            return m.map(v -> new ReactiveRedisConnection.NumericResponse<>(command, v));
        });
    }

    @Override
    public Flux<ReactiveRedisConnection.CommandResponse<RangeCommand, Flux<ByteBufferRecord>>> xRange(Publisher<RangeCommand> publisher) {
        return range(RedisCommands.XRANGE, publisher);
    }

    // Shared implementation for XRANGE and XREVRANGE. The two commands take the
    // range bounds in opposite order, hence the branch below.
    private Flux<ReactiveRedisConnection.CommandResponse<RangeCommand, Flux<ByteBufferRecord>>> range(RedisCommand<?> rangeCommand, Publisher<RangeCommand> publisher) {
        return execute(publisher, command -> {
            Assert.notNull(command.getKey(), "Key must not be null!");
            Assert.notNull(command.getRange(), "Range must not be null!");
            Assert.notNull(command.getLimit(), "Limit must not be null!");

            byte[] k = toByteArray(command.getKey());
            List<Object> params = new LinkedList<>();
            params.add(k);

            if (rangeCommand == RedisCommands.XRANGE) {
                // XRANGE key start end — unbounded defaults are "-" / "+"
                params.add(command.getRange().getLowerBound().getValue().orElse("-"));
                params.add(command.getRange().getUpperBound().getValue().orElse("+"));
            } else {
                // XREVRANGE key end start
                params.add(command.getRange().getUpperBound().getValue().orElse("+"));
                params.add(command.getRange().getLowerBound().getValue().orElse("-"));
            }

            if (command.getLimit().getCount() > 0) {
                params.add("COUNT");
                params.add(command.getLimit().getCount());
            }

            Mono<Map<StreamMessageId, Map<byte[], byte[]>>> m = write(k, ByteArrayCodec.INSTANCE, rangeCommand, params.toArray());
            // Convert each id -> field-map entry into a Spring ByteBufferRecord.
            return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, Flux.fromStream(v.entrySet().stream()).map(e -> {
                Map<ByteBuffer, ByteBuffer> map = e.getValue().entrySet().stream()
                        .collect(Collectors.toMap(entry -> ByteBuffer.wrap(entry.getKey()),
                                entry -> ByteBuffer.wrap(entry.getValue())));
                return StreamRecords.newRecord()
                        .in(command.getKey())
                        .withId(RecordId.of(e.getKey().toString()))
                        .ofBuffer(map);
            })));
        });
    }

    @Override
    public Flux<ReactiveRedisConnection.CommandResponse<ReadCommand, Flux<ByteBufferRecord>>> read(Publisher<ReadCommand> publisher) {
        return execute(publisher, command -> {
            Assert.notNull(command.getStreamOffsets(), "StreamOffsets must not be null!");
            Assert.notNull(command.getReadOptions(), "ReadOptions must not be null!");

            List<Object> params = new ArrayList<>();

            // Presence of a consumer switches XREAD to XREADGROUP semantics.
            if (command.getConsumer() != null) {
                params.add("GROUP");
                params.add(command.getConsumer().getGroup());
                params.add(command.getConsumer().getName());
            }

            if (command.getReadOptions().getCount() != null && command.getReadOptions().getCount() > 0) {
                params.add("COUNT");
                params.add(command.getReadOptions().getCount());
            }

            if (command.getReadOptions().getBlock() != null && command.getReadOptions().getBlock() > 0) {
                params.add("BLOCK");
                params.add(command.getReadOptions().getBlock());
            }

            // NOACK is only valid in combination with a consumer group.
            if (command.getConsumer() != null && command.getReadOptions().isNoack()) {
                params.add("NOACK");
            }

            // STREAMS section: all keys first, then all offsets, in matching order.
            params.add("STREAMS");
            for (StreamOffset<ByteBuffer> streamOffset : command.getStreamOffsets()) {
                params.add(toByteArray(streamOffset.getKey()));
            }

            for (StreamOffset<ByteBuffer> streamOffset : command.getStreamOffsets()) {
                params.add(streamOffset.getOffset().getOffset());
            }

            // Pick the command variant matching group usage and blocking mode.
            Mono<Map<String, Map<StreamMessageId, Map<byte[], byte[]>>>> m;
            if (command.getConsumer() == null) {
                if (command.getReadOptions().getBlock() != null && command.getReadOptions().getBlock() > 0) {
                    m = read(toByteArray(command.getStreamOffsets().get(0).getKey()), ByteArrayCodec.INSTANCE, RedisCommands.XREAD_BLOCKING, params.toArray());
                } else {
                    m = read(toByteArray(command.getStreamOffsets().get(0).getKey()), ByteArrayCodec.INSTANCE, RedisCommands.XREAD, params.toArray());
                }
            } else {
                if (command.getReadOptions().getBlock() != null && command.getReadOptions().getBlock() > 0) {
                    m = read(toByteArray(command.getStreamOffsets().get(0).getKey()), ByteArrayCodec.INSTANCE, RedisCommands.XREADGROUP_BLOCKING, params.toArray());
                } else {
                    m = read(toByteArray(command.getStreamOffsets().get(0).getKey()), ByteArrayCodec.INSTANCE, RedisCommands.XREADGROUP, params.toArray());
                }
            }
            // Flatten stream-name -> (id -> fields) into a flux of records.
            return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, Flux.fromStream(v.entrySet().stream())
                    .map(ee -> {
                        return ee.getValue().entrySet().stream().map(e -> {
                            Map<ByteBuffer, ByteBuffer> map = e.getValue().entrySet().stream()
                                    .collect(Collectors.toMap(entry -> ByteBuffer.wrap(entry.getKey()),
                                            entry -> ByteBuffer.wrap(entry.getValue())));
                            return StreamRecords.newRecord()
                                    .in(ee.getKey())
                                    .withId(RecordId.of(e.getKey().toString()))
                                    .ofBuffer(map);
                        });
                    }).flatMap(Flux::fromStream)
            ));
        });
    }

    // XGROUP variant whose reply is a status string (used for CREATE).
    private static final RedisStrictCommand<String> XGROUP_STRING = new RedisStrictCommand<>("XGROUP");

    @Override
    public Flux<ReactiveRedisConnection.CommandResponse<GroupCommand, String>> xGroup(Publisher<GroupCommand> publisher) {
        return execute(publisher, command -> {
            Assert.notNull(command.getKey(), "Key must not be null!");
            Assert.notNull(command.getGroupName(), "GroupName must not be null!");

            byte[] k = toByteArray(command.getKey());

            if (command.getAction().equals(GroupCommand.GroupCommandAction.CREATE)) {
                Assert.notNull(command.getReadOffset(), "ReadOffset must not be null!");

                // MKSTREAM creates the stream if it does not exist yet.
                Mono<String> m = write(k, StringCodec.INSTANCE, XGROUP_STRING, "CREATE", k, command.getGroupName(), command.getReadOffset().getOffset(), "MKSTREAM");
                return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, v));
            }

            if (command.getAction().equals(GroupCommand.GroupCommandAction.DELETE_CONSUMER)) {
                Assert.notNull(command.getConsumerName(), "ConsumerName must not be null!");

                // DELCONSUMER replies with a count; map it to "OK"/"Error".
                Mono<Long> m = write(k, StringCodec.INSTANCE, RedisCommands.XGROUP_LONG, "DELCONSUMER", k, command.getGroupName(), command.getConsumerName());
                return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, v > 0 ? "OK" : "Error"));
            }

            if (command.getAction().equals(GroupCommand.GroupCommandAction.DESTROY)) {
                Mono<Long> m = write(k, StringCodec.INSTANCE, RedisCommands.XGROUP_LONG, "DESTROY", k, command.getGroupName());
                return m.map(v -> new ReactiveRedisConnection.CommandResponse<>(command, v > 0 ? "OK" : "Error"));
            }

            throw new IllegalArgumentException("unknown command " + command.getAction());
        });
    }

    @Override
    public Flux<ReactiveRedisConnection.CommandResponse<RangeCommand, Flux<ByteBufferRecord>>> xRevRange(Publisher<RangeCommand> publisher) {
        return range(RedisCommands.XREVRANGE, publisher);
    }

    @Override
    public Flux<ReactiveRedisConnection.NumericResponse<ReactiveRedisConnection.KeyCommand, Long>> xTrim(Publisher<TrimCommand> publisher) {
        return execute(publisher, command -> {
            Assert.notNull(command.getKey(), "Key must not be null!");
            Assert.notNull(command.getCount(), "Count must not be null!");

            byte[] k = toByteArray(command.getKey());
            Mono<Long> m = write(k, StringCodec.INSTANCE, RedisCommands.XTRIM, k, "MAXLEN", command.getCount());
            return m.map(v -> new ReactiveRedisConnection.NumericResponse<>(command, v));
        });
    }

}
redisson/redisson
redisson-spring-data/redisson-spring-data-22/src/main/java/org/redisson/spring/data/connection/RedissonReactiveStreamCommands.java
Java
apache-2.0
13,883
package chatty; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; /** * * @author tduva */ public class AddressbookTest { private Addressbook ab; public AddressbookTest() { } @BeforeClass public static void setUpClass() { } @AfterClass public static void tearDownClass() { } @Before public void setUp() { ab = new Addressbook("addressbookTest"); } @After public void tearDown() { } // TODO add test methods here. // The methods must be annotated with annotation @Test. For example: // // @Test // public void hello() {} @Test public void testAdd() { ab.add("abc", "123"); List<AddressbookEntry> desiredResult = new ArrayList<>(); Set<String> categories = new HashSet<>(); categories.add("123"); desiredResult.add(new AddressbookEntry("Abc", categories)); assertEquals(ab.getEntries(), desiredResult); assertEquals(ab.get("abc").getCategories(), categories); assertEquals(ab.getEntries().size(), 1); } }
Javaec/ChattyRus
test/chatty/AddressbookTest.java
Java
apache-2.0
1,323
import SectionUtilities from '../../transform/SectionUtilities'

/**
 * Toggle the hidden state of a single page section.
 * Delegates to SectionUtilities; does nothing when no global `document`
 * is available.
 * @param {!string} sectionId
 * @param {?boolean} hidden
 * @return {void}
 */
const setHidden = (sectionId, hidden) => {
  if (document) {
    SectionUtilities.setHidden(document, sectionId, hidden)
  }
}

export default {
  getOffsets: SectionUtilities.getSectionOffsets,
  setHidden
}
wikimedia/mediawiki-services-mobileapps
pagelib/src/pcs/c1/Sections.js
JavaScript
apache-2.0
396
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.optimizer;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.LimitOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorUtils;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.parse.GlobalLimitCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.SplitSample;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;

/**
 * This optimizer is used to reduce the input size for the query for queries which are
 * specifying a limit.
 * <p/>
 * For eg. for a query of type:
 * <p/>
 * select expr from T where &lt;filter&gt; limit 100;
 * <p/>
 * Most probably, the whole table T need not be scanned.
 * Chances are that even if we scan the first file of T, we would get the 100 rows
 * needed by this query.
 * This optimizer step populates the GlobalLimitCtx which is used later on to prune the inputs.
 */
public class GlobalLimitOptimizer implements Transform {

  private final Log LOG = LogFactory.getLog(GlobalLimitOptimizer.class.getName());

  /**
   * Enables the global-limit optimization in the parse context's
   * GlobalLimitCtx when the query qualifies; otherwise leaves the
   * context untouched.
   *
   * @param pctx parse context of the query being compiled
   * @return the same parse context, possibly with GlobalLimitCtx enabled
   * @throws SemanticException if partition pruning fails
   */
  public ParseContext transform(ParseContext pctx) throws SemanticException {
    Context ctx = pctx.getContext();
    Map<String, Operator<? extends OperatorDesc>> topOps = pctx.getTopOps();
    GlobalLimitCtx globalLimitCtx = pctx.getGlobalLimitCtx();
    Map<TableScanOperator, ExprNodeDesc> opToPartPruner = pctx.getOpToPartPruner();
    Map<String, SplitSample> nameToSplitSample = pctx.getNameToSplitSample();

    // determine the query qualifies reduce input size for LIMIT
    // The query only qualifies when there are only one top operator
    // and there is no transformer or UDTF and no block sampling
    // is used.
    if (ctx.getTryCount() == 0 && topOps.size() == 1
        && !globalLimitCtx.ifHasTransformOrUDTF() &&
        nameToSplitSample.isEmpty()) {

      // Here we recursively check:
      // 1. whether there are exact one LIMIT in the query
      // 2. whether there is no aggregation, group-by, distinct, sort by,
      //    distributed by, or table sampling in any of the sub-query.
      // The query only qualifies if both conditions are satisfied.
      //
      // Example qualified queries:
      //    CREATE TABLE ... AS SELECT col1, col2 FROM tbl LIMIT ..
      //    INSERT OVERWRITE TABLE ... SELECT col1, hash(col2), split(col1)
      //                               FROM ... LIMIT...
      //    SELECT * FROM (SELECT col1 as col2 (SELECT * FROM ...) t1 LIMIT ...) t2);
      //
      TableScanOperator ts = (TableScanOperator) topOps.values().toArray()[0];
      // tri-state: null = disqualified, 0 = no limit found, >0 = the limit value
      Integer tempGlobalLimit = checkQbpForGlobalLimit(ts);

      // query qualify for the optimization
      if (tempGlobalLimit != null && tempGlobalLimit != 0) {
        Table tab = ts.getConf().getTableMetadata();

        if (!tab.isPartitioned()) {
          // unpartitioned table: only qualifies when there is no filter at all
          Set<FilterOperator> filterOps = OperatorUtils.findOperators(ts, FilterOperator.class);
          if (filterOps.size() == 0) {
            globalLimitCtx.enableOpt(tempGlobalLimit);
          }
        } else {
          // check if the pruner only contains partition columns
          if (PartitionPruner.onlyContainsPartnCols(tab, opToPartPruner.get(ts))) {

            PrunedPartitionList partsList;
            try {
              String alias = (String) topOps.keySet().toArray()[0];
              partsList = PartitionPruner.prune(ts, pctx, alias);
            } catch (HiveException e) {
              // Has to use full name to make sure it does not conflict with
              // org.apache.commons.lang.StringUtils
              LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
              throw new SemanticException(e.getMessage(), e);
            }

            // If there is any unknown partition, create a map-reduce job for
            // the filter to prune correctly
            if (!partsList.hasUnknownPartitions()) {
              globalLimitCtx.enableOpt(tempGlobalLimit);
            }
          }
        }
        if (globalLimitCtx.isEnable()) {
          LOG.info("Qualify the optimize that reduces input size for 'limit' for limit "
              + globalLimitCtx.getGlobalLimit());
        }
      }
    }
    return pctx;
  }

  /**
   * Check the limit number in all sub queries
   *
   * @return if there is one and only one limit for all subqueries, return the limit
   *         if there is no limit, return 0
   *         otherwise, return null
   */
  private static Integer checkQbpForGlobalLimit(TableScanOperator ts) {
    // Only these operator types can disqualify (or satisfy) the optimization,
    // so classify the tree once and inspect each group below.
    Set<Class<? extends Operator<?>>> searchedClasses =
        new ImmutableSet.Builder<Class<? extends Operator<?>>>()
            .add(ReduceSinkOperator.class)
            .add(GroupByOperator.class)
            .add(FilterOperator.class)
            .add(LimitOperator.class)
            .build();
    Multimap<Class<? extends Operator<?>>, Operator<?>> ops =
        OperatorUtils.classifyOperators(ts, searchedClasses);
    // To apply this optimization, in the input query:
    // - There cannot exist any order by/sort by clause,
    // thus existsOrdering should be false.
    // - There cannot exist any distribute by clause, thus
    // existsPartitioning should be false.
    // - There cannot exist any cluster by clause, thus
    // existsOrdering AND existsPartitioning should be false.
    for (Operator<?> op : ops.get(ReduceSinkOperator.class)) {
      ReduceSinkDesc reduceSinkConf = ((ReduceSinkOperator) op).getConf();
      if (reduceSinkConf.isOrdering() || reduceSinkConf.isPartitioning()) {
        return null;
      }
    }
    // - There cannot exist any (distinct) aggregate.
    for (Operator<?> op : ops.get(GroupByOperator.class)) {
      GroupByDesc groupByConf = ((GroupByOperator) op).getConf();
      if (groupByConf.isAggregate() || groupByConf.isDistinct()) {
        return null;
      }
    }
    // - There cannot exist any sampling predicate.
    for (Operator<?> op : ops.get(FilterOperator.class)) {
      FilterDesc filterConf = ((FilterOperator) op).getConf();
      if (filterConf.getIsSamplingPred()) {
        return null;
      }
    }
    // If there is one and only one limit starting at op, return the limit
    // If there is no limit, return 0
    // Otherwise, return null
    Collection<Operator<?>> limitOps = ops.get(LimitOperator.class);
    if (limitOps.size() == 1) {
      return ((LimitOperator) limitOps.iterator().next()).getConf().getLimit();
    } else if (limitOps.size() == 0) {
      return 0;
    }
    return null;
  }
}
winningsix/hive
ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
Java
apache-2.0
8,347
package com.luciofm.presentation.androidsalao;

/**
 * Callback interface for driving a remote presentation display.
 *
 * <p>NOTE(review): the exact distinction between {@link #next()} and
 * {@link #advance()} is not visible from this file — presumably {@code next}
 * switches slides while {@code advance} steps within the current slide;
 * confirm against the implementing class.
 */
public interface DisplayService {

    /**
     * Notifies the display of a connection-state change.
     *
     * @param connection {@code true} when connected, {@code false} otherwise
     *                   (fixed typo: was {@code connetion})
     */
    void connection(boolean connection);

    /** Moves the presentation forward. */
    void next();

    /** Moves the presentation backward. */
    void previous();

    /** Advances the presentation (see class note on next-vs-advance). */
    void advance();
}
luciofm/AndroidSalao
src/com/luciofm/presentation/androidsalao/DisplayService.java
Java
apache-2.0
175
package io.kaif.mobile.app;

import rx.Observable;
import rx.android.app.support.RxFragment;
import rx.android.lifecycle.LifecycleObservable;
import rx.android.schedulers.AndroidSchedulers;

/**
 * Base fragment that ties RxJava subscriptions to the fragment lifecycle so
 * they are automatically unsubscribed when the fragment is destroyed.
 */
public class BaseFragment extends RxFragment {

  /**
   * Binds {@code observable} to this fragment's lifecycle, delivering its
   * emissions on the Android main thread.
   *
   * @param observable the source observable
   * @return the lifecycle-bound, main-thread-observing observable
   */
  protected <T> Observable<T> bind(Observable<T> observable) {
    Observable<T> onMainThread = observable.observeOn(AndroidSchedulers.mainThread());
    return LifecycleObservable.bindFragmentLifecycle(lifecycle(), onMainThread);
  }
}
yongjhih/kaif-android
app/src/main/java/io/kaif/mobile/app/BaseFragment.java
Java
apache-2.0
438
package mil.nga.giat.geowave.accumulo;

import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.accumulo.core.data.Key;

/**
 * This class encapsulates the elements that compose the row ID in Accumulo, and
 * can serialize and deserialize the individual elements to/from the row ID. The
 * row ID consists of the index ID, followed by an adapter ID, followed by a
 * data ID, followed by data ID length and adapter ID length, and lastly the
 * number of duplicate row IDs for this entry. The data ID must be unique for an
 * adapter, so the combination of adapter ID and data ID is intended to
 * guarantee uniqueness for this row ID.
 */
public class AccumuloRowId
{
	/**
	 * Size in bytes of the trailing metadata section of a serialized row ID:
	 * three ints holding the adapter ID length, the data ID length, and the
	 * number of duplicates. (Replaces the magic number 12 that was repeated
	 * throughout this class.)
	 */
	private static final int METADATA_BYTE_SIZE = 12;

	private final byte[] indexId;
	private final byte[] dataId;
	private final byte[] adapterId;
	private final int numberOfDuplicates;

	/**
	 * Deserializes the row ID elements from an Accumulo key's row bytes.
	 */
	public AccumuloRowId(
			final Key key ) {
		this(
				key.getRow().copyBytes());
	}

	/**
	 * Deserializes the row ID elements from a raw serialized row ID. The
	 * metadata ints at the tail tell us how to partition the leading bytes
	 * into index ID, adapter ID and data ID.
	 */
	public AccumuloRowId(
			final byte[] accumuloRowId ) {
		// read the three trailing metadata ints first
		final byte[] metadata = Arrays.copyOfRange(
				accumuloRowId,
				accumuloRowId.length - METADATA_BYTE_SIZE,
				accumuloRowId.length);
		final ByteBuffer metadataBuf = ByteBuffer.wrap(metadata);
		final int adapterIdLength = metadataBuf.getInt();
		final int dataIdLength = metadataBuf.getInt();
		final int numberOfDuplicates = metadataBuf.getInt();

		// the remainder is indexId + adapterId + dataId, in that order
		final ByteBuffer buf = ByteBuffer.wrap(
				accumuloRowId,
				0,
				accumuloRowId.length - METADATA_BYTE_SIZE);
		final byte[] indexId = new byte[accumuloRowId.length - METADATA_BYTE_SIZE - adapterIdLength - dataIdLength];
		final byte[] adapterId = new byte[adapterIdLength];
		final byte[] dataId = new byte[dataIdLength];
		buf.get(indexId);
		buf.get(adapterId);
		buf.get(dataId);
		this.indexId = indexId;
		this.dataId = dataId;
		this.adapterId = adapterId;
		this.numberOfDuplicates = numberOfDuplicates;
	}

	public AccumuloRowId(
			final byte[] indexId,
			final byte[] dataId,
			final byte[] adapterId,
			final int numberOfDuplicates ) {
		this.indexId = indexId;
		this.dataId = dataId;
		this.adapterId = adapterId;
		this.numberOfDuplicates = numberOfDuplicates;
	}

	/**
	 * Serializes this row ID: indexId | adapterId | dataId | adapterId length |
	 * dataId length | numberOfDuplicates.
	 */
	public byte[] getRowId() {
		final ByteBuffer buf = ByteBuffer.allocate(METADATA_BYTE_SIZE + dataId.length + adapterId.length + indexId.length);
		buf.put(indexId);
		buf.put(adapterId);
		buf.put(dataId);
		buf.putInt(adapterId.length);
		buf.putInt(dataId.length);
		buf.putInt(numberOfDuplicates);
		return buf.array();
	}

	public byte[] getIndexId() {
		return indexId;
	}

	public byte[] getDataId() {
		return dataId;
	}

	public byte[] getAdapterId() {
		return adapterId;
	}

	public int getNumberOfDuplicates() {
		return numberOfDuplicates;
	}
}
state-hiu/geowave
geowave-accumulo/src/main/java/mil/nga/giat/geowave/accumulo/AccumuloRowId.java
Java
apache-2.0
2,604
//include files #include "Table1.h" namespace hoge{ /* * This class is generated automatically. * Never change from your hand. */ /** * <pre> * schema name : "tiny_query_helper_test" * table name : "table1" * remarks : "" * * Persistable: * Available to use Persistor-methods, like DBManager\#insert(IPersistable),\#update(IPersistable),\#delete(IPersistable),else. * * </pre> */ //実体を定義 //カラム情報オブジェクトを定義 const std::string Table1::column::id::name_ ("id"); const std::string Table1::column::data1_int::name_ ("data1_int"); const std::string Table1::column::data2_string::name_ ("data2_string"); //カラム情報の実態を定義 const typename Table1::column::id Table1::column::id; const typename Table1::column::data1_int Table1::column::data1_int; const typename Table1::column::data2_string Table1::column::data2_string; }
yamada28go/tiny_query_helper
test/Table1.cpp
C++
apache-2.0
904
<?php
/* @var $this DefaultController */

// Index view for the admin module's default controller:
// the breadcrumb trail simply shows the module id.
$this->breadcrumbs=array(
	$this->module->id,
);
?>
olijen/asana-api
protected/modules/admin/views/default/index.php
PHP
apache-2.0
95
/*
 * Copyright 1999-2018 Alibaba Group Holding Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.csp.sentinel.dashboard.rule.zookeeper;

import org.apache.commons.lang.StringUtils;

/**
 * Utility for building the Zookeeper node paths under which Sentinel rules
 * are stored.
 */
public class ZookeeperConfigUtil {

    /** Root Zookeeper node for all Sentinel rule configuration. */
    public static final String RULE_ROOT_PATH = "/sentinel_rule_config";
    // NOTE(review): presumably consumed by a Curator retry policy — confirm at call sites.
    public static final int RETRY_TIMES = 3;
    // NOTE(review): presumably the retry sleep in milliseconds — confirm at call sites.
    public static final int SLEEP_TIME = 1000;

    // Utility class: prevent instantiation and subclassing.
    private ZookeeperConfigUtil() {
    }

    /**
     * Returns the rule node path for the given app:
     * {@code /sentinel_rule_config/<appName>}. A blank app name yields the
     * root path; a leading '/' in the app name is not duplicated.
     *
     * @param appName the application name, may be blank
     * @return the Zookeeper path for the app's rules
     */
    public static String getPath(String appName) {
        StringBuilder stringBuilder = new StringBuilder(RULE_ROOT_PATH);
        if (StringUtils.isBlank(appName)) {
            return stringBuilder.toString();
        }
        if (appName.startsWith("/")) {
            stringBuilder.append(appName);
        } else {
            stringBuilder.append("/")
                .append(appName);
        }
        return stringBuilder.toString();
    }
}
alibaba/Sentinel
sentinel-dashboard/src/test/java/com/alibaba/csp/sentinel/dashboard/rule/zookeeper/ZookeeperConfigUtil.java
Java
apache-2.0
1,381
package cli

import (
	"flag"
	"fmt"
	"os"
	"strconv"
	"strings"
	"time"
)

// BashCompletionFlag enables bash-completion for all commands and subcommands.
var BashCompletionFlag = BoolFlag{
	Name: "generate-bash-completion",
	Hide: true,
}

// VersionFlag prints the version for the application.
var VersionFlag = BoolFlag{
	Name:  "version, v",
	Usage: "print the version",
}

// HelpFlag prints the help for all commands and subcommands.
// Set to the zero value (BoolFlag{}) to disable flag -- keeps subcommand
// unless HideHelp is set to true.
var HelpFlag = BoolFlag{
	Name:  "help, h",
	Usage: "show help",
	Hide:  true,
}

// Flag is a common interface related to parsing flags in cli.
// For more advanced flag parsing techniques, it is recommended that
// this interface be implemented.
type Flag interface {
	fmt.Stringer
	// Apply Flag settings to the given flag set
	Apply(*flag.FlagSet)
	getName() string
	isNotHidden() bool
}

// flagSet builds a flag.FlagSet named name from the given cli flags.
func flagSet(name string, flags []Flag) *flag.FlagSet {
	set := flag.NewFlagSet(name, flag.ContinueOnError)

	for _, f := range flags {
		f.Apply(set)
	}
	return set
}

// eachName invokes fn once for every comma-separated name in longName.
func eachName(longName string, fn func(string)) {
	parts := strings.Split(longName, ",")
	for _, name := range parts {
		name = strings.Trim(name, " ")
		fn(name)
	}
}

// Generic is a generic parseable type identified by a specific flag
type Generic interface {
	Set(value string) error
	String() string
}

// GenericFlag is the flag type for types implementing Generic
type GenericFlag struct {
	Name   string
	Value  Generic
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the string representation of the generic flag to display the
// help text to the user (uses the String() method of the generic flag to show
// the value)
func (f GenericFlag) String() string {
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s%s \"%v\"\t%v", prefixFor(f.Name), f.Name, f.Value, f.Usage))
}

// Apply takes the flagset and calls Set on the generic flag with the value
// provided by the user for parsing by the flag
func (f GenericFlag) Apply(set *flag.FlagSet) {
	val := f.Value
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				val.Set(envVal)
				break
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Var(f.Value, name, f.Usage)
	})
}

func (f GenericFlag) getName() string {
	return f.Name
}

func (f GenericFlag) isNotHidden() bool {
	return !f.Hide
}

// StringSlice is an opaque type for []string, usable as a flag.Value.
type StringSlice []string

// Set appends value to the slice (called once per flag occurrence).
func (f *StringSlice) Set(value string) error {
	*f = append(*f, value)
	return nil
}

// String returns a readable representation of the slice.
func (f *StringSlice) String() string {
	return fmt.Sprintf("%s", *f)
}

// Value returns the accumulated values.
func (f *StringSlice) Value() []string {
	return *f
}

// StringSliceFlag is a flag that collects repeated string values.
type StringSliceFlag struct {
	Name   string
	Value  *StringSlice
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f StringSliceFlag) String() string {
	firstName := strings.Trim(strings.Split(f.Name, ",")[0], " ")
	pref := prefixFor(firstName)
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s [%v]\t%v", prefixedNames(f.Name), pref+firstName+" option "+pref+firstName+" option", f.Usage))
}

// Apply registers the flag, seeding it from the first non-empty env var
// (comma-split on the value) when EnvVar is set.
func (f StringSliceFlag) Apply(set *flag.FlagSet) {
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				newVal := &StringSlice{}
				for _, s := range strings.Split(envVal, ",") {
					s = strings.TrimSpace(s)
					newVal.Set(s)
				}
				f.Value = newVal
				break
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Var(f.Value, name, f.Usage)
	})
}

func (f StringSliceFlag) getName() string {
	return f.Name
}

func (f StringSliceFlag) isNotHidden() bool {
	return !f.Hide
}

// IntSlice is an opaque type for []int, usable as a flag.Value.
type IntSlice []int

// Set parses value as an int and appends it to the slice.
func (f *IntSlice) Set(value string) error {
	tmp, err := strconv.Atoi(value)
	if err != nil {
		return err
	}
	*f = append(*f, tmp)
	return nil
}

// String returns a readable representation of the slice.
func (f *IntSlice) String() string {
	return fmt.Sprintf("%d", *f)
}

// Value returns the accumulated values.
func (f *IntSlice) Value() []int {
	return *f
}

// IntSliceFlag is a flag that collects repeated int values.
type IntSliceFlag struct {
	Name   string
	Value  *IntSlice
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f IntSliceFlag) String() string {
	firstName := strings.Trim(strings.Split(f.Name, ",")[0], " ")
	pref := prefixFor(firstName)
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s [%v]\t%v", prefixedNames(f.Name), pref+firstName+" option "+pref+firstName+" option", f.Usage))
}

// Apply registers the flag, seeding it from the first non-empty env var
// (comma-split on the value) when EnvVar is set.
func (f IntSliceFlag) Apply(set *flag.FlagSet) {
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				newVal := &IntSlice{}
				for _, s := range strings.Split(envVal, ",") {
					s = strings.TrimSpace(s)
					err := newVal.Set(s)
					if err != nil {
						// BUG FIX: the error text was previously passed as the
						// Fprintf format string; use Fprintln so '%' in the
						// message cannot be misinterpreted.
						fmt.Fprintln(os.Stderr, err.Error())
					}
				}
				f.Value = newVal
				break
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Var(f.Value, name, f.Usage)
	})
}

func (f IntSliceFlag) getName() string {
	return f.Name
}

func (f IntSliceFlag) isNotHidden() bool {
	return !f.Hide
}

// BoolFlag is a flag that defaults to false.
type BoolFlag struct {
	Name   string
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f BoolFlag) String() string {
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s\t%v", prefixedNames(f.Name), f.Usage))
}

// Apply registers the flag, seeding the default from the first non-empty
// env var when EnvVar is set.
func (f BoolFlag) Apply(set *flag.FlagSet) {
	val := false
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				envValBool, err := strconv.ParseBool(envVal)
				if err == nil {
					val = envValBool
				}
				break
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Bool(name, val, f.Usage)
	})
}

func (f BoolFlag) getName() string {
	return f.Name
}

func (f BoolFlag) isNotHidden() bool {
	return !f.Hide
}

// BoolTFlag is a bool flag that defaults to true.
type BoolTFlag struct {
	Name   string
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f BoolTFlag) String() string {
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s\t%v", prefixedNames(f.Name), f.Usage))
}

// Apply registers the flag, seeding the default from the first env var that
// parses as a bool when EnvVar is set.
func (f BoolTFlag) Apply(set *flag.FlagSet) {
	val := true
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				envValBool, err := strconv.ParseBool(envVal)
				if err == nil {
					val = envValBool
					break
				}
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Bool(name, val, f.Usage)
	})
}

func (f BoolTFlag) getName() string {
	return f.Name
}

func (f BoolTFlag) isNotHidden() bool {
	return !f.Hide
}

// StringFlag is a flag holding a string value.
type StringFlag struct {
	Name   string
	Value  string
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag; a non-empty default value is
// quoted in the output.
func (f StringFlag) String() string {
	fmtString := "%s %v\t%v"
	if len(f.Value) > 0 {
		fmtString = "%s \"%v\"\t%v"
	}
	return withEnvHint(f.EnvVar, fmt.Sprintf(fmtString, prefixedNames(f.Name), f.Value, f.Usage))
}

// Apply registers the flag, seeding the default from the first non-empty
// env var when EnvVar is set.
func (f StringFlag) Apply(set *flag.FlagSet) {
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				f.Value = envVal
				break
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.String(name, f.Value, f.Usage)
	})
}

func (f StringFlag) getName() string {
	return f.Name
}

func (f StringFlag) isNotHidden() bool {
	return !f.Hide
}

// IntFlag is a flag holding an int value.
type IntFlag struct {
	Name   string
	Value  int
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f IntFlag) String() string {
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s \"%v\"\t%v", prefixedNames(f.Name), f.Value, f.Usage))
}

// Apply registers the flag, seeding the default from the first env var that
// parses as an int when EnvVar is set.
func (f IntFlag) Apply(set *flag.FlagSet) {
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				envValInt, err := strconv.ParseInt(envVal, 0, 64)
				if err == nil {
					f.Value = int(envValInt)
					break
				}
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Int(name, f.Value, f.Usage)
	})
}

func (f IntFlag) getName() string {
	return f.Name
}

func (f IntFlag) isNotHidden() bool {
	return !f.Hide
}

// DurationFlag is a flag holding a time.Duration value.
type DurationFlag struct {
	Name   string
	Value  time.Duration
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f DurationFlag) String() string {
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s \"%v\"\t%v", prefixedNames(f.Name), f.Value, f.Usage))
}

// Apply registers the flag, seeding the default from the first env var that
// parses as a duration when EnvVar is set.
func (f DurationFlag) Apply(set *flag.FlagSet) {
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				envValDuration, err := time.ParseDuration(envVal)
				if err == nil {
					f.Value = envValDuration
					break
				}
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Duration(name, f.Value, f.Usage)
	})
}

func (f DurationFlag) getName() string {
	return f.Name
}

func (f DurationFlag) isNotHidden() bool {
	return !f.Hide
}

// Float64Flag is a flag holding a float64 value.
type Float64Flag struct {
	Name   string
	Value  float64
	Usage  string
	EnvVar string
	Hide   bool
}

// String returns the usage line for the flag.
func (f Float64Flag) String() string {
	return withEnvHint(f.EnvVar, fmt.Sprintf("%s \"%v\"\t%v", prefixedNames(f.Name), f.Value, f.Usage))
}

// Apply registers the flag, seeding the default from the first env var that
// parses as a float when EnvVar is set.
func (f Float64Flag) Apply(set *flag.FlagSet) {
	if f.EnvVar != "" {
		for _, envVar := range strings.Split(f.EnvVar, ",") {
			envVar = strings.TrimSpace(envVar)
			if envVal := os.Getenv(envVar); envVal != "" {
				// BUG FIX: bitSize was 10, which is not a valid value for
				// strconv.ParseFloat (must be 32 or 64); use 64.
				envValFloat, err := strconv.ParseFloat(envVal, 64)
				if err == nil {
					f.Value = envValFloat
					// BUG FIX: stop at the first successful env var, matching
					// the behavior of every other flag type in this file.
					break
				}
			}
		}
	}

	eachName(f.Name, func(name string) {
		set.Float64(name, f.Value, f.Usage)
	})
}

func (f Float64Flag) getName() string {
	return f.Name
}

func (f Float64Flag) isNotHidden() bool {
	return !f.Hide
}

// prefixFor returns "-" for single-letter names and "--" otherwise.
func prefixFor(name string) (prefix string) {
	if len(name) == 1 {
		prefix = "-"
	} else {
		prefix = "--"
	}

	return
}

// prefixedNames renders a comma-separated name list with dash prefixes,
// e.g. "help, h" -> "--help, -h".
func prefixedNames(fullName string) (prefixed string) {
	parts := strings.Split(fullName, ",")
	for i, name := range parts {
		name = strings.Trim(name, " ")
		prefixed += prefixFor(name) + name
		if i < len(parts)-1 {
			prefixed += ", "
		}
	}
	return
}

// withEnvHint appends an " [$VAR, $VAR2]" hint to str when envVar is set.
func withEnvHint(envVar, str string) string {
	envText := ""
	if envVar != "" {
		envText = fmt.Sprintf(" [$%s]", strings.Join(strings.Split(envVar, ","), ", $"))
	}
	return str + envText
}
harshavardhana/donut
Godeps/_workspace/src/github.com/minio-io/cli/flag.go
Go
apache-2.0
10,329
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ranger.authorization.elasticsearch.authorizer;

import java.util.List;

import org.apache.logging.log4j.Logger;
import org.apache.ranger.plugin.classloader.RangerPluginClassLoader;
import org.elasticsearch.common.logging.ESLoggerFactory;

/**
 * Shim that loads the real Ranger Elasticsearch authorizer implementation in
 * an isolated plugin class loader and delegates permission checks to it. The
 * context class loader is swapped around each delegated call so the plugin
 * resolves its own dependencies.
 */
public class RangerElasticsearchAuthorizer {

	private static final Logger LOG = ESLoggerFactory.getLogger(RangerElasticsearchAuthorizer.class);

	private static final String RANGER_PLUGIN_TYPE = "elasticsearch";

	private static final String RANGER_ELASTICSEARCH_AUTHORIZER_IMPL_CLASSNAME = "org.apache.ranger.authorization.elasticsearch.authorizer.RangerElasticsearchAuthorizer";

	private static RangerPluginClassLoader rangerPluginClassLoader = null;

	private static ClassLoader esClassLoader = null;

	private RangerElasticsearchAccessControl rangerElasticsearchAccessControl = null;

	public RangerElasticsearchAuthorizer() {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> RangerElasticsearchAuthorizer.RangerElasticsearchAuthorizer()");
		}

		this.init();

		if (LOG.isDebugEnabled()) {
			LOG.debug("<== RangerElasticsearchAuthorizer.RangerElasticsearchAuthorizer()");
		}
	}

	/**
	 * Loads the plugin implementation via the Ranger plugin class loader. On
	 * failure the error is logged and the delegate stays null; see the
	 * fail-closed guard in {@link #checkPermission}.
	 */
	public void init() {
		if (LOG.isDebugEnabled()) {
			LOG.debug("==> RangerElasticsearchAuthorizer.init()");
		}

		try {
			// In elasticsearch this.getClass().getClassLoader() is FactoryURLClassLoader,
			// but Thread.currentThread().getContextClassLoader() is AppClassLoader.
			esClassLoader = Thread.currentThread().getContextClassLoader();
			Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
			rangerPluginClassLoader = RangerPluginClassLoader.getInstance(RANGER_PLUGIN_TYPE, this.getClass());
			Thread.currentThread().setContextClassLoader(esClassLoader);

			@SuppressWarnings("unchecked")
			Class<RangerElasticsearchAccessControl> cls = (Class<RangerElasticsearchAccessControl>) Class
					.forName(RANGER_ELASTICSEARCH_AUTHORIZER_IMPL_CLASSNAME, true, rangerPluginClassLoader);

			activatePluginClassLoader();

			// Class.newInstance() is deprecated; getDeclaredConstructor()
			// wraps constructor exceptions instead of rethrowing them raw.
			rangerElasticsearchAccessControl = cls.getDeclaredConstructor().newInstance();
		} catch (Exception e) {
			LOG.error("Error Enabling RangerElasticsearchAuthorizer", e);
		} finally {
			deactivatePluginClassLoader();
		}

		if (LOG.isDebugEnabled()) {
			LOG.debug("<== RangerElasticsearchAuthorizer.init()");
		}
	}

	/**
	 * Delegates the permission check to the plugin implementation.
	 *
	 * @return true when access is allowed; false when denied or when the
	 *         plugin failed to initialize (fail closed instead of NPE)
	 */
	public boolean checkPermission(String user, List<String> groups, String index, String action,
			String clientIPAddress) {
		boolean ret = false;

		if (LOG.isDebugEnabled()) {
			LOG.debug("==> RangerElasticsearchAuthorizer.checkPermission()");
		}

		// BUG FIX: previously a failed init() left the delegate null and this
		// method threw NullPointerException; deny access instead.
		if (rangerElasticsearchAccessControl == null) {
			LOG.error("RangerElasticsearchAccessControl is not initialized, denying access.");
			return false;
		}

		try {
			activatePluginClassLoader();

			ret = rangerElasticsearchAccessControl.checkPermission(user, groups, index, action, clientIPAddress);
		} finally {
			deactivatePluginClassLoader();
		}

		if (LOG.isDebugEnabled()) {
			LOG.debug("<== RangerElasticsearchAuthorizer.checkPermission()");
		}

		return ret;
	}

	// Switch the context class loader to the plugin's loader for a delegated call.
	private void activatePluginClassLoader() {
		if (rangerPluginClassLoader != null) {
			Thread.currentThread().setContextClassLoader(rangerPluginClassLoader);
		}
	}

	// Restore the Elasticsearch context class loader after a delegated call.
	private void deactivatePluginClassLoader() {
		if (esClassLoader != null) {
			Thread.currentThread().setContextClassLoader(esClassLoader);
		}
	}
}
gzsombor/ranger
ranger-elasticsearch-plugin-shim/src/main/java/org/apache/ranger/authorization/elasticsearch/authorizer/RangerElasticsearchAuthorizer.java
Java
apache-2.0
3,996
package org.opencompare;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import org.junit.Test;
import org.opencompare.api.java.Cell;
import org.opencompare.api.java.Feature;
import org.opencompare.api.java.PCM;
import org.opencompare.api.java.PCMContainer;
import org.opencompare.api.java.Product;
import org.opencompare.api.java.Value;
import org.opencompare.api.java.impl.io.KMFJSONLoader;
import org.opencompare.api.java.io.PCMLoader;
import org.opencompare.api.java.value.BooleanValue;
import org.opencompare.api.java.value.IntegerValue;
import org.opencompare.api.java.value.RealValue;
import org.opencompare.api.java.value.StringValue;

/**
 * Extracts simple "facts" from PCM matrices (quantiles for numeric columns,
 * dominant categories for string columns, dominant boolean values) and writes
 * them to one text file per input matrix. Comments translated from French.
 */
public class StatisticsPlusPlusTest {

	@Test
	public void DSLtest() throws IOException {
		// Parameters that would normally come from the .fact DSL parameter file.
		double threshold = 0.9;
		// Clamp the threshold to at least 0.5 so reported facts stay meaningful.
		if (threshold < 0.5) {
			threshold = 0.5;
		}
		int maxFacts = 5;

		ArrayList<String> features = new ArrayList<String>();
		features.add("number");
		features.add("string");
		features.add("boolean");

		// Which feature kinds the user asked facts for.
		boolean featureNumber = isIn("number", features);
		boolean featureString = isIn("string", features);
		boolean featureBoolean = isIn("boolean", features);

		// NOTE(review): hard-coded absolute paths make this test machine-specific.
		String path = "/home/nicolasd/Bureau/OpenCompare_data/data";
		String pathTxt = "/home/nicolasd/Bureau/OpenCompare_data/txt";

		// Create the output directory if it does not exist (best effort).
		File theDir = new File(pathTxt);
		if (!theDir.exists()) {
			System.out.println("creating directory txt :");
			boolean created = false;
			try {
				// BUG FIX: the original printed "DIR created" even when
				// mkdir() returned false; use the actual return value.
				created = theDir.mkdir();
			} catch (SecurityException se) {
				// best effort: keep going, the write below will fail loudly
			}
			if (created) {
				System.out.println("DIR created");
			}
		}

		File pcmFile = new File(path);
		File[] filesInDir = pcmFile.listFiles();
		PCMLoader loader = new KMFJSONLoader();

		for (File f : filesInDir) {
			List<PCMContainer> pcmContainers = loader.load(f);

			// Output file named after the input, minus its 4-char extension.
			String name = f.getName();
			name = name.substring(0, name.length() - 4);
			File file = new File(pathTxt + "/" + name + ".txt");
			file.createNewFile();

			// BUG FIX: try-with-resources; the original leaked the writer
			// whenever an exception was thrown before close().
			try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
				for (PCMContainer pcmContainer : pcmContainers) {
					writeContainerFacts(pcmContainer, f, writer, threshold, maxFacts,
							featureNumber, featureString, featureBoolean);
				}
			}
		}
	}

	/**
	 * Writes the facts found in one PCM container to the given writer, echoing
	 * everything to stdout as the original test did.
	 */
	private void writeContainerFacts(PCMContainer pcmContainer, File f, BufferedWriter writer,
			double threshold, int maxFacts,
			boolean featureNumber, boolean featureString, boolean featureBoolean) throws IOException {
		System.out.println("--------------------------------------");
		writer.write("--------------------------------------");
		writer.newLine();
		System.out.println("Fichier : " + f.getName());
		writer.write("Fichier : " + f.getName());
		writer.newLine();
		System.out.println("--------------------------------------");
		writer.write("--------------------------------------");
		System.out.println("\n\n");
		writer.newLine();

		PCM pcm = pcmContainer.getPcm();
		ArrayList<String> facts = new ArrayList<String>();

		// Walk the columns.
		for (Feature feature : pcm.getConcreteFeatures()) {
			String nomVariable = feature.getName();

			// A column has a type only if every cell interprets to that type.
			boolean nombre = true;
			boolean chaineCaract = true;
			boolean bool = true;
			ArrayList<Cell> colonne = new ArrayList<Cell>();

			// Walk the rows, collecting the column's cells.
			for (Product product : pcm.getProducts()) {
				Cell cell = product.findCell(feature);
				Value interp = cell.getInterpretation();
				if (!(interp instanceof IntegerValue || interp instanceof RealValue)) {
					nombre = false;
				}
				if (!(interp instanceof StringValue)) {
					chaineCaract = false;
				}
				if (!(interp instanceof BooleanValue)) {
					bool = false;
				}
				colonne.add(cell);
			}

			// Numeric column: report a quantile-based fact if requested.
			if (nombre && featureNumber) {
				double[] valeurs = new double[colonne.size()];
				for (int i = 0; i < colonne.size(); i++) {
					Value interp = colonne.get(i).getInterpretation();
					if (interp instanceof IntegerValue) {
						valeurs[i] = Integer.parseInt(colonne.get(i).getContent());
					} else if (interp instanceof RealValue) {
						valeurs[i] = Double.parseDouble(colonne.get(i).getContent());
					} else {
						System.out.println("Problème de type");
						writer.write("Problème de type");
						writer.newLine();
					}
				}
				double quant = quantile(valeurs, threshold);
				if (quant != -1) {
					String fact = "Plus de " + (threshold * 100) + "% des produits ont une valeur de "
							+ nomVariable + " supérieure à " + quant;
					if (facts.size() < maxFacts) {
						facts.add(fact);
					}
				}
			}

			// String column: report dominant categories if requested.
			if (chaineCaract && featureString) {
				ArrayList<String> valeurs = new ArrayList<String>();
				for (int i = 0; i < colonne.size(); i++) {
					valeurs.add(colonne.get(i).getContent());
				}
				ArrayList<String> categoriePrincipale = categorie(valeurs, threshold);
				if (categoriePrincipale.size() != 0) {
					String fact = "Plus de " + (threshold * 100) + "% des produits ont une valeur de "
							+ nomVariable + " égale à ";
					fact += categoriePrincipale.get(0);
					if (categoriePrincipale.size() > 1) {
						for (int i = 1; i < categoriePrincipale.size() - 1; i++) {
							fact += ", " + categoriePrincipale.get(i);
						}
						fact += " et " + categoriePrincipale.get(categoriePrincipale.size() - 1);
					}
					if (facts.size() < maxFacts) {
						facts.add(fact);
					}
				}
			}

			// Boolean column: report the dominant boolean if requested.
			if (bool && featureBoolean) {
				boolean[] valeurs = new boolean[colonne.size()];
				for (int i = 0; i < colonne.size(); i++) {
					if (colonne.get(i).getInterpretation() instanceof BooleanValue) {
						valeurs[i] = Boolean.parseBoolean(colonne.get(i).getContent());
					} else {
						System.out.println("Problème de type");
						writer.write("Problème de type");
						writer.newLine();
					}
				}
				String boolValue = pourcentage(valeurs, threshold);
				if (!boolValue.equals("")) {
					String fact = "Plus de " + (threshold * 100) + "% des produits ont une valeur de "
							+ nomVariable + " égale à " + boolValue;
					if (facts.size() < maxFacts) {
						facts.add(fact);
					}
				}
			}
		}

		if (facts.size() != 0) {
			for (String fact : facts) {
				System.out.println(fact);
				writer.write(fact);
				writer.newLine();
			}
		} else {
			System.out.println("La matrice ne contient pas de fait intéressant d'après les features demandées.");
			writer.write("La matrice ne contient pas de fait intéressant d'après les features demandées.");
			writer.newLine();
		}
		System.out.println("\n\n");
		writer.newLine();
		writer.newLine();
	}

	/**
	 * Returns a value q such that at least {@code threshold} (a fraction in
	 * [0.5, 1]) of the inputs are greater than or equal to q, or -1 for a
	 * null/empty input.
	 *
	 * <p>BUG FIX: the original computed the index as
	 * {@code round(length * threshold / 100)}, mixing the fractional scale the
	 * callers use (0.9) with a percentage scale, so it effectively always
	 * returned the minimum; the index is now
	 * {@code floor(length * (1 - threshold))} — the position below which the
	 * reported fraction of values lies — clamped into the valid range.
	 */
	public static double quantile(double[] values, double threshold) {
		double res = -1;
		if (!(values == null || values.length == 0)) {
			double[] v = new double[values.length];
			System.arraycopy(values, 0, v, 0, values.length);
			Arrays.sort(v);
			int n = (int) Math.floor(v.length * (1 - threshold));
			if (n < 0) {
				n = 0;
			} else if (n >= v.length) {
				n = v.length - 1;
			}
			res = v[n];
		}
		return res;
	}

	/**
	 * Returns the modalities that appear in at least {@code threshold}
	 * (fraction) of the values, ignoring empty strings. Empty list when no
	 * modality dominates or the input is null/empty.
	 */
	public static ArrayList<String> categorie(ArrayList<String> values, double threshold) {
		ArrayList<String> res = new ArrayList<String>();
		if (!(values == null || values.size() == 0)) {
			// Collect the distinct modalities.
			HashSet<String> uniqueValues = new HashSet<String>(values);
			List<String> modalites = new ArrayList<>(uniqueValues);

			// Count occurrences of each modality.
			double[] occurences = new double[modalites.size()];
			for (String value : values) {
				for (int j = 0; j < modalites.size(); j++) {
					if (value.equals(modalites.get(j))) {
						occurences[j]++;
					}
				}
			}

			// Keep the modalities whose share reaches the threshold.
			for (int k = 0; k < occurences.length; k++) {
				double percent = occurences[k] / values.size();
				if (percent >= threshold && !modalites.get(k).equals("")) {
					res.add(modalites.get(k));
				}
			}
		}
		return res;
	}

	/**
	 * Returns "True" when at least {@code threshold} (fraction) of the values
	 * are true, "False" when at least that fraction are false, and "" when
	 * neither dominates or the input is null/empty.
	 *
	 * <p>BUG FIX: the original returned "False" whenever the true-share was
	 * below the threshold, even when the false-share was below it too, which
	 * produced incorrect "more than X% are False" facts.
	 */
	public static String pourcentage(boolean[] values, double threshold) {
		String res = "";
		if (!(values == null || values.length == 0)) {
			double nbTrue = 0;
			for (boolean value : values) {
				if (value) {
					nbTrue++;
				}
			}
			double percent = nbTrue / values.length;
			if (percent >= threshold) {
				res = "True";
			} else if (1 - percent >= threshold) {
				res = "False";
			}
		}
		return res;
	}

	/** Returns true when {@code element} equals some entry of {@code liste}. */
	public boolean isIn(String element, ArrayList<String> liste) {
		for (String item : liste) {
			if (element.equals(item)) {
				return true;
			}
		}
		return false;
	}
}
jbqueyrie/TPOpenCompare
src/test/java/org/opencompare/StatisticsPlusPlusTest.java
Java
apache-2.0
11,353
package org.zstack.network.service.flat;

import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l3.IpRangeInventory;
import org.zstack.header.network.l3.UsedIpInventory;

/**
 * Created by frank on 10/11/2015.
 *
 * Reply carrying the IP address acquired for a flat-network DHCP server.
 * Plain data holder: all fields are populated by the message sender.
 */
public class FlatDhcpAcquireDhcpServerIpReply extends MessageReply {
    // the acquired DHCP server IP address
    private String ip;
    // netmask paired with the acquired IP
    private String netmask;
    // UUID of the UsedIp record backing the allocation
    private String usedIpUuid;
    // full inventory of the allocated IP — presumably redundant with
    // usedIpUuid for callers that need the whole record; confirm with sender
    private UsedIpInventory usedIp;
    // IP range the address was allocated from
    private IpRangeInventory ipr;

    public String getNetmask() {
        return netmask;
    }

    public void setNetmask(String netmask) {
        this.netmask = netmask;
    }

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public String getUsedIpUuid() {
        return usedIpUuid;
    }

    public void setUsedIpUuid(String usedIpUuid) {
        this.usedIpUuid = usedIpUuid;
    }

    public UsedIpInventory getUsedIp() {
        return usedIp;
    }

    public void setUsedIp(UsedIpInventory usedIp) {
        this.usedIp = usedIp;
    }

    public IpRangeInventory getIpr() {
        return ipr;
    }

    public void setIpr(IpRangeInventory ipr) {
        this.ipr = ipr;
    }
}
AlanJager/zstack
plugin/flatNetworkProvider/src/main/java/org/zstack/network/service/flat/FlatDhcpAcquireDhcpServerIpReply.java
Java
apache-2.0
1,199
// -------------------------------------------------------------------------------- // Copyright (c) 2014, XLR8 Development // -------------------------------------------------------------------------------- // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // -------------------------------------------------------------------------------- using System; using System.Collections.Generic; using System.Linq; using System.Net.Mime; using System.Text; using System.Text.RegularExpressions; using Antlr4.Runtime; using Antlr4.Runtime.Tree; using Common.Logging; using Common.Logging.Configuration; namespace tsql2pgsql.visitors { using antlr; using collections; using grammar; using pipeline; internal class PgsqlConverter : PipelineVisitor { /// <summary> /// Logger for instance /// </summary> private static readonly ILog Log = LogManager.GetCurrentClassLogger(); /// <summary> /// Map of all variables. /// </summary> private IDictionary<string, TSQLParser.VariableDeclarationContext> Variables; private string _returnType; /// <summary> /// An index that indicates the line after which the declare block should be inserted. /// </summary> private int _declareBlockAfter; /// <summary> /// Defines the string that will be used to replace the '@' in front of parameters. /// </summary> public string ParameterPrefix { get; set; } /// <summary> /// Defines the string that will be used to replace the '@' in front of variables. 
        /// </summary>
        public string VariablePrefix { get; set; }

        /// <summary>
        /// Gets or sets the parameters.
        /// Contains the unwrapped names of the procedure's parameters so that
        /// PortVariableName can distinguish parameters from local variables.
        /// </summary>
        /// <value>
        /// The parameters.
        /// </value>
        public ISet<string> Parameters { get; set; }

        /// <summary>
        /// Gets or sets the capitalization style.
        /// </summary>
        /// <value>
        /// The capitalization style.
        /// </value>
        public CapitalizationStyle CapitalizationStyle { get; set; }

        /// <summary>
        /// Gets the basic function map table.
        /// Maps a lower-cased T-SQL function name to its PGSQL replacement name.
        /// </summary>
        /// <value>
        /// The basic function map table.
        /// </value>
        public IDictionary<string, string> BasicFunctionMapTable { get; private set; }

        /// <summary>
        /// Gets the advanced function map table.
        /// </summary>
        /// <value>
        /// The advanced function map table.
        /// </value>
        public IDictionary<string, Func<string, string>> AdvancedFunctionMapTable { get; private set; }

        /// <summary>
        /// Provides us with an indication that we have used a smart record.
        /// When true, GetVariables adds a "_vSmartRecord RECORD" declaration.
        /// </summary>
        private bool _useSmartRecord;

        /// <summary>
        /// Creates a pgsql converter.
        /// </summary>
        public PgsqlConverter() : base(false)
        {
            _useSmartRecord = false;
            CapitalizationStyle = CapitalizationStyle.PascalCase;
            Parameters = new HashSet<string>();
            ParameterPrefix = "_p";
            Variables = new Dictionary<string, TSQLParser.VariableDeclarationContext>();
            VariablePrefix = "_v";

            // basic function mapping
            // NOTE(review): "utcnow" is not a built-in PostgreSQL function — presumably
            // a helper defined in the target schema; confirm.
            BasicFunctionMapTable = new Dictionary<string, string>();
            BasicFunctionMapTable["getdate"] = "utcnow";
            BasicFunctionMapTable["scope_identity"] = "lastval";
            BasicFunctionMapTable["error_number"] = "SQLSTATE";
            BasicFunctionMapTable["error_message"] = "SQLERRM";

            // advanced function mapping
            AdvancedFunctionMapTable = new Dictionary<string, Func<string, string>>();
        }

        /// <summary>
        /// Visits the specified pipeline.
        /// </summary>
        /// <param name="pipeline">The pipeline.</param>
        /// <returns>A result carrying the converted PL/PGSQL text; the pipeline is not rebuilt.</returns>
        public override PipelineResult Visit(Pipeline pipeline)
        {
            base.Visit(pipeline.ParseTree);

            return new PipelineResult
            {
                RebuildPipeline = false,
                Contents = GetContent()
            };
        }

        /// <summary>
        /// Gets the refined and filtered content for the procedure.
        /// Collapses doubled semicolons, blanks out lines containing only ";",
        /// and suppresses consecutive empty lines.
        /// </summary>
        /// <returns></returns>
        private IEnumerable<string> GetContent()
        {
            var prevLine = string.Empty;
            foreach (var line in GetRawContent())
            {
                var trimLine = line.TrimEnd();
                // collapse ";;" runs one character at a time down to a single ";"
                while (trimLine.EndsWith(";;"))
                    trimLine = trimLine.Substring(0, trimLine.Length - 1);
                if (trimLine.TrimStart() == ";")
                    trimLine = string.Empty;
                // emit the line unless it and its predecessor are both empty
                if (trimLine != string.Empty || prevLine != string.Empty)
                    yield return trimLine;
                prevLine = trimLine;
            }
        }

        /// <summary>
        /// Gets the unfiltered raw content for the procedure.
        /// Splices the DECLARE block after line _declareBlockAfter, indents the
        /// function body one level, and appends the language footer.
        /// </summary>
        /// <returns></returns>
        private IEnumerable<string> GetRawContent()
        {
            var contents = Pipeline.Contents;
            foreach (var line in contents.Take(_declareBlockAfter))
                yield return line;
            foreach (var line in GetDeclareBlock())
                yield return line;

            var bodyLines = string.Join("\n", contents.Skip(_declareBlockAfter)).Split('\n');

            // next line should be the "BEGIN" token for the block
            yield return bodyLines[0];
            foreach (var line in bodyLines.Skip(1).Take(bodyLines.Length - 2).Select(l => "\t" + l))
                yield return line;
            // next line should be the "END" token for the block
            yield return bodyLines[bodyLines.Length - 1];
            yield return "$$ LANGUAGE plpgsql";
        }

        /// <summary>
        /// Parses the embedded parameter list.
        /// Runs a fresh lexer/parser over the given fragment of text.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns></returns>
        private TSQLParser.EmbeddedParameterListContext EmbeddedParameterList(string content)
        {
            var stream = new CaseInsensitiveStream(content);
            var lexer = new TSQLLexer(stream);
            var parser = new TSQLParser(new CommonTokenStream(lexer));
            return parser.embeddedParameterList();
        }

        /// <summary>
        /// Ports the type.
/// </summary> /// <param name="typeName">Name of the type.</param> /// <returns></returns> private string PortDataType(string typeName) { switch (typeName.ToLowerInvariant()) { case "bit": return "boolean"; case "date": case "datetime": case "smalldatetime": return "date"; case "sysname": return "text"; } return typeName; } /// <summary> /// Ports the type of the data. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> private string PortDataType(TSQLParser.NumericTypeContext context) { return context.GetText(); } /// <summary> /// Ports the type of the data. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> private string PortDataType(TSQLParser.CharacterStringTypeContext context) { var characterStringTypeLength = context.characterStringTypeLength(); if (characterStringTypeLength != null) { if (characterStringTypeLength.MAX() != null) { return "text"; } if (context.NVARCHAR() != null) return string.Format("varchar{0}", characterStringTypeLength.GetText()); if (context.NCHAR() != null) return string.Format("char{0}", characterStringTypeLength.GetText()); } else { if (context.NVARCHAR() != null) return "varchar"; if (context.NCHAR() != null) return "char"; } return context.GetText(); } /// <summary> /// Ports the type. 
/// </summary> /// <param name="context">The context.</param> /// <returns></returns> private string PortDataType(TSQLParser.TypeContext context) { if (context.integerType() != null) return PortDataType(context.integerType().GetText()); if (context.typeInBracket() != null) return PortDataType(context.typeInBracket().type()); if (context.XML() != null) return PortDataType(context.GetText()); if (context.CURSOR() != null) return PortDataType(context.GetText()); if (context.qualifiedName() != null) return PortDataType(context.GetText()); if (context.numericType() != null) return PortDataType(context.numericType()); if (context.characterStringType() != null) return PortDataType(context.characterStringType()); throw new ArgumentException("you used something we didnt plan on"); } /// <summary> /// Capitalizes to style. /// </summary> /// <param name="value">The value.</param> private string Capitalize(string value) { switch (CapitalizationStyle) { case CapitalizationStyle.None: return value; case CapitalizationStyle.PascalCase: return Char.ToUpperInvariant(value[0]) + value.Substring(1); case CapitalizationStyle.CamelCase: return Char.ToLowerInvariant(value[0]) + value.Substring(1); } return value; } /// <summary> /// Ports the name of the variable. /// </summary> /// <param name="variableName">Name of the variable.</param> /// <returns></returns> private string PortVariableName(string variableName) { if (variableName[0] == '@') { return Parameters.Contains(variableName) ? ParameterPrefix + Capitalize(variableName.Substring(1)) : VariablePrefix + Capitalize(variableName.Substring(1)) ; } return variableName; } /// <summary> /// Ports the name of a table. 
/// </summary> /// <param name="tableName">Name of the table.</param> /// <returns></returns> private string PortTableName(string tableName) { if (tableName.StartsWith("#tmp")) return "_tmp" + tableName.Substring(4); if (tableName.StartsWith("#")) return "_tmp" + tableName.Substring(1); return tableName; } /// <summary> /// Gets the declare block /// </summary> /// <returns></returns> private IEnumerable<string> GetDeclareBlock() { if (Variables.Values.Count > 0) { yield return "DECLARE"; foreach (var variable in GetVariables()) { var pgsqlDeclaration = new StringBuilder(); pgsqlDeclaration.Append('\t'); pgsqlDeclaration.Append(variable.A); pgsqlDeclaration.Append(' '); pgsqlDeclaration.Append(variable.B); pgsqlDeclaration.Append(';'); yield return pgsqlDeclaration.ToString(); } } } public IEnumerable<Pair<string, string>> GetVariables() { foreach (var variableDeclarationContext in Variables.Values) { if (variableDeclarationContext.type() != null) { yield return new Pair<string, string>( PortVariableName(variableDeclarationContext.variable().GetText()), PortDataType(variableDeclarationContext.type())); } } if (_useSmartRecord) { yield return new Pair<string, string>( "_vSmartRecord", "RECORD"); } } /// <summary> /// Finds the statement context. /// </summary> /// <param name="parseTree">The parse tree.</param> /// <returns></returns> public TSQLParser.StatementContext GetStatementContext(IParseTree parseTree) { return parseTree.FindParent<TSQLParser.StatementContext>(); } /// <summary> /// Removes the statement. /// </summary> /// <param name="parseTree">The parse tree.</param> public void RemoveStatement(IParseTree parseTree) { var statementContext = GetStatementContext(parseTree); if (statementContext != null) { RemoveLeaves(statementContext); } } /// <summary> /// Visits the variable declaration. 
/// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitVariableDeclaration(TSQLParser.VariableDeclarationContext context) { Variables[context.variable().Unwrap()] = context; if (context.TABLE() != null) { var parentContext = (TSQLParser.DeclareStatementContext)context.Parent; ReplaceToken(parentContext.DECLARE(), "CREATE TEMPORARY TABLE"); Remove(context.TABLE()); } else { var assignment = context.variableDeclarationAssignment(); if (assignment != null) { Console.WriteLine("assignment"); var assignmentExpression = assignment.expression(); if (assignmentExpression != null) { // convert the statement into an assignment ... all variable declarations should be // single line by the time they get to this point. this allows us to go up to the // parent and remove the unnecessary parts var parentContext = (TSQLParser.DeclareStatementContext) context.Parent; Remove(parentContext.DECLARE()); InsertAfter(context.variable(), " := ", false); } } else { Log.DebugFormat("VisitVariableDeclaration: Removing declaration {0}", context); RemoveStatement(context); // no further processing to be performed return null; } } //else //{ // RemoveStatement(context); //} return base.VisitVariableDeclaration(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.variable" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitVariable(TSQLParser.VariableContext context) { var variableName = context.GetText(); if (variableName.StartsWith("@@")) { } else if (ConfirmConsistency(context)) { ReplaceText( context.Start.Line, context.Start.Column, context.GetText(), PortVariableName(variableName), false); } return base.VisitVariable(context); } /// <summary> /// Called when we encounter a type that has been quoted according to TSQL convention. 
/// </summary> /// <param name="context"></param> /// <returns></returns> public override object VisitTypeInBracket(TSQLParser.TypeInBracketContext context) { return base.VisitTypeInBracket(context); } /// <summary> /// Called when we encounter a name part. Since nameparts often contain quotation symbology specific to /// TSQL, we need to convert it to PL/PGSQL friendly notation. /// </summary> /// <param name="context"></param> /// <returns></returns> public override object VisitQualifiedNamePart(TSQLParser.QualifiedNamePartContext context) { var identifierTree = context.Identifier(); var identifier = identifierTree.GetText().Trim(); if ((identifier.Length > 2) && (identifier[0] == '[') && (identifier[identifier.Length - 1] == ']')) { identifier = identifier.Substring(1, identifier.Length - 2); if (!Regex.IsMatch(identifier, "^[a-zA-Z][a-zA-Z0-9_]*")) { identifier = string.Format("\"{0}\"", identifier); } ReplaceToken(identifierTree.Symbol, identifier); } return base.VisitQualifiedNamePart(context); } #region type /// <summary> /// Visits the type. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitType(TSQLParser.TypeContext context) { // some of the internal types are going to get modified ... if (context.qualifiedName() != null) { var name = context.qualifiedName(); if (ConfirmConsistency(name)) { var nameText = name.Unwrap(); var nameTextNew = PortDataType(nameText); if (nameText != nameTextNew) { ReplaceText( name.Start.Line, name.Start.Column, nameText.Length, nameTextNew); } } } return base.VisitType(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.characterStringType" />. 
        /// </summary>
        /// <param name="context">The parse tree.</param>
        /// <returns></returns>
        /// <return>The visitor result.</return>
        public override object VisitCharacterStringType(TSQLParser.CharacterStringTypeContext context)
        {
            var characterStringTypeLength = context.characterStringTypeLength();
            if (characterStringTypeLength != null)
            {
                if (characterStringTypeLength.MAX() != null)
                {
                    // NVARCHAR(MAX)/NCHAR(MAX) become unbounded text; children are
                    // not visited since the whole context was replaced.
                    Replace(context, "text");
                    return null;
                }
            }

            // the N-prefixed Unicode types lose their prefix (PGSQL text is Unicode)
            if (context.NVARCHAR() != null)
                ReplaceToken(context.NVARCHAR(), "varchar");
            else if (context.NCHAR() != null)
                ReplaceToken(context.NCHAR(), "char");

            return base.VisitCharacterStringType(context);
        }

        #endregion

        #region variable assignment

        /// <summary>
        /// Visits the set variable assignment.
        /// SET @v = expr becomes either "GET DIAGNOSTICS v := ROW_COUNT" (when the
        /// expression is a direct @@ROWCOUNT read) or a plain "v := expr".
        /// </summary>
        /// <param name="context">The context.</param>
        /// <returns></returns>
        public override object VisitSetVariableAssignment(TSQLParser.SetVariableAssignmentContext context)
        {
            var setContext = (TSQLParser.SetStatementContext) context.Parent;
            var setAssignment = context.assignmentOperator();

            // see if the target expression is using @@ROWCOUNT as this requires use of the
            // get diagnostics function
            if (IsUsingRowCount(context.expression()))
            {
                var expression = context.expression();
                if (expression.primary() != null &&
                    expression.primary().variable() != null)
                {
                    // this means that the expression is a direct assignment from the @@ROWCOUNT - this
                    // can be translated directly into a GET DIAGNOSTIC call rather than needing an
                    // intermediary
                    ReplaceToken(setContext.SET(), "GET DIAGNOSTICS");
                    Replace(expression.primary().variable(), "ROW_COUNT");
                }
                // NOTE(review): when @@ROWCOUNT appears inside a larger expression the
                // statement is left untouched (no intermediary is generated) — confirm
                // this is the intended fallback.
            }
            else
            {
                // drop the SET keyword and switch '=' to ':='
                Remove(setContext.SET());
                var tokEquals = setAssignment.GetToken(TSQLParser.EQUALS, 0);
                if (tokEquals != null)
                {
                    ReplaceToken(tokEquals, ":=", false);
                }
            }

            return base.VisitSetVariableAssignment(context);
        }

        /// <summary>
        /// Determines whether the parse tree is using @@ROWCOUNT.
        /// Recursive scan: true as soon as any descendant variable node is the
        /// @@ROWCOUNT variable.
        /// </summary>
        /// <param name="parseTree">The parse tree.</param>
        /// <returns></returns>
        private static bool IsUsingRowCount(IParseTree parseTree)
        {
            if (parseTree is TSQLParser.VariableContext)
            {
                var variableContext = (TSQLParser.VariableContext) parseTree;
                if (variableContext.IsRowCountVariable())
                    return true;
            }
            else if (parseTree is ITerminalNode)
            {
                return false; // @@ROWCOUNT is not a terminal
            }
            else
            {
                for (int ii = 0; ii < parseTree.ChildCount; ii++)
                {
                    if (IsUsingRowCount(parseTree.GetChild(ii)))
                    {
                        return true;
                    }
                }
            }

            return false;
        }

        /// <summary>
        /// Visit a parse tree produced by <see cref="TSQLParser.insertStatement" />.
        /// </summary>
        /// <param name="context">The parse tree.</param>
        /// <returns></returns>
        /// <return>The visitor result.</return>
        public override object VisitInsertStatement(TSQLParser.InsertStatementContext context)
        {
            // there is a case, where T-SQL can use an insert with an OUTPUT clause.  to my
            // knowledge, there is no exact equivalent in PGSQL, however, the RETURNING keyword
            // should be more than adequate for providing equivalent behavior.  The catch is that
            // the RETURNING clause returns the value to the caller which can then insert
            // that data into a table if that's what desired.

            var insertOutputClause = context.insertOutputClause();
            if (insertOutputClause != null)
            {
                if (insertOutputClause.INTO() != null)
                {
                    // OUTPUT ... INTO target: wrap the original insert in a CTE and
                    // re-insert its RETURNING rows into the target table.
                    var indentation = GetIndentationFor(context);
                    var selectListText = GetTextFor(insertOutputClause.selectList());
                    var targetTable = insertOutputClause.tableTarget();
                    var targetColumns = insertOutputClause.qualifiedColumnNameList();
                    var targetName =
                        targetTable.variable() != null ? PortVariableName(targetTable.variable().Unwrap()) :
                        targetTable.tempTable() != null ? PortTableName(targetTable.tempTable().Unwrap()) :
                        targetTable.Unwrap();

                    var returningText = string.Format("\n{0}\tRETURNING {1}", indentation, selectListText);
                    var insertText = string.Format("\n{0}) INSERT INTO {1} SELECT * FROM _tempContext", indentation, targetName);
                    // NOTE(review): this concatenates the context object itself (its
                    // ToString), not its source text — looks like it should be
                    // GetTextFor(targetColumns); confirm against Unwrap()/GetTextFor usage.
                    if (targetColumns != null)
                        insertText = insertText + '(' + targetColumns + ')';

                    InsertBefore(context.insertPreamble(), "WITH _tempContext AS (\n" + indentation);
                    // both fragments are inserted at the same anchor; the RETURNING text
                    // is inserted first so it ends up closest to the original statement.
                    InsertAfter(context.RightMostToken(), returningText, false);
                    InsertAfter(context.RightMostToken(), insertText, false);
                    IndentRegion(context.insertPreamble().Start, context.RightMostToken());

                    RemoveLeaves(insertOutputClause);
                }
                else
                {
                    // plain OUTPUT maps directly onto RETURNING
                    ReplaceToken(insertOutputClause.OUTPUT(), "RETURNING", false);
                }
            }

            return base.VisitInsertStatement(context);
        }

        /// <summary>
        /// Visit a parse tree produced by <see cref="TSQLParser.insertPreamble" />.
        /// Ensures INSERT is always followed by INTO (optional in T-SQL).
        /// </summary>
        /// <param name="context">The parse tree.</param>
        /// <returns></returns>
        /// <return>The visitor result.</return>
        public override object VisitInsertPreamble(TSQLParser.InsertPreambleContext context)
        {
            if (context.INTO() == null)
            {
                InsertAfter(context.INSERT(), " INTO ", false);
            }
            return base.VisitInsertPreamble(context);
        }

        /// <summary>
        /// Visits the transaction block.
        /// </summary>
        /// <param name="context">The context.</param>
        /// <returns></returns>
        public override object VisitTransactionBlock(TSQLParser.TransactionBlockContext context)
        {
            // PL/PGSQL functions are automatically enrolled into transactions
            RemoveToken(context.BEGIN(), false);
            RemoveToken(context.TRANSACTION(), false);
            return base.VisitTransactionBlock(context);
        }

        /// <summary>
        /// Visits the set session other.
        /// </summary>
        /// <param name="context">The context.</param>
        /// <returns></returns>
        public override object VisitSetSessionOther(TSQLParser.SetSessionOtherContext context)
        {
            if (context.TRANSACTION() != null)
            {
                // SET TRANSACTION is supported by PGSQL
            }
            else if (context.ROWCOUNT() != null)
            {
                // SET ROWCOUNT has no PGSQL equivalent; drop the whole statement
                RemoveStatement(context);
            }
            else if (!context.setSessionParameter().IsNullOrEmpty())
            {
                var sessionParameterList = context.setSessionParameter();
                if (sessionParameterList.Length == 1)
                {
                    // single-parameter session settings with no PGSQL meaning are dropped
                    switch (sessionParameterList[0].GetText().ToLower())
                    {
                        case "nocount":
                        case "quoted_identifier":
                            RemoveStatement(context);
                            return null;
                    }
                }
            }

            return base.VisitSetSessionOther(context);
        }

        #endregion

        /// <summary>
        /// Visit a parse tree produced by <see cref="TSQLParser.raiseError" />.
        /// RAISERROR(...) becomes RAISE EXCEPTION with a generated format string
        /// when the first argument is not already a string literal.
        /// </summary>
        /// <param name="context">The parse tree.</param>
        /// <returns></returns>
        /// <return>The visitor result.</return>
        public override object VisitRaiseError(TSQLParser.RaiseErrorContext context)
        {
            ReplaceToken(context.RAISE_ERROR().Symbol, "RAISE EXCEPTION ");
            if (context.LPAREN() != null)
                RemoveToken(context.LPAREN());
            if (context.RPAREN() != null)
                RemoveToken(context.RPAREN());

            if (context.argumentList() != null)
            {
                RaiseErrorCheckArguments(context.argumentList().argument());
            }
            else if (context.argument() != null)
            {
                RaiseErrorCheckArguments(context.argument());
            }

            return base.VisitRaiseError(context);
        }

        /// <summary>
        /// Ensures the first RAISE EXCEPTION argument is a format string: when it is
        /// not a string literal, a synthetic "'%%...%'" format (one % per argument)
        /// is prepended so the original arguments become substitution values.
        /// </summary>
        private void RaiseErrorCheckArguments(TSQLParser.ArgumentContext[] argumentList)
        {
            var argument0 = argumentList[0];
            if (argument0.expression() != null &&
                argument0.expression().primary() != null &&
                argument0.expression().primary().literalValue() != null &&
                argument0.expression().primary().literalValue().StringLiteral() != null)
            {
                // all this to determine if this is a valid argument0
            }
            else
            {
                // argument0 cannot be a variable or other like item...
                var stringText = new StringBuilder("'");
                for (int ii = 0; ii < argumentList.Length; ii++)
                    stringText.Append('%');
                stringText.Append("', ");
                InsertBefore(argument0, stringText.ToString(), false);
            }
        }

        /// <summary>
        /// Visit a parse tree produced by <see cref="TSQLParser.tryBlock" />.
        /// </summary>
        /// <param name="context">The parse tree.</param>
        /// <returns></returns>
        /// <return>The visitor result.</return>
        public override object VisitTryBlock(TSQLParser.TryBlockContext context)
        {
            // strip BEGIN TRY / END TRY / BEGIN CATCH / END CATCH keywords
            RemoveToken(context.TRY(0).Symbol, false);
            RemoveToken(context.TRY(1).Symbol, false);
            RemoveToken(context.END(0).Symbol, false);
            RemoveToken(context.CATCH(0).Symbol, false);
            RemoveToken(context.CATCH(1).Symbol, false);

            // exception handling in PLPGSQL is exception specific, much like
            // try catch blocks in other languages.  however, SQL Server provides
            // the error details in variables that are exposed to the exception
            // handler.
            ReplaceToken(context.BEGIN(1).Symbol, "EXCEPTION WHEN OTHERS THEN ");

            return base.VisitTryBlock(context);
        }

        /// <summary>
        /// Set of temp tables that have been declared.
        /// </summary>
        private readonly ISet<string> _declaredTempTables = new HashSet<string>();

        /// <summary>
        /// Visit a parse tree produced by <see cref="TSQLParser.createTable" />.
        /// CREATE TABLE #temp becomes CREATE TEMPORARY TABLE.
        /// </summary>
        /// <param name="context">The parse tree.</param>
        /// <returns></returns>
        /// <return>The visitor result.</return>
        public override object VisitCreateTable(TSQLParser.CreateTableContext context)
        {
            if (context.tempTable() != null)
            {
                _declaredTempTables.Add(context.tempTable().Unwrap());
                InsertAfter(context.CREATE(), " TEMPORARY", false);
            }
            return base.VisitCreateTable(context);
        }

        /// <summary>
        /// Visit a parse tree produced by <see cref="TSQLParser.columnDefinition" />.
/// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitColumnDefinition(TSQLParser.ColumnDefinitionContext context) { var identity = context.identitySpec(); if (identity == null) { var type = context.type(); if (type != null && type.identityType() != null) { identity = type.identityType().identitySpec(); } if (identity != null) { Replace(type, "SERIAL"); } } return base.VisitColumnDefinition(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.createIndex" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitCreateIndex(TSQLParser.CreateIndexContext context) { if (context.clusterType() != null) { RemoveLeaves(context.clusterType()); } return base.VisitCreateIndex(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.tempTable" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitTempTable(TSQLParser.TempTableContext context) { // We need to determine which type of temporary table reference this is. // The first kind is the traditional #name or ##name. // The second kind is qualified by schema like app.#name // // We will only concern ourselves with the first kind since the second // kind will be handled in a recursive visit. 
var hash = context.HASH(); if (hash != null && hash.Length > 0) { var tmpTablePrefix = string.Join("", hash.Select(h => "#")); var tmpTableSuffix = string.Empty; if (context.qualifiedNamePart() != null) { tmpTableSuffix = context.qualifiedNamePart().Unwrap(); } else { tmpTableSuffix = context.keyword().Unwrap(); } if (ConfirmConsistency(context)) { ReplaceText( context.Start.Line, context.Start.Column, context.GetText(), PortTableName(tmpTablePrefix + tmpTableSuffix), true); } else { Console.WriteLine("FAILED: \"{0}\" | \"{1}\"", context.GetText(), GetTextFor(context)); } } return base.VisitTempTable(context); } #region create procedure /// <summary> /// Called when we encounter "CREATE PROCEDURE" /// </summary> /// <param name="context"></param> /// <returns></returns> public override object VisitCreateProcedure(TSQLParser.CreateProcedureContext context) { ReplaceToken(context.CREATE(), "CREATE OR REPLACE"); ReplaceToken(context.PROCEDURE(), "FUNCTION"); // we need to add a return value for this function... assuming there is one, is there // any way that we can introspect the rest of the file to determine the return type? // in the absence of a return type, we're returning SETOF RECORD var returnTypeVisitor = new ReturnTypeVisitor(); var returnType = returnTypeVisitor.Visit(context.procedureBody()); ReplaceToken(context.AS(), string.Format("RETURNS {0} AS\n$$", returnType)); _declareBlockAfter = context.AS().Symbol.Line; return base.VisitCreateProcedure(context); } /// <summary> /// A set of initial values to ignore. /// </summary> private readonly ISet<TSQLParser.ProcedureParameterInitialValueContext> _initialValuesToIgnore = new HashSet<TSQLParser.ProcedureParameterInitialValueContext>(); /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.procedureParameters" />. 
/// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitProcedureParameters(TSQLParser.ProcedureParametersContext context) { // PGSQL does not allow non-defaulted parameters after a defaulted parameter. Therefore, // if we detect a non-default parameter in one of the children, then any defaulted // parameters that occur before that one will have their default values stripped. var procedureParameters = context.procedureParameter(); // Find the last index that contains a non-defaulted parameter var lastNonDefaultParameter = Array.FindLastIndex(procedureParameters, p => p.procedureParameterInitialValue() == null); if (lastNonDefaultParameter != -1) { for (int ii = 0; ii < lastNonDefaultParameter; ii++) { var procedureParam = procedureParameters[ii]; var procedureParamInitial = procedureParam.procedureParameterInitialValue(); if (procedureParamInitial != null) { _initialValuesToIgnore.Add(procedureParamInitial); RemoveLeaves(procedureParamInitial); } } } return base.VisitProcedureParameters(context); } /// <summary> /// Visits the procedure parameter. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitProcedureParameter(TSQLParser.ProcedureParameterContext context) { if (context != null) { Parameters.Add(context.procedureParameterName().variable().Unwrap()); var procedureParameterInitialValue = context.procedureParameterInitialValue(); if (procedureParameterInitialValue != null && !_initialValuesToIgnore.Contains(procedureParameterInitialValue)) { switch(context.type().Unwrap().ToLowerInvariant()) { case "bit": // we know that bits are converted into booleans and that the // default values are not portable as a result. convert the // bit value to a boolean value. 
if (procedureParameterInitialValue.literalValue() != null && procedureParameterInitialValue.literalValue().integerValue() != null) { var integerValue = procedureParameterInitialValue.literalValue().integerValue(); var integerValueText = integerValue.GetText().Replace("(", "").Replace(")", ""); var integerValueValue = Int32.Parse(integerValueText); Replace(procedureParameterInitialValue.literalValue(), integerValueValue == 1 ? "TRUE" : "FALSE"); } break; } } var outputToken = context.OUT() ?? context.OUTPUT(); if (outputToken != null) { InsertBefore(context, "OUT ", false); RemoveToken(outputToken.Symbol, false); } } return base.VisitProcedureParameter(context); } #endregion /// <summary> /// Visits the execute statement. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitExecuteStatement(TSQLParser.ExecuteStatementContext context) { var result = base.VisitExecuteStatement(context); if (context.qualifiedName() != null) { // this appears to be a function call... var functionName = context.qualifiedName().Unwrap().ToLowerInvariant(); if (functionName == "sp_executesql") { // sp_executesql is special because we're being given a "command" to run and probably a // host of out parameters. However, out parameters are different in pgsql than from tsql // instead, they are returned to the caller in the resulting rows. As such, we need to // use EXECUTE instead of PERFORM Replace(context.EXECUTE(), "EXECUTE", false); RemoveLeaves(context.qualifiedName()); var sargs = context.executeArgumentList().executeArgument(); // @stmt is arg[0] var pstmt = sargs[0]; // @params is args[1] - if it exists at all var pparams = sargs.Length > 1 ? 
sargs[1] : null; if (pparams != null) { // alright, we no longer care about this argument - not relevant to PGSQL execute, // so go ahead and remove it RemoveLeaves(pparams); RemoveToken(context.executeArgumentList().GetToken(TSQLParser.COMMA, 0)); } // @param1+ are args[2+] and must match any parameters that are visible in stmt and // subsequently declared in @params. depending on direction, these may or may // not need to be passed along. out parameters are not translated as they are // returned and must be part of the "INTO" conversion. var usingDelimiter = ""; var usingBlock = new StringBuilder(""); var intoBlock = false; var pparamValues = sargs.Skip(2).ToArray(); // each argument must be rewritten in pstmt in addition to being added to a special // using block at the end of the execute call for (int ii = 0; ii < pparamValues.Length; ii++) { var pparamValue = pparamValues[ii]; if (pparamValue.OUT() != null && pparamValue.OUTPUT() != null) { usingBlock.Append(usingDelimiter); usingBlock.Append(pparamValue.variable().Unwrap()); usingDelimiter = ","; } else { // output parameters need to be removed ... at the same time they need to function // as part of the INTO portion of the return intoBlock = _useSmartRecord = true; RemoveBetween( context.executeArgumentList().GetToken(TSQLParser.COMMA, 1 + ii).Symbol, pparamValue.Stop); } } var insertAfter = intoBlock ? " INTO _vSmartRecord" : ""; if (usingBlock.Length > 0) { insertAfter += " USING " + usingBlock; } if (! string.IsNullOrWhiteSpace(insertAfter)) { InsertAfter(pparams, insertAfter, false); } } else { Replace(context.EXECUTE(), "PERFORM", false); if (context.executeArgumentList() != null) { InsertBefore(context.executeArgumentList(), "("); InsertAfter(context.executeArgumentList(), ")"); // the argument list in T-SQL is a stream of named arguments... in // pgsql, the arguments are unnamed and assumed to positional. 
this // makes ensuring soundness more difficult as we actually need the // order of positional arguments for the given stored procedure. // for the time-being, we assume that the named order matches the positional // order... this is a *bad* assumption but we will need to get the positional // argument order in order to make the magic happen. foreach (var executeArgument in context.executeArgumentList().executeArgument()) { if (executeArgument.EQUALS() != null) { RemoveLeaves(executeArgument.variable()); RemoveToken(executeArgument.EQUALS()); } } } else { InsertAfter(context.qualifiedName(), "()"); } } } else { Replace(context.EXECUTE(), "PERFORM", false); } return result; } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.ifStatement" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitIfStatement(TSQLParser.IfStatementContext context) { var result = base.VisitIfStatement(context); // lets find out what our indentation looks like.. sucks, but we like to // ensure consistent indentation on the line. var indentation = GetIndentationFor(context.IF()); var endIfText = string.Format("\n{0}{1}", indentation, "END IF"); var thenText = string.Format("{1}\n{0}\t", indentation, " THEN"); InsertAfter(context.predicateList(), thenText, false); var topStatement = context.statement(0); if (topStatement.BEGIN() != null) { RemoveToken(topStatement.BEGIN()); RemoveToken(topStatement.END()); } var botStatement = context.statement(1); if (botStatement != null && botStatement.BEGIN() != null) { RemoveToken(botStatement.BEGIN()); RemoveToken(botStatement.END()); } if (context.ELSE() == null) { InsertAfter(context.statement(0), endIfText, false); } else { InsertAfter(context.statement(1), endIfText, false); } return result; } /// <summary> /// Visits the convert expression. 
/// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitConvertExpression(TSQLParser.ConvertExpressionContext context) { var result = base.VisitConvertExpression(context); // CONVERT is a SQL Server specific conversion // CAST is an ANSI-SQL conversion. if (context.integerValue() == null) { var expressionText = GetTextFor(context.expression()); var typeText = GetTextFor(context.type()); var newText = expressionText + " AS " + typeText; ReplaceToken(context.CONVERT(), "CAST"); RemoveBetween(context.LPAREN().Symbol, context.RPAREN().Symbol); InsertAfter(context.LPAREN().Symbol, newText); } return result; } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.functionCall" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitFunctionCall(TSQLParser.FunctionCallContext context) { var functionName = context.functionName().Unwrap().ToLowerInvariant(); var functionArgs = context.argumentList(); // use a lookup table to determine how we map from T-SQL functions to PL/PGSQL functions string remapFunctionName; if (BasicFunctionMapTable.TryGetValue(functionName, out remapFunctionName)) { ReplaceText( context.functionName().Start.Line, context.functionName().Start.Column, context.functionName().GetText(), // for soundness remapFunctionName); } return base.VisitFunctionCall(context); } /// <summary> /// Visits the additive expression. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitAdditiveExpression(TSQLParser.AdditiveExpressionContext context) { // check additive expressions ... if they contain string literals, then the additive form must be // modified to use the '||' operator rather than the '+' operator. 
if (context.additiveExpression() != null && context.GetToken(TSQLParser.PLUS, 0) != null) { var expressionTypeVisitor = new ExpressionTypeVisitor(Variables); var expressionType = expressionTypeVisitor.VisitAdditiveExpression(context); if (expressionType == typeof(string)) { ReplaceToken(context.GetToken(TSQLParser.PLUS, 0), "||", false); } } return base.VisitAdditiveExpression(context); } /// <summary> /// Visits the print expression. /// </summary> /// <param name="context">The context.</param> /// <returns></returns> public override object VisitPrintExpression(TSQLParser.PrintExpressionContext context) { ReplaceToken(context.PRINT(), "RAISE DEBUG '%',"); return base.VisitPrintExpression(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.selectStatementPart" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitSelectStatementPart(TSQLParser.SelectStatementPartContext context) { var result = base.VisitSelectStatementPart(context); if (context.selectTopLimit() != null) { // limits under MS-SQL support a few options that dont translate well into ANSI // limits ... look for percent as this doesnt translate var topLimit = context.selectTopLimit(); if (topLimit.PERCENT != null) { throw new ArgumentException("unable to handle percent based limits"); } var limitLevel = topLimit.integerValue() != null ? topLimit.integerValue().GetText() : topLimit.variable().GetText(); var limitText = string.Format(" LIMIT {0}", limitLevel); RemoveToken(topLimit.TOP()); Remove(topLimit.integerValue()); Remove(topLimit.GetToken(TSQLParser.LPAREN, 1)); Remove(topLimit.variable()); Remove(topLimit.GetToken(TSQLParser.RPAREN, 1)); InsertAfter(context.RightMostToken(), limitText, false); } if (context.selectList() != null) { // Look select statements that are saving state into a variable. These are typically // queries with a single-row result. 
There must only be one assignment in the entire // block for this to work. var selectListElements = context.selectList().selectListElement(); if (selectListElements.Count(s => s.variable() != null) == 1) { foreach (var selectListElement in selectListElements) { var selectListVariable = selectListElement.variable(); if (selectListVariable != null) { // this needs to be moved into the "INTO" section of the query var insertText = string.Format(" INTO {0} ", PortVariableName(selectListVariable.Unwrap())); Remove(selectListVariable); RemoveToken(selectListElement.op); InsertAfter(context.selectList(), insertText, false); } } } } return result; } public override object VisitCaseExpression(TSQLParser.CaseExpressionContext context) { return base.VisitCaseExpression(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.tableTarget" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitTableTarget(TSQLParser.TableTargetContext context) { var result = base.VisitTableTarget(context); if (context.tempTable() != null) { string tempTableName = context.tempTable().Unwrap(); if (!_declaredTempTables.Contains(tempTableName)) { // if my parent is part of a select into or an insert, then I can manufacture // the schema for the temporary table from the parent call. 
if (context.IsParentChain<TSQLParser.IntoClauseContext, TSQLParser.SelectStatementPartContext>()) { InsertBefore( context.Parent.Parent, string.Format("CREATE TEMPORARY TABLE {0} ON COMMIT DROP AS ", PortTableName(tempTableName))); RemoveLeaves(context.Parent); EatWhitespaceAt(context.Parent); } else if (context.IsParentChain<TSQLParser.InsertPreambleContext>()) { var insertPreamble = context.Parent as TSQLParser.InsertPreambleContext; } else if (context.IsParentChain<TSQLParser.InsertOutputClauseContext>()) { var insertOutputClause = context.Parent as TSQLParser.InsertOutputClauseContext; } } } return result; } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.joinOrApply" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitJoinOrApply(TSQLParser.JoinOrApplyContext context) { if (context.APPLY() != null) { #if PG93 ReplaceToken(context.APPLY(), "LATERAL"); #else ReplaceToken(context.APPLY(), "JOIN"); #endif } return base.VisitJoinOrApply(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.tableTargetOptions" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitTableTargetOptions(TSQLParser.TableTargetOptionsContext context) { RemoveLeaves(context); return base.VisitTableTargetOptions(context); } /// <summary> /// Visit a parse tree produced by <see cref="TSQLParser.tableSourceOptions" />. /// </summary> /// <param name="context">The parse tree.</param> /// <returns></returns> /// <return>The visitor result.</return> public override object VisitTableSourceOptions(TSQLParser.TableSourceOptionsContext context) { RemoveLeaves(context); return base.VisitTableSourceOptions(context); } } }
ajaxx/tsql2pgsql
src/visitors/PgsqlConverter.cs
C#
apache-2.0
59,116
package com.example.coolweather; import android.support.v7.app.ActionBarActivity; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; public class MainActivity extends ActionBarActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } }
yijia1992/coolweather
app/src/main/java/com/example/coolweather/MainActivity.java
Java
apache-2.0
1,121
"use strict";

/**
 * Second level state for the mushroom game.
 * Relies on assets instantiated through mt.create(...) and on the Phaser
 * arcade physics configured by the game bootstrap.
 */
window.mushroom.state.play2 = {

    preload: function () {
        console.log("loading play2 state");
    },

    create: function () {
        console.log("starting play2 state");
        this.game.camera.x = 1800;

        // World and actors.
        this.redsea = mt.create("redsea");
        this.Pathway2 = mt.create("Pathway2");
        this.player = mt.create("weirdmushroom1");
        this.enemy = mt.create("evilmushroom1");
        this.enemy2 = mt.create("evilmushroom2");
        this.coins2 = mt.create("coins2");

        // HUD elements.
        this.score = mt.create("score");
        this.timer1 = mt.create("timer1");

        // Counters.
        this.seconds = 0;
        this.timer1_count = 0;
        this.score_count = 0;

        // Keyboard input.
        var keyboard = this.input.keyboard;
        this.Lkey = keyboard.addKey(Phaser.Keyboard.LEFT);
        this.Rkey = keyboard.addKey(Phaser.Keyboard.RIGHT);
        this.Ukey = keyboard.addKey(Phaser.Keyboard.UP);
    },

    update: function () {
        // Advance the HUD clock once every 60 update ticks (~1 second).
        this.timer1_count += 1;
        if (this.timer1_count === 60) {
            this.seconds += 1;
            this.timer1.text = this.seconds;
            this.timer1_count = 0;
        }

        var arcade = this.game.physics.arcade;
        arcade.collide(this.player, this.Pathway2);
        arcade.collide(this.player, this.enemy.self, this.restartGame, null, this);
        arcade.collide(this.player, this.enemy2.self, this.restartGame, null, this);
        arcade.overlap(this.player, this.coins2.self, this.destroyObject, null, this);

        // Camera follows the player.
        this.game.camera.x = this.player.position.x - 400;
        this.game.camera.y = this.player.position.y - 300;

        // Enemies that fall off the bottom respawn above a random spot.
        this.respawnIfFallen(this.enemy);
        this.respawnIfFallen(this.enemy2);

        // Movement: left/right override jumping; jumping only from the ground.
        if (this.Lkey.isDown) {
            this.player.body.velocity.x -= 5;
        } else if (this.Rkey.isDown) {
            this.player.body.velocity.x += 5;
        } else if (this.Ukey.isDown && this.player.body.touching.down) {
            this.player.body.velocity.y -= 400;
        } else {
            this.player.body.velocity.x = 0;
        }
    },

    /**
     * Recycle an enemy that fell below the world: drop it back in from the
     * top at a random horizontal position with zeroed fall speed.
     */
    respawnIfFallen: function (foe) {
        if (foe.body.position.y > 820) {
            foe.body.position.y = -100;
            foe.body.position.x = 1800 + Math.random() * 1800;
            foe.body.velocity.y = 0;
        }
    },

    /** Coin pickup: bump the score and remove the coin sprite. */
    destroyObject: function (player, coin) {
        this.score_count += 10;
        this.score.text = this.score_count;
        coin.destroy();
    },

    /** Restart this level (triggered by touching an enemy). */
    restartGame: function () {
        this.game.state.start("play2");
    },

    /** Advance to the next level. */
    switchLevel: function () {
        this.game.state.start("play3");
    }
};
Byte-Camp/Website
static/best-ofs/2Dgame/Shengming - Mushroom/js/state/play2.js
JavaScript
apache-2.0
2,523
/* * Copyright (C) 2016 Pablo Guardiola Sánchez. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pguardiola; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Path; import android.graphics.Region; import android.util.AttributeSet; import android.view.View; public class HexagonViewNorthSouth extends View { private Path hexagonPath; private Path hexagonBorderPath; private float radius; private float width, height; private int maskColor; public HexagonViewNorthSouth(Context context) { super(context); init(); } public HexagonViewNorthSouth(Context context, AttributeSet attrs) { super(context, attrs); init(); } public HexagonViewNorthSouth(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(); } public void setRadius(float r) { this.radius = r; calculatePath(); } public void setMaskColor(int color) { this.maskColor = color; invalidate(); } @Override public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); width = MeasureSpec.getSize(widthMeasureSpec); height = MeasureSpec.getSize(heightMeasureSpec); radius = height / 2; calculatePath(); } @Override public void onDraw(Canvas c) { super.onDraw(c); c.clipPath(hexagonBorderPath, Region.Op.DIFFERENCE); c.drawColor(Color.WHITE); c.save(); c.clipPath(hexagonPath, Region.Op.DIFFERENCE); c.drawColor(maskColor); c.save(); } private void init() { 
hexagonPath = new Path(); hexagonBorderPath = new Path(); maskColor = 0xFFb2c311; } private void calculatePath() { float centerX = width / 2; float centerY = height / 2; float adjacent = (float) (Math.sqrt(3) * radius / 2); float opposite = radius / 2; float hypotenuse = radius; // North-South hexagonPath.moveTo(centerX, centerY + hypotenuse); hexagonPath.lineTo(centerX - adjacent, centerY + opposite); hexagonPath.lineTo(centerX - adjacent, centerY - opposite); hexagonPath.lineTo(centerX, centerY - hypotenuse); hexagonPath.lineTo(centerX + adjacent, centerY - opposite); hexagonPath.lineTo(centerX + adjacent, centerY + opposite); hexagonPath.moveTo(centerX, centerY + hypotenuse); float radiusBorder = radius - 5; float adjacentBorder = (float) (Math.sqrt(3) * radiusBorder / 2); float oppositeBorder = radiusBorder / 2; float hypotenuseBorder = radiusBorder; // North-South hexagonBorderPath.moveTo(centerX, centerY + hypotenuseBorder); hexagonBorderPath.lineTo(centerX - adjacentBorder, centerY + oppositeBorder); hexagonBorderPath.lineTo(centerX - adjacentBorder, centerY - oppositeBorder); hexagonBorderPath.lineTo(centerX, centerY - hypotenuseBorder); hexagonBorderPath.lineTo(centerX + adjacentBorder, centerY - oppositeBorder); hexagonBorderPath.lineTo(centerX + adjacentBorder, centerY + oppositeBorder); hexagonBorderPath.moveTo(centerX, centerY + hypotenuseBorder); invalidate(); } }
Guardiola31337/HexGrid
hexgrid/src/main/java/com/pguardiola/HexagonViewNorthSouth.java
Java
apache-2.0
3,676
class Config:
    """Training/inference configuration for a Faster R-CNN style detector.

    All values are plain attributes set in ``__init__``; downstream code
    reads them directly (no keras import needed here — the original
    ``from keras import backend as K`` was unused and has been dropped).
    """

    def __init__(self):
        self.verbose = True

        # Base feature-extraction network.
        self.network = 'resnet50'

        # Data-augmentation switches.
        self.use_horizontal_flips = True
        self.use_vertical_flips = True
        self.rot_90 = True

        # Anchor box scales in pixels.
        # NOTE(review): 124 breaks the powers-of-two pattern and looks like a
        # typo for 128 — confirm before changing (changing it alters anchors).
        self.anchor_box_scales = [1, 2, 4, 8, 16, 32, 64, 124, 256, 512]

        # Anchor box aspect ratios as [width, height] multipliers.
        self.anchor_box_ratios = [
            [1, 1], [1, 2], [2, 1], [1, 3], [3, 1], [4, 1], [1, 4],
            [1, 5], [5, 1], [1, 6], [6, 1], [1, 7], [7, 1], [1, 8],
            [8, 1], [1, 9], [9, 1],
        ]

        # Resize the smallest side of each image to this many pixels.
        self.im_size = 600

        # Channel-wise mean subtracted from input images.
        self.img_channel_mean = [103.939, 116.779, 123.68]
        self.img_scaling_factor = 1.0

        # Number of ROIs processed at once by the classifier head.
        self.num_rois = 8

        # Feature-map stride at the RPN (depends on the base network).
        self.rpn_stride = 16

        self.balanced_classes = False

        # Scaling applied to the regression-target standard deviations.
        self.std_scaling = 4.0
        self.classifier_regr_std = [8.0, 8.0, 4.0, 4.0]

        # IoU thresholds for labelling RPN anchors.
        self.rpn_min_overlap = 0.3
        self.rpn_max_overlap = 0.7

        # IoU thresholds for labelling classifier ROIs.
        self.classifier_min_overlap = 0.1
        self.classifier_max_overlap = 0.5

        # Class mapping, generated automatically by the dataset parser.
        self.class_mapping = None

        # Location of pretrained weights for the base network.
        # Weight files can be found at:
        # https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_th_dim_ordering_th_kernels_notop.h5
        # https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5
        self.model_path = 'model_frcnn.vgg.hdf5'
yhalk/vw_challenge_ECR
src/jetson/keras_frcnn/config.py
Python
apache-2.0
1,646
package configuration;

import org.agle4j.framework.constant.ConfigConstant;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.junit.Test;

/**
 * Smoke test for the commons-configuration library (a Java configuration
 * management library): writes a property, saves the file, and reads an
 * integer value back.
 *
 * @author hanyx
 */
public class ConfigurationTest {

    @Test
    public void testConfiguration() throws ConfigurationException {
        // Let ConfigurationException propagate so JUnit reports a failure.
        // The original caught it and only called printStackTrace(), which
        // made the test pass even when configuration loading failed.
        PropertiesConfiguration config = new PropertiesConfiguration(ConfigConstant.CONFIG_FILE);
        config.setProperty("colors.background", "#000000");
        config.save();
        Integer num = config.getInt("app.upload_limit");
        System.out.println(num);
    }
}
yongxu16/agile-test
src/test/java/configuration/ConfigurationTest.java
Java
apache-2.0
787
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.vtnweb.web; import static com.google.common.base.Preconditions.checkNotNull; import static org.onlab.util.Tools.nullIsIllegal; import org.onlab.packet.IpPrefix; import org.onosproject.codec.CodecContext; import org.onosproject.codec.JsonCodec; import org.onosproject.vtnrsc.DefaultFlowClassifier; import org.onosproject.vtnrsc.FlowClassifier; import org.onosproject.vtnrsc.FlowClassifierId; import org.onosproject.vtnrsc.TenantId; import org.onosproject.vtnrsc.VirtualPortId; import com.fasterxml.jackson.databind.node.ObjectNode; /** * Flow Classifier JSON codec. 
*/ public final class FlowClassifierCodec extends JsonCodec<FlowClassifier> { private static final String FLOW_CLASSIFIER_ID = "id"; private static final String TENANT_ID = "tenant_id"; private static final String NAME = "name"; private static final String DESCRIPTION = "description"; private static final String ETHER_TYPE = "ethertype"; private static final String PROTOCOL = "protocol"; private static final String MIN_SRC_PORT_RANGE = "source_port_range_min"; private static final String MAX_SRC_PORT_RANGE = "source_port_range_max"; private static final String MIN_DST_PORT_RANGE = "destination_port_range_min"; private static final String MAX_DST_PORT_RANGE = "destination_port_range_max"; private static final String SRC_IP_PREFIX = "source_ip_prefix"; private static final String DST_IP_PREFIX = "destination_ip_prefix"; private static final String SRC_PORT = "logical_source_port"; private static final String DST_PORT = "logical_destination_port"; private static final String MISSING_MEMBER_MESSAGE = " member is required in Flow Classifier."; @Override public FlowClassifier decode(ObjectNode json, CodecContext context) { if (json == null || !json.isObject()) { return null; } FlowClassifier.Builder resultBuilder = new DefaultFlowClassifier.Builder(); String flowClassifierId = nullIsIllegal(json.get(FLOW_CLASSIFIER_ID), FLOW_CLASSIFIER_ID + MISSING_MEMBER_MESSAGE).asText(); resultBuilder.setFlowClassifierId(FlowClassifierId.of(flowClassifierId)); String tenantId = nullIsIllegal(json.get(TENANT_ID), TENANT_ID + MISSING_MEMBER_MESSAGE).asText(); resultBuilder.setTenantId(TenantId.tenantId(tenantId)); String flowClassiferName = nullIsIllegal(json.get(NAME), NAME + MISSING_MEMBER_MESSAGE).asText(); resultBuilder.setName(flowClassiferName); String flowClassiferDescription = (json.get(DESCRIPTION)).asText(); resultBuilder.setDescription(flowClassiferDescription); String etherType = nullIsIllegal(json.get(ETHER_TYPE), ETHER_TYPE + MISSING_MEMBER_MESSAGE).asText(); 
resultBuilder.setEtherType(etherType); String protocol = (json.get(PROTOCOL)).asText(); resultBuilder.setProtocol(protocol); int minSrcPortRange = (json.get(MIN_SRC_PORT_RANGE)).asInt(); resultBuilder.setMinSrcPortRange(minSrcPortRange); int maxSrcPortRange = (json.get(MAX_SRC_PORT_RANGE)).asInt(); resultBuilder.setMaxSrcPortRange(maxSrcPortRange); int minDstPortRange = (json.get(MIN_DST_PORT_RANGE)).asInt(); resultBuilder.setMinDstPortRange(minDstPortRange); int maxDstPortRange = (json.get(MAX_DST_PORT_RANGE)).asInt(); resultBuilder.setMaxDstPortRange(maxDstPortRange); String srcIpPrefix = (json.get(SRC_IP_PREFIX)).asText(); if (!srcIpPrefix.isEmpty()) { resultBuilder.setSrcIpPrefix(IpPrefix.valueOf(srcIpPrefix)); } String dstIpPrefix = (json.get(DST_IP_PREFIX)).asText(); if (!dstIpPrefix.isEmpty()) { resultBuilder.setDstIpPrefix(IpPrefix.valueOf(dstIpPrefix)); } String srcPort = json.get(SRC_PORT) != null ? (json.get(SRC_PORT)).asText() : ""; if (!srcPort.isEmpty()) { resultBuilder.setSrcPort(VirtualPortId.portId(srcPort)); } String dstPort = json.get(DST_PORT) != null ? 
(json.get(DST_PORT)).asText() : ""; if (!dstPort.isEmpty()) { resultBuilder.setDstPort(VirtualPortId.portId(dstPort)); } return resultBuilder.build(); } @Override public ObjectNode encode(FlowClassifier flowClassifier, CodecContext context) { checkNotNull(flowClassifier, "flowClassifier cannot be null"); ObjectNode result = context.mapper().createObjectNode() .put(FLOW_CLASSIFIER_ID, flowClassifier.flowClassifierId().toString()) .put(TENANT_ID, flowClassifier.tenantId().toString()) .put(NAME, flowClassifier.name()) .put(DESCRIPTION, flowClassifier.description()) .put(ETHER_TYPE, flowClassifier.etherType()) .put(PROTOCOL, flowClassifier.protocol()) .put(MIN_SRC_PORT_RANGE, flowClassifier.minSrcPortRange()) .put(MAX_SRC_PORT_RANGE, flowClassifier.maxSrcPortRange()) .put(MIN_DST_PORT_RANGE, flowClassifier.minDstPortRange()) .put(MAX_DST_PORT_RANGE, flowClassifier.maxDstPortRange()) .put(SRC_IP_PREFIX, flowClassifier.srcIpPrefix().toString()) .put(DST_IP_PREFIX, flowClassifier.dstIpPrefix().toString()) .put(SRC_PORT, flowClassifier.srcPort().toString()) .put(DST_PORT, flowClassifier.dstPort().toString()); return result; } }
planoAccess/clonedONOS
apps/vtn/vtnweb/src/main/java/org/onosproject/vtnweb/web/FlowClassifierCodec.java
Java
apache-2.0
6,133
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.volley;

import android.annotation.TargetApi;
import android.net.TrafficStats;
import android.os.Build;
import android.os.Process;
import android.os.SystemClock;

import java.util.concurrent.BlockingQueue;

/**
 * Provides a thread for performing network dispatch from a queue of requests.
 *
 * Requests added to the specified queue are processed from the network via a
 * specified {@link Network} interface. Responses are committed to cache, if
 * eligible, using a specified {@link Cache} interface. Valid responses and
 * errors are posted back to the caller via a {@link ResponseDelivery}.
 */
/*
 * NetworkDispatcher is the worker thread that services Volley's network
 * request queue. It takes requests off the queue one at a time and performs
 * each over the network:
 * 1. on success, the raw response is parsed, the parsed Response is written
 *    to the disk cache as a Cache.Entry when eligible, and finally the
 *    Request/Response pair is delivered back to the caller;
 * 2. on failure, an exception is thrown; the elapsed request time is recorded
 *    and a VolleyError is delivered instead.
 */
public class NetworkDispatcher extends Thread {
    /** The queue of requests to service. */
    /*
     * Holds the pending network Requests. A BlockingQueue is used because
     * the queue is shared with other threads (concurrent access).
     */
    private final BlockingQueue<Request<?>> mQueue;
    /** The network interface for processing requests. */
    /*
     * The Network implementation used to execute requests
     * (backed by HttpClientStack or HurlStack).
     */
    private final Network mNetwork;
    /** The cache to write to. */
    /*
     * This Cache is in practice a DiskBasedCache, used to persist
     * response data returned by network requests.
     */
    private final Cache mCache;
    /** For posting responses and errors. */
    /*
     * 1. Delivers the Request and Response after a successful request.
     * 2. Delivers an error-only Response after a failed request.
     */
    private final ResponseDelivery mDelivery;
    /** Used for telling us to die. */
    // Quit flag marking whether this NetworkDispatcher thread should stop.
    private volatile boolean mQuit = false;

    /**
     * Creates a new network dispatcher thread.  You must call {@link #start()}
     * in order to begin processing.
     *
     * @param queue Queue of incoming requests for triage
     * @param network Network interface to use for performing requests
     * @param cache Cache interface to use for writing responses to cache
     * @param delivery Delivery interface to use for posting responses
     */
    public NetworkDispatcher(BlockingQueue<Request<?>> queue,
            Network network, Cache cache,
            ResponseDelivery delivery) {
        mQueue = queue;
        mNetwork = network;
        mCache = cache;
        mDelivery = delivery;
    }

    /**
     * Forces this dispatcher to quit immediately.  If any requests are still in
     * the queue, they are not guaranteed to be processed.
     */
    /*
     * Stops the thread.
     */
    public void quit() {
        // Set the quit flag.
        mQuit = true;
        // Interrupt the thread; run() will observe an InterruptedException.
        interrupt();
    }

    /*
     * Tags this NetworkDispatcher's traffic for the Network Traffic Tool in
     * DDMS (Android 4.0+), so network usage can be monitored in real time.
     */
    @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
    private void addTrafficStatsTag(Request<?> request) {
        // Tag the request (if API >= 14)
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            /*
             * Tag this thread's network usage so it can be attributed
             * in the Network Traffic Tool.
             */
            TrafficStats.setThreadStatsTag(request.getTrafficStatsTag());
        }
    }

    @Override
    public void run() {
        // Run this thread at background priority (THREAD_PRIORITY_BACKGROUND).
        Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
        while (true) {
            /*
             * Record the time at the top of the loop,
             * i.e. the start time of one request.
             */
            long startTimeMs = SystemClock.elapsedRealtime();
            Request<?> request;
            try {
                // Take a request from the queue.
                // Blocks until a Request is available on the network queue.
                request = mQueue.take();
            } catch (InterruptedException e) {
                // We may have been interrupted because it was time to quit.
                // Check whether the quit flag is set.
                if (mQuit) {
                    // Exit the loop (and the thread).
                    return;
                }
                // Quit flag not set: skip this iteration and keep looping.
                continue;
            }

            try {
                // Add a "network-queue-take" marker to the request's log.
                request.addMarker("network-queue-take");

                // If the request was cancelled already, do not perform the
                // network request.
                // The request has been cancelled.
                if (request.isCanceled()) {
                    // Finish the request, flushing its marker log.
                    request.finish("network-discard-cancelled");
                    // Skip this iteration and keep looping.
                    continue;
                }

                // Tag this NetworkDispatcher's traffic for monitoring.
                addTrafficStatsTag(request);

                // Perform the network request.
                /*
                 * Execute the request through the Network interface
                 * (HttpClientStack or HurlStack). Both HttpStack variants
                 * return an Apache HttpResponse; performRequest(...) converts
                 * it into a Volley NetworkResponse.
                 */
                NetworkResponse networkResponse = mNetwork.performRequest(request);
                // Add a "network-http-complete" marker to the request's log.
                request.addMarker("network-http-complete");

                // If the server returned 304 AND we delivered a response already,
                // we're done -- don't deliver a second identical response.
                /*
                 * HTTP 304 (Not Modified) and this request's response has
                 * already been delivered.
                 */
                if (networkResponse.notModified && request.hasHadResponseDelivered()) {
                    // Finish the request, flushing its marker log.
                    request.finish("not-modified");
                    // Skip this iteration and keep looping.
                    continue;
                }

                // Parse the response here on the worker thread.
                // Parse the NetworkResponse into a typed Response.
                Response<?> response = request.parseNetworkResponse(networkResponse);
                // Add a "network-parse-complete" marker to the request's log.
                request.addMarker("network-parse-complete");

                // Write to cache if applicable.
                // TODO: Only update cache metadata instead of entire record for 304s.
                /*
                 * response.cacheEntry is populated during
                 * parseNetworkResponse(...), which builds the Response<T> via
                 * Response.success(T result, Cache.Entry cacheEntry).
                 *
                 * So this checks:
                 * 1. whether the request wants caching, and
                 * 2. whether the response actually carries a cacheEntry.
                 */
                if (request.shouldCache() && response.cacheEntry != null) {
                    // Store the entry in the DiskBasedCache (persist to disk).
                    mCache.put(request.getCacheKey(), response.cacheEntry);
                    // Add a "network-cache-written" marker to the request's log.
                    request.addMarker("network-cache-written");
                }

                // Post the response back.
                // Mark the request as delivered (delivery happens next).
                request.markDelivered();
                // Deliver the Request and Response to the caller.
                mDelivery.postResponse(request, response);
            } catch (VolleyError volleyError) {
                /*
                 * performRequest(Request<?> request) throws VolleyError,
                 * so reaching here means the network request itself failed.
                 */
                // Record the elapsed request time.
                volleyError.setNetworkTimeMs(SystemClock.elapsedRealtime() - startTimeMs);
                // Parse and deliver the network error.
                parseAndDeliverNetworkError(request, volleyError);
            } catch (Exception e) {
                VolleyLog.e(e, "Unhandled exception %s", e.toString());
                // Any other exception is also wrapped in a VolleyError.
                VolleyError volleyError = new VolleyError(e);
                // Record the elapsed request time.
                volleyError.setNetworkTimeMs(SystemClock.elapsedRealtime() - startTimeMs);
                // Deliver the error.
                mDelivery.postError(request, volleyError);
            }
        }
    }

    /*
     * Parses and delivers a network error, wrapped as a VolleyError.
     */
    private void parseAndDeliverNetworkError(Request<?> request, VolleyError error) {
        error = request.parseNetworkError(error);
        mDelivery.postError(request, error);
    }
}
CaMnter/SaveVolley
volley-comments/src/main/java/com/android/volley/NetworkDispatcher.java
Java
apache-2.0
10,456
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0, (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.tle.common.harvester;

import com.tle.beans.entity.LanguageBundle;
import java.util.Map;

/**
 * Base class for typed access to a {@link HarvesterProfile}'s attribute map.
 * Subclasses declare their profile type via {@link #getType()} and move
 * values between typed fields and the attribute map in {@link #_load()} /
 * {@link #_save()}.
 */
public abstract class HarvesterProfileSettings {
  private LanguageBundle name;
  // Backing attribute map; aliases the map owned by the profile passed to
  // load(...)/save(...).
  private Map<String, String> attributes;

  public HarvesterProfileSettings() {
    super();
  }

  public HarvesterProfileSettings(HarvesterProfile gateway) {
    this();
    load(gateway);
  }

  /** Populates this settings object from the given profile's attributes. */
  public void load(HarvesterProfile gateway1) {
    this.attributes = gateway1.getAttributes();
    this.name = gateway1.getName();
    _load();
  }

  /** Writes this settings object's state back into the given profile. */
  public void save(HarvesterProfile gateway1) {
    gateway1.setType(getType());
    this.attributes = gateway1.getAttributes();
    gateway1.setName(name);
    _save();
    // Push every attribute through setAttribute so the profile can apply
    // any side effects beyond raw map storage.
    for (Map.Entry<String, String> entry : attributes.entrySet()) {
      gateway1.setAttribute(entry.getKey(), entry.getValue());
    }
  }

  /**
   * Returns the attribute for {@code key}, or {@code defaultValue} when the
   * attribute is absent.
   */
  public String get(String key, String defaultValue) {
    String value = attributes.get(key);
    if (value == null) {
      value = defaultValue;
    }
    return value;
  }

  /**
   * Returns the attribute for {@code key} parsed as a boolean, or
   * {@code defaultValue} when the attribute is absent.
   */
  public boolean get(String key, boolean defaultValue) {
    String value = attributes.get(key);
    boolean v;
    if (value == null) {
      v = defaultValue;
    } else {
      v = Boolean.valueOf(value);
    }
    return v;
  }

  /**
   * Returns the attribute for {@code key} parsed as an int, or
   * {@code defaultValue} when the attribute is absent or not a valid integer.
   */
  public int get(String key, int defaultValue) {
    String value = attributes.get(key);
    int v;
    if (value != null) {
      try {
        v = Integer.parseInt(value);
      } catch (NumberFormatException e) {
        // Narrowed from catch (Exception): parseInt on a non-null String can
        // only throw NumberFormatException; malformed values fall back to the
        // default as before.
        v = defaultValue;
      }
    } else {
      v = defaultValue;
    }
    return v;
  }

  /** Stores {@code value.toString()} under {@code key}. */
  public void put(String key, Object value) {
    attributes.put(key, value.toString());
  }

  /** Stores {@code value} under {@code key}. */
  public void put(String key, String value) {
    attributes.put(key, value);
  }

  /** @return the profile type identifier for this settings subclass. */
  protected abstract String getType();

  /** Reads typed fields from the attribute map. */
  protected abstract void _load();

  /** Writes typed fields into the attribute map. */
  protected abstract void _save();

  public LanguageBundle getName() {
    return name;
  }

  public void setName(LanguageBundle name) {
    this.name = name;
  }
}
equella/Equella
Source/Plugins/Core/com.equella.base/src/com/tle/common/harvester/HarvesterProfileSettings.java
Java
apache-2.0
2,776
package ru.job4j.list;

/**
 * Created on 24.07.17.
 * Simple queue realization (FIFO).
 * @author Wamdue
 * @version 1.1
 * @param <E> - class to store.
 */
public class SimpleQueue<E> extends SimpleLinkedList<E> {
    /**
     * Link to the first element.
     */
    private Node<E> first = null;
    /**
     * Link to the last element.
     */
    private Node<E> last = null;
    /**
     * Size.
     */
    private int size = 0;

    /**
     * Removes first element from list, and returns it.
     * @return - first element, or null if size == 0.
     */
    public E poll() {
        // Guard before dereferencing: the original read this.first.item first and
        // threw NullPointerException on an empty queue instead of returning null.
        if (this.size == 0) {
            return null;
        }
        E temp = this.first.item;
        this.first = this.first.next;
        this.size--;
        if (this.size == 0) {
            // Clear the stale tail reference. Without this, an offer() after draining
            // the queue appended to the removed node while first stayed null, losing
            // the element and crashing the next poll().
            this.last = null;
        } else {
            // Drop the back-link to the removed node so it can be collected.
            this.first.previous = null;
        }
        return temp;
    }

    /**
     * Removes first element from list, and returns it.
     * @return - first element, or null if size == 0.
     */
    public E remove() {
        return this.poll();
    }

    /**
     * Returns first element from the list, without deleting.
     * @return first element from the list, or null if the queue is empty.
     */
    public E peek() {
        // Null-safe, consistent with poll(): the original NPE'd on an empty queue.
        return this.size == 0 ? null : first.item;
    }

    /**
     * adding element to the end of the list.
     * @param e - element to add.
     */
    public void offer(E e) {
        Node<E> l = last;
        Node<E> newNode = new Node<>(e, l, null);
        if (l == null) {
            first = newNode;
            last = newNode;
        } else {
            l.next = newNode;
            last = l.next;
        }
        size++;
    }

    /**
     * Private class to store elements in list.
     * @param <E> - class to store.
     */
    private class Node<E> {
        /**
         * main element.
         */
        private E item;
        /**
         * Link to previous item.
         */
        private Node<E> previous;
        /**
         * link ot next item.
         */
        private Node<E> next;

        /**
         * Main constructor.
         * @param item - main item.
         * @param previous - link to previous item.
         * @param next - link to next item.
         */
        Node(E item, Node<E> previous, Node<E> next) {
            this.item = item;
            this.previous = previous;
            this.next = next;
        }
    }
}
wamdue/agorbunov
chapter_005/src/main/java/ru/job4j/list/SimpleQueue.java
Java
apache-2.0
2,235
// ============================================================================
// Copyright 2006-2012 Daniel W. Dyer
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ============================================================================
package org.uncommons.maths.random;

import java.util.Random;
import org.testng.annotations.Test;
import org.uncommons.maths.Maths;
import org.uncommons.maths.number.AdjustableNumberGenerator;
import org.uncommons.maths.number.NumberGenerator;
import org.uncommons.maths.statistics.DataSet;

/**
 * Unit test for the Poisson number generator.
 * @author Daniel Dyer
 */
public class PoissonGeneratorTest
{
    // Shared source of randomness for all generators under test.
    private final Random rng = new MersenneTwisterRNG();

    /**
     * Check that the observed mean and standard deviation are consistent
     * with the specified distribution parameters.
     */
    @Test(groups = "non-deterministic")
    public void testDistribution()
    {
        final double mean = 19;
        NumberGenerator<Integer> generator = new PoissonGenerator(mean, rng);
        checkDistribution(generator, mean);
    }

    /**
     * Check that a generator whose mean comes from an adjustable number
     * generator picks up subsequent changes to that mean.
     */
    @Test(groups = "non-deterministic")
    public void testDynamicParameters()
    {
        final double initialMean = 19;
        AdjustableNumberGenerator<Double> meanGenerator = new AdjustableNumberGenerator<Double>(initialMean);
        NumberGenerator<Integer> generator = new PoissonGenerator(meanGenerator, rng);
        checkDistribution(generator, initialMean);

        // Adjust parameters and ensure that the generator output conforms to this new
        // distribution.
        final double adjustedMean = 13;
        meanGenerator.setValue(adjustedMean);
        checkDistribution(generator, adjustedMean);
    }


    /**
     * The mean must be greater than zero to be useful.  This test ensures
     * that an appropriate exception is thrown if the mean is not positive.  Not
     * throwing an exception is an error because it permits undetected bugs in
     * programs that use {@link PoissonGenerator}.
     */
    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testMeanTooLow()
    {
        new PoissonGenerator(0d, rng);
    }


    /**
     * Draws a 10,000-value sample from the generator and asserts that every
     * value is non-negative and that the sample mean and standard deviation
     * fall within tolerance of the expected values.
     */
    private void checkDistribution(NumberGenerator<Integer> generator,
                                   double expectedMean)
    {
        // Variance of a Poisson distribution equals its mean.
        final double expectedStandardDeviation = Math.sqrt(expectedMean);

        final int iterations = 10000;
        DataSet data = new DataSet(iterations);
        for (int i = 0; i < iterations; i++)
        {
            int value = generator.nextValue();
            assert value >= 0 : "Value must be non-negative: " + value;
            data.addValue(value);
        }

        assert Maths.approxEquals(data.getArithmeticMean(), expectedMean, 0.02)
                : "Observed mean outside acceptable range: " + data.getArithmeticMean();
        assert Maths.approxEquals(data.getSampleStandardDeviation(), expectedStandardDeviation, 0.02)
                : "Observed standard deviation outside acceptable range: " + data.getSampleStandardDeviation();
    }
}
dwdyer/uncommons-maths
core/src/java/test/org/uncommons/maths/random/PoissonGeneratorTest.java
Java
apache-2.0
3,742
/******************************************************************************
 * supplier.js
 *
 * Copyright 2016 Marcos Salomão
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @version 1.0
 * @author Marcos Salomão (salomao.marcos@gmail.com)
 *****************************************************************************/

/**
 * Global object for the store's suppliers.
 */
! function($) {

    /*
     * Create the supplier scope.
     */
    $.supplier = {};

    /*****************************************************************************
     * Controller API
     *****************************************************************************/

    /**
     * Methods related to the supplier resource API.
     */
    $.supplier.api = {

        SERVICE_NAME : '/supplier',

        // Builds the service path, optionally appending a path variable (e.g. an id).
        service : function(pathVariable) {
            return $.supplier.api.SERVICE_NAME + (pathVariable? '/' + pathVariable : '');
        },

        /**
         * Persists the supplier, then repopulates the form with the saved entity.
         */
        save: function(_data) {

            // Execute the supplier save endpoint
            return $.api.request({
                path : $.supplier.api.service(),
                method : 'POST',
                body : _data,
                dialogSuccess : {
                    title : messages.supplier.save.dialog.title,
                    message : messages.supplier.save.dialog.success
                },
                dialogError : {
                    title : messages.supplier.save.dialog.title,
                    message : messages.supplier.save.dialog.errormessage
                }
            }).then(function(response) {
                $('form.supplier-form').populate(response.result);
                return response;
            });

        }, // End save()

        /**
         * Deletes the supplier with the given id.
         */
        delete: function(_id) {

            // Execute the supplier delete endpoint
            return $.api.request({
                path : $.supplier.api.service(_id),
                method : 'DELETE',
                dialogError : {
                    title : messages.supplier.delete.dialog.title,
                    message : messages.supplier.delete.dialog.errormessage
                }
            });

        }, // End delete()

    }; // End API

    /*****************************************************************************
     * View components
     *****************************************************************************/

    $.supplier.view = {

        /**
         * Builds the suppliers table and wires up its row action buttons.
         */
        bindTable: function(_data) {

            // Build the table
            $('table.table-suppliers').dataTable({
                service: $.supplier.api.service(),
                errorMessage: messages.supplier.list.dialog.errormessage,
                columns: [{
                    field: 'id',
                    visible: false
                }, {
                    field: 'name',
                    title: messages.supplier.name,
                    searchable: true
                }, {
                    title: '',
                    align: 'center',
                    searchable: false,
                    'class': 'col-sm-2',
                    formatter: $.common.view.tableactionbuttons,
                    events: {
                        'click button.delete': function(e, value, row, index) {
                            // Delete on the server, then drop the row from the table.
                            $.supplier.api.delete(row.id).then(
                                function() {
                                    $('table.table-suppliers').bootstrapTable('remove', {
                                        field: 'id',
                                        values: [row.id]
                                    });
                                });
                        },
                        'click button.update': function(e, value, row, index) {

                            // Populate the form; this must happen before showing the
                            // tab, otherwise the map is not refreshed
                            $('form.supplier-form').populate(row);

                            // show the form tab
                            $('.nav-tabs a[href="#tab_2"]').tab('show');
                        }
                    }
                }]
            });

        }, // End bindTable

        /**
         * Loads the suppliers table.
         */
        loadTable: function() {
            $.supplier.view.bindTable();
        }, // End loadTable

        /**
         * Load page event.
         */
        loadPage : function() {

            // Apply i18n
            $('span.tab_list').text(messages.supplier.tab.list);
            $('span.tab_save').text(messages.supplier.tab.save);
            $('h3.supplier_save_title').text(messages.supplier.save.title);
            $('span.new-item').text(messages.action.new_item);
            $('small.supplier_save_subtitle').text(messages.supplier.save.subtitle);
            $('label.name').text(messages.supplier.name);
            $('input[name="name"]').attr('placeholder', messages.supplier.form.name.placeholder);
            $('label.email').text(messages.supplier.email);
            $('input[name="email"]').attr('placeholder', messages.supplier.form.email.placeholder);
            $('label.phone').text(messages.supplier.phone);
            $('input[name="phone"]').attr('placeholder', messages.supplier.form.phone.placeholder);
            $('label.location').text(messages.supplier.location);
            $('input[name="location"]').attr('placeholder', messages.supplier.form.location.placeholder);
            $('button.save').text(messages.action.save);

            // Load the suppliers list
            $.supplier.view.loadTable();

            // Set up form validation
            $('form.supplier-form').validate({ // initialize the plugin
                rules: {
                    name: {
                        required: true,
                        minlength: 3
                    },
                    email : {
                        email: true
                    }
                },
                messages: {
                    name: messages.supplier.form.name.required,
                    email: messages.supplier.form.email.valid
                },

                /**
                 * Form submit action.
                 */
                submitHandler: function(form, event) {
                    // do not submit the form
                    event.preventDefault();

                    // Convert form to JSON Object
                    var data = $(form).serializeObject();

                    // Submit to the endpoint
                    $.supplier.api.save(data).then(function(_data) {

                        // Update the list
                        var row = $('table.table-suppliers').bootstrapTable(
                            'getRowByUniqueId', _data.id);

                        // Insert if not present, otherwise update the existing row
                        if (row == null) {
                            $('table.table-suppliers').bootstrapTable('insertRow', {
                                index: 0,
                                row: _data
                            });
                        } else {
                            $('table.table-suppliers').bootstrapTable('updateByUniqueId', {
                                id: _data.id,
                                row: _data
                            });
                        }

                    });
                }
            }); // End validate

            // Initialise the location map/autocomplete only when its tab is shown.
            $('.nav-tabs-custom').on('shown.bs.tab', function(e) {
                if ($(e.target).attr('href') != '#tab_2') return;
                $('.map-canvas').maps({
                    autocomplete : $('input[name="location"]')
                });
            });
        }
    };

}(jQuery);
salomax/livremkt
src/main/resources/static/app/supplier/supplier.js
JavaScript
apache-2.0
8,615