repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
boost-entropy-golang/buildbuddy
|
server/testutil/testbazel/testbazel.go
|
<reponame>boost-entropy-golang/buildbuddy
package testbazel
import (
"context"
"io/ioutil"
"os"
"path/filepath"
"testing"
"time"
"github.com/buildbuddy-io/buildbuddy/server/testutil/testfs"
"github.com/buildbuddy-io/buildbuddy/server/util/bazel"
"github.com/buildbuddy-io/buildbuddy/server/util/log"
"github.com/stretchr/testify/require"
bazelgo "github.com/bazelbuild/rules_go/go/tools/bazel"
)
const (
	// BazelBinaryPath specifies the path to the bazel binary used for invocations.
	// Must match the path in the build rule.
	BazelBinaryPath = "server/util/bazel/bazel-5.0.0rc3"
)

// BinaryPath returns the absolute runfiles path to the bazel binary,
// failing the test immediately if the runfile cannot be resolved.
func BinaryPath(t *testing.T) string {
	path, err := bazelgo.Runfile(BazelBinaryPath)
	require.NoError(t, err, "look up bazel binary path")
	return path
}
// Invoke runs the bazel CLI with the given subcommand and args from within
// workspaceDir and returns the invocation result.
func Invoke(ctx context.Context, t *testing.T, workspaceDir string, subCommand string, args ...string) *bazel.InvocationResult {
	return bazel.Invoke(ctx, BinaryPath(t), workspaceDir, subCommand, args...)
}
// Clean runs `bazel clean` within the given workspace.
func Clean(ctx context.Context, t *testing.T, workspaceDir string) *bazel.InvocationResult {
	result := Invoke(ctx, t, workspaceDir, "clean")
	return result
}
// shutdown runs `bazel shutdown` within the given workspace, failing the test
// if the invocation itself reports an error. The elapsed time is logged even
// on failure (via defer) for diagnostics.
func shutdown(ctx context.Context, t *testing.T, workspaceDir string) {
	start := time.Now()
	defer func() {
		log.Infof("bazel shutdown completed in %s", time.Since(start))
	}()
	result := Invoke(ctx, t, workspaceDir, "shutdown")
	if result.Error != nil {
		t.Fatal(result.Error)
	}
}
// MakeTempWorkspace creates a temporary bazel workspace populated with the
// given files (relative path -> contents) and returns the workspace directory.
// A cleanup hook shuts down the workspace's bazel server when the test ends.
func MakeTempWorkspace(t *testing.T, contents map[string]string) string {
	root := testfs.MakeTempDir(t)
	t.Cleanup(func() {
		// Shut the bazel server down ASAP rather than waiting for its idle
		// timeout, so high test volume doesn't accumulate idle processes.
		shutdown(context.Background(), t, root)
	})
	for relPath, data := range contents {
		dest := filepath.Join(root, relPath)
		require.NoError(t, os.MkdirAll(filepath.Dir(dest), 0777), "failed to create bazel workspace contents")
		require.NoError(t, ioutil.WriteFile(dest, []byte(data), 0777), "failed to create bazel workspace contents")
	}
	return root
}
|
jecinfo2016/open-capacity-platform-backend
|
business-center/file-center/src/main/java/com/open/capacity/oss/service/HFileService.java
|
package com.open.capacity.oss.service;
import com.open.capacity.common.web.PageResult;
import com.open.capacity.oss.model.FileInfo;
import com.open.capacity.oss.model.SmallFile;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
 * Service interface of the file center: persistence of small (chunked) files
 * plus listing and maintenance of file metadata records.
 */
public interface HFileService {
    /**
     * Saves a single small file.
     *
     * @param smallFile the file chunk to persist
     * @return true if the file was saved successfully
     */
    boolean saveFile(SmallFile smallFile);
    /**
     * Saves multiple small files.
     *
     * @param smallFiles the file chunks to persist
     * @return true if all files were saved successfully
     */
    boolean saveFiles(List<SmallFile> smallFiles);
    /**
     * Deletes a file by id.
     *
     * @param namespace the storage namespace the file lives in
     * @param id        the file id
     */
    void removeFile(String namespace, String id);
    /**
     * Deletes a file by name.
     *
     * @param namespace the storage namespace the file lives in
     * @param name      the file name
     */
    void removeFileByName(String namespace, String name);
    /**
     * Looks up a file by id.
     *
     * @param namespace the storage namespace to search
     * @param id        the file id
     * @return the file, or {@link Optional#empty()} if not found
     */
    Optional<SmallFile> getFileById(String namespace, String id);
    /**
     * Looks up a file by name.
     *
     * @param namespace the storage namespace to search
     * @param name      the file name
     * @return the file, or {@link Optional#empty()} if not found
     */
    Optional<SmallFile> getFileByName(String namespace, String name);
    /**
     * Lists file-info records matching the given query parameters.
     *
     * @param params query/paging parameters
     * @return one page of matching file-info records
     */
    PageResult<FileInfo> findList(Map<String, Object> params);
    /**
     * Persists a file-info record.
     *
     * @param fileInfo the record to save
     * @return number of rows affected
     */
    int save(FileInfo fileInfo);
    /**
     * Deletes a file-info record by id.
     *
     * @param id the record id
     * @return number of rows affected
     */
    int delete(String id);
}
|
CoprHD/sds-controller
|
vipr-portal/portal/app/models/datatable/VirtualPoolDataTable.java
|
<reponame>CoprHD/sds-controller
/*
* Copyright (c) 2015 EMC Corporation
* All Rights Reserved
*/
package models.datatable;
import java.util.Collection;
import models.PoolTypes;
import models.ProvisioningTypes;
import org.apache.commons.lang.StringUtils;
import util.datatable.DataTable;
import com.emc.storageos.model.vpool.VirtualPoolCommonRestRep;
/** Datatable listing virtual pools, with a nested row model. */
public class VirtualPoolDataTable extends DataTable {
    /** Configures the datatable columns and the default sort order. */
    public VirtualPoolDataTable() {
        addColumn("storageType").hidden();
        addColumn("name").setRenderFunction("renderLink");
        addColumn("description");
        addColumn("poolType").hidden();
        addColumn("provisioningType").hidden();
        addColumn("provisionedAs");
        addColumn("storagePoolAssignment");
        addColumn("protocols");
        addColumn("numPools");
        addColumn("numResources");
        sortAllExcept("storageType");
        setDefaultSort("name", "asc");
    }

    /** Row model backing the virtual pool datatable. */
    public static class VirtualPoolInfo {
        public String id;
        public String name;
        public String description;
        public Integer numPools;
        public Integer numResources;
        public String poolType;
        public String poolTypeDisplay;
        public String provisioningType;
        public String provisioningTypeDisplay;
        public String protocols;
        public Boolean useMatchedPools;
        public String storagePoolAssignment;
        public Boolean warning;
        public String provisionedAs;

        public VirtualPoolInfo() {
        }

        /**
         * Builds a row from the REST representation of a virtual pool.
         *
         * @param vsp the virtual pool REST model (must have a non-null id)
         */
        public VirtualPoolInfo(VirtualPoolCommonRestRep vsp) {
            this.id = vsp.getId().toString();
            this.name = vsp.getName();
            this.poolType = vsp.getType();
            this.provisioningType = vsp.getProvisioningType();
            this.poolTypeDisplay = PoolTypes.getDisplayValue(vsp.getType());
            this.provisioningTypeDisplay = ProvisioningTypes.getDisplayValue(vsp.getProvisioningType());
            this.provisionedAs = String.format("%s (%s)", poolTypeDisplay, provisioningTypeDisplay);
            this.description = vsp.getDescription();
            // Pool count depends on whether pools are matched automatically
            // or assigned manually.
            if (Boolean.TRUE.equals(vsp.getUseMatchedPools())) {
                this.storagePoolAssignment = "automatic";
                this.numPools = size(vsp.getMatchedStoragePools());
            }
            else {
                this.storagePoolAssignment = "manual";
                this.numPools = size(vsp.getAssignedStoragePools());
            }
            this.numResources = defaultInt(vsp.getNumResources());
            this.protocols = StringUtils.join(vsp.getProtocols(), ", ");
        }

        /** Null-safe size of a possibly-absent collection. */
        private static int size(Collection<?> collection) {
            return collection != null ? collection.size() : 0;
        }

        /** Null-safe unboxing; made static for consistency with {@link #size}. */
        private static int defaultInt(Integer value) {
            return value != null ? value : 0;
        }
    }
}
|
Bhaskers-Blu-Org1/FuzzMon
|
src/entities/entities.js
|
<reponame>Bhaskers-Blu-Org1/FuzzMon
// Licensed Materials - Property of IBM
// (C) Copyright IBM Corp. 2018, 2018 All Rights Reserved
// IBM-Review-Requirement: Art30.3 marking
// Developed by <NAME> at Haifa Research Lab
const logger = require("../common/logger");
require("../common/utils"); // Creates the insert method for the generalized input sequence.
const isHexNumTest = /^[0-9a-fA-F]+$/;
/**
 * Static helpers for deriving a structural "type" descriptor from raw values.
 */
class Type {
    /**
     * Attempts to parse `string` as JSON.
     *
     * @return {?} The parsed value on success, false on parse failure.
     *             NOTE(review): a successfully parsed falsy value ("0",
     *             "false", "null", '""') is indistinguishable from a parse
     *             failure here, so callers treat such strings as non-JSON —
     *             confirm this is intended.
     */
    static isStringJSON(string) {
        try {
            return JSON.parse(string);
        } catch (e) {
            return false;
        }
    }
    /**
     * Recursively determines a type descriptor for `rawItem`.
     * Strings that parse as JSON are typed by their parsed value; hex-looking
     * strings become 'hexNumber'. Objects/arrays are mapped element-by-element
     * into a mirror structure of type names.
     *
     * @param {?} rawItem The value to type.
     * @return {string|Object|Array} A type name, or a structure of type names.
     * @throws {Error} For unsupported typeof results (e.g. 'function', 'symbol').
     */
    static determineType(rawItem) {
        let rawItemType = typeof(rawItem);
        switch (rawItemType) {
            case 'string':
            {
                let json = Type.isStringJSON(rawItem);
                if (json) {
                    return Type.determineType(json);
                } else if (isHexNumTest.test(rawItem)) {
                    return 'hexNumber';
                } else {
                    return rawItemType;
                }
            }
            break; // making the linter happy
            case 'number':
            case 'boolean':
            // NOTE(review): typeof never yields 'null' or 'undefined' for
            // these two cases ('object'/'undefined' are returned instead);
            // null is handled in the 'object' branch below.
            case 'null':
            case 'undefined':
                return rawItemType;
            case 'object':
            {
                switch (rawItem) {
                    case null:
                        return 'null';
                    case undefined:
                        return 'undefined';
                }
                // Deep-copy first so the caller's object is never mutated.
                let rawItemCopy = JSON.parse(JSON.stringify(rawItem));
                if (rawItemCopy instanceof Array) {
                    rawItemCopy = rawItemCopy.map(item => Type.determineType(item));
                } else {
                    Object.keys(rawItemCopy).map(key => rawItemCopy[key] = Type.determineType(rawItemCopy[key]));
                }
                return rawItemCopy;
            }
            default:
                throw Error('Unsupported type: ' + rawItemType + '\nraw string representation: ' + rawItem);
        }
    }
}
/**
 * Class representing the value of a single param.
 *
 * @class ParamVal (name)
 */
class ParamVal {
    /**
     * Constructs the object.
     *
     * @param {string} name   Name of the param this value belongs to.
     * @param {?}      value  The value of the param.
     * @param {?}      [type] The type of the value. When omitted it is
     *                        derived via Type.determineType(value).
     */
    constructor(name, value, type) {
        this.name = name;
        this.value = value;
        this.type = type || Type.determineType(value);
    }
    /**
     * Creates a new instance of the object with same properties as the original.
     *
     * @return {ParamVal} Deep copy of this object.
     */
    clone() {
        return new ParamVal(this.name, JSON.parse(JSON.stringify(this.value)), JSON.parse(JSON.stringify(this.type)));
    }
    /**
     * Returns a string representation of the object.
     *
     * @return {string} String representation of the object.
     */
    toString() {
        let paramValueStr = null;
        if (typeof(this.type) === 'object') {
            paramValueStr = JSON.stringify(this.value);
        } else {
            // Bug fix: only null/undefined render as empty. The previous
            // `!this.value` check also dropped valid falsy values such as
            // the number 0, the boolean false, and the empty string.
            if (this.value === null || this.value === undefined) {
                paramValueStr = '';
            } else {
                switch (this.type) {
                    case 'string':
                    case 'hexNumber':
                        paramValueStr = this.value;
                        break;
                    case 'number':
                        paramValueStr = this.value.toString();
                        break;
                    case 'boolean':
                        paramValueStr = this.value + '';
                        break;
                    default:
                        throw Error('Unsupported type: ' + this.type);
                }
            }
        }
        return 'ParamVal ' + this.name + ': ' + paramValueStr + ' (' + this.type + ')';
    }
}
/**
 * Holds the value of a request body together with its (possibly inferred)
 * type descriptor.
 */
class BodyVal {
    constructor(value, type, /*flatType*/ /*flattened type object, kept for a possible optimization*/ ) {
        this.value = value;
        this.type = type || Type.determineType(value);
    }
    /**
     * Deep-copies this body value.
     *
     * @return {BodyVal} Copy of this object.
     */
    clone() {
        try {
            // Mutation may have nulled out value/type; fall back to empty
            // objects in that case.
            let valueCopy = {};
            let typeCopy = {};
            if (this.value && this.type) {
                valueCopy = JSON.parse(JSON.stringify(this.value));
                typeCopy = JSON.parse(JSON.stringify(this.type));
            }
            return new BodyVal(valueCopy, typeCopy);
        } catch (e) {
            logger.error(e);
            logger.exitAfterFlush();
        }
    }
    toString() {
        return 'BodyVal: ' + JSON.stringify(this.value);
    }
}
/**
 * Base class whose constructor enforces that subclasses provide the input
 * interface (getFilename / clone / toString).
 */
class AbstractInput {
    constructor() {
        // NOTE(review): these guards only fire when the method is missing
        // entirely. In particular `this.toString` always resolves to
        // Object.prototype.toString, so the third check can never throw —
        // a subclass that forgets to override toString is not caught.
        if (!this.getFilename) {
            throw Error('`filename` method is required in an input object');
        }
        if (!this.clone) {
            throw Error('`clone` method is required in an input object');
        }
        if (!this.toString) {
            throw Error('`toString` method is required in an input object');
        }
    }
}
/**
 * An ordered sequence of `GeneralizedInput` objects.
 *
 * @class InputSequence (name)
 * @note Thin wrapper over an array, kept as a class for organization.
 */
class InputSequence {
    /**
     * @param {[GeneralizedInput]} inputSequence Initial list (defaults to empty).
     * @param {boolean} isFromUserInput Whether the sequence came from user input.
     */
    constructor(inputSequence, isFromUserInput = false) {
        this.inputSequence = inputSequence || [];
        this.isFromUserInput = isFromUserInput;
    }
    /**
     * Inserts a generalized input (or an array of them) at `index`.
     * `Array.prototype.insert` is patched in by common/utils.
     *
     * @param {number} index Insertion position.
     * @param {GeneralizedInput|[GeneralizedInput]} generalizedInput Item(s) to insert.
     */
    insertTo(index, generalizedInput) {
        // Assumes this.inputSequence stays small (<< 10K items).
        const items = (generalizedInput instanceof Array) ? generalizedInput : [generalizedInput];
        this.inputSequence.insert(index, ...items);
    }
    /**
     * Removes the generalized input at `index`.
     * `Array.prototype.removeAt` is patched in by common/utils.
     *
     * @param {number} index Position to remove.
     */
    removeFrom(index) {
        this.inputSequence.removeAt(index);
    }
    at(index) {
        return this.inputSequence[index];
    }
    setAt(index, value) {
        this.inputSequence[index] = value;
    }
    map(func) {
        return this.inputSequence.map(func);
    }
    some(func) {
        return this.inputSequence.some(func);
    }
    filter(func) {
        return this.inputSequence.filter(func);
    }
    /** Number of generalized inputs in the sequence. */
    get size() {
        return this.inputSequence.length;
    }
    /**
     * Deep-copies the sequence (each element via its own clone()).
     *
     * @return {InputSequence} Copy of this object, marked as not user-provided.
     */
    clone() {
        const copies = this.inputSequence.map(gi => gi.clone());
        return new InputSequence(copies, false);
    }
    toString() {
        const body = this.inputSequence.map(gis => gis.toString()).join('\n');
        return 'isFromUserInput: ' + this.isFromUserInput +
            ' InputSequence(' + this.size + '): ' + body;
    }
}
/**
 * Class representing a single parameter to a single entry point.
 *
 * @class EntryParam (name)
 */
class EntryParam {
    /**
     * Constructs the object.
     *
     * @param {String} name Name of the entry point param.
     * @param {String} type Type of the param; objects use a json format.
     * @param {?} extraInfo Additional metadata about the param.
     * @param {number} weight Importance of this param, used during mutation.
     */
    constructor(name, type, extraInfo, weight = 1) {
        this.name = name;
        this.type = type;
        this.extraInfo = extraInfo;
        this.weight = weight;
    }
    /**
     * Returns a string representation of the object.
     *
     * @return {string} String representation of the object.
     */
    toString() {
        return 'EntryParam: ' + this.name + ' ' + this.type;
    }
    eq(rhs) {
        // Name compares strictly; type intentionally compares loosely (==).
        return (this.name === rhs.name) && (this.type == rhs.type);
    }
}
/**
 * Class representation of a single entry point to the project.
 *
 * @class EntryPoint (name)
 */
class EntryPoint {
    /**
     * Constructs the object.
     *
     * @param {String} filename File the entry point lives in.
     * @param {String} entryName Name of the entry point: a function name for a
     *        simple run, or a route path (e.g. '/api/deal') for express.
     * @param {[EntryParam]} enteryParams Parameters of the entry point.
     * @param {?} extraInfo Optional additional metadata.
     * @param {number} entryWeight Importance of the entry point, used during mutation.
     */
    constructor(filename, entryName, enteryParams, extraInfo, entryWeight = 0.1) {
        this.filename = filename;
        this.entryName = entryName;
        this.enteryParams = enteryParams || [];
        this.extraInfo = extraInfo;
        this.entryWeight = entryWeight;
    }
    /**
     * Returns a string representation of the object.
     *
     * @return {string} String representation of the object.
     */
    toString() {
        const paramsStr = this.enteryParams ? this.enteryParams.map(p => p.toString()).join(', ') : 'nothing';
        const extraStr = this.extraInfo ? this.extraInfo.toString() : '';
        return 'EntryPoint: ' + this.entryName + ' (' + paramsStr + ') [' + this.filename + ']\n\t' + extraStr;
    }
    eq(rhs) {
        // Params are deliberately excluded from the comparison.
        return (this.filename === rhs.filename) && (this.entryName === rhs.entryName);
    }
}
// Public API of this module.
Object.assign(exports, {
    EntryPoint,
    EntryParam,
    ParamVal,
    InputSequence,
    Type,
    BodyVal,
    AbstractInput,
});
|
fabiolee/android-run
|
base/src/main/java/com/blogspot/carirunners/run/ui/common/NavigationController.java
|
package com.blogspot.carirunners.run.ui.common;
import com.blogspot.carirunners.run.R;
import com.blogspot.carirunners.run.MainFragment;
import com.blogspot.carirunners.run.ui.search.SearchFragment;
import com.blogspot.carirunners.run.ui.settings.SettingsFragment;
import com.blogspot.carirunners.run.ui.user.UserFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.AppCompatActivity;
import javax.inject.Inject;
/**
 * A utility class that handles navigation in {@link AppCompatActivity}.
 */
public class NavigationController {
    private final int containerId;
    private final FragmentManager fragmentManager;

    @Inject
    public NavigationController(AppCompatActivity activity) {
        this.containerId = R.id.container;
        this.fragmentManager = activity.getSupportFragmentManager();
    }

    /** Replaces the container content with a fresh {@link SearchFragment}. */
    public void navigateToSearch() {
        SearchFragment searchFragment = new SearchFragment();
        fragmentManager.beginTransaction()
                .replace(containerId, searchFragment)
                .commitAllowingStateLoss();
    }

    /** Shows the favorites tab, reusing a previously added fragment when possible. */
    public void navigateToFavorite() {
        String tag = "FavoriteFragment";
        changeTab(findOrCreateMainFragment(tag), tag);
    }

    /**
     * Shows the page tab, reusing a previously added fragment when possible.
     *
     * @return the fragment now shown in the page tab
     */
    public MainFragment navigateToPage() {
        String tag = "PageFragment";
        MainFragment fragment = findOrCreateMainFragment(tag);
        changeTab(fragment, tag);
        return fragment;
    }

    /** Navigates to the page tab, then to the given post once the tab is ready. */
    public void navigateToPost(String id, String title, String path, boolean favorite) {
        MainFragment parentFragment = navigateToPage();
        parentFragment.navigateToPostWhenReady(id, title, path, favorite);
    }

    /** Shows the settings tab, reusing a previously added fragment when possible. */
    public void navigateToSettings() {
        String tag = "SettingsFragment";
        SettingsFragment fragment = (SettingsFragment) fragmentManager.findFragmentByTag(tag);
        if (fragment == null) {
            fragment = new SettingsFragment();
        }
        changeTab(fragment, tag);
    }

    /** Pushes a user-profile screen for {@code login} onto the back stack. */
    public void navigateToUser(String login) {
        String tag = "user" + "/" + login;
        UserFragment userFragment = UserFragment.create(login);
        fragmentManager.beginTransaction()
                .replace(containerId, userFragment, tag)
                .addToBackStack(null)
                .commitAllowingStateLoss();
    }

    /**
     * Looks up the {@link MainFragment} registered under {@code tag}, creating
     * a new instance when none exists yet. Extracted to remove the duplicated
     * find-or-create logic in navigateToFavorite/navigateToPage.
     */
    private MainFragment findOrCreateMainFragment(String tag) {
        MainFragment fragment = (MainFragment) fragmentManager.findFragmentByTag(tag);
        return fragment != null ? fragment : MainFragment.newInstance(tag);
    }

    /**
     * Switches the container to {@code fragment}: detaches the current fragment
     * (hiding its menu) and attaches or adds the target. No-op if the target is
     * already added and attached.
     */
    private void changeTab(Fragment fragment, String tag) {
        if (fragment.isAdded()) {
            return;
        }
        FragmentTransaction transaction = fragmentManager.beginTransaction();
        Fragment currentFragment = fragmentManager.findFragmentById(containerId);
        if (currentFragment != null) {
            currentFragment.setMenuVisibility(false);
            currentFragment.setUserVisibleHint(false);
            transaction.detach(currentFragment);
        }
        if (fragment.isDetached()) {
            transaction.attach(fragment);
        } else {
            transaction.add(containerId, fragment, tag);
        }
        fragment.setMenuVisibility(true);
        fragment.setUserVisibleHint(true);
        transaction.commitAllowingStateLoss();
    }
}
|
rightson/network-programming
|
np3-rbs/cgi/client.hpp
|
<gh_stars>0
#ifndef __CLIENT_H__
#define __CLIENT_H__
#include "utils.hpp"
extern void log(const char *fmt, ...);
extern void printServerOutput(int column, const std::string &str, bool isCmd);
// Represents one outbound connection to a ras/rwg server driven by a command
// script file. Tracks a small connect/read/write/close state machine used by
// the CGI driver's select() loop.
class Client {
public:
    // NOTE: the method name keeps its historical typo ("Servre") because
    // external callers depend on it.
    void setServreID(const int &id) {
        this->id = id;
    }
    void setHostname(const std::string &name) {
        this->hostname = name;
    }
    void setPort(const int &port) {
        this->port = port;
    }
    void setCmdfile(const std::string &name) {
        this->cmdfile = name;
    }
    // Logs this client's configuration and current state.
    void dump(int serverID) {
        log("clients[%d]: { id: %d, hostname: %s, port: %d, cmdfile: %s, status: %s }\n",
            serverID, id, hostname.c_str(), port, cmdfile.c_str(), statusName());
    }
    // Starts a (non-blocking) connect. EINPROGRESS is expected for
    // non-blocking sockets and is not treated as an error.
    bool connect() {
        connfd = utils::getClientSocket(sockAddr, hostname.c_str(), port, true);
        int status = ::connect(connfd, (sockaddr *) &sockAddr, sizeof(sockAddr));
        if (status == -1 && errno != EINPROGRESS) {
            printf("Internal Error: failed to connect to ras/rwg server %d\n", id);
            printf("%s\n", strerror(errno));
            exit(EXIT_FAILURE);
        }
        return true;
    }
    int getServerID() {
        return id;
    }
    int getConnfd() {
        return connfd;
    }
    const std::string& getHostname() {
        return hostname;
    }
    const int getPort() {
        return port;
    }
    const std::string& getCmdfile() {
        return cmdfile;
    }
    // Returns the peer IP as dotted-quad, or "" if not connected.
    const char *getHostIP() {
        if (connfd != -1) {
            ip = inet_ntoa(sockAddr.sin_addr);
            return ip.c_str();
        }
        return "";
    }
    void startReading() {
        status = FD_READ;
    }
    void startWriting() {
        status = FD_WRITE;
    }
    void startClosing() {
        status = FD_CLOSE;
    }
    // Checks the pending socket error (SO_ERROR) after an async connect.
    bool isConnectionOk() {
        int error = 0;
        socklen_t len = sizeof (error);
        int status = getsockopt(connfd, SOL_SOCKET, SO_ERROR, &error, &len);
        log("clients[%d] isConnectionOk status:%s error:%d\n", id, statusName(), error);
        return status == 0 && error == 0;
    }
    bool isConnecting() {
        return status == FD_CONNECT;
    }
    bool isReading() {
        return status == FD_READ;
    }
    bool isWriting() {
        return status == FD_WRITE;
    }
    bool isClose() {
        return status == FD_CLOSE;
    }
    void writeHTML(const std::string &str, bool isCmd = false) {
        printServerOutput(id, str, isCmd);
    }
    // Reads one buffer's worth of data from the socket.
    // Returns "" on EOF or on a real error (which also moves to FD_CLOSE).
    std::string read() {
        char buffer[utils::MAX_BUFFER_SIZE];
        memset(buffer, 0, utils::MAX_BUFFER_SIZE);
        log("clients[%d] read fd:%d\n", id, connfd);
        int n = utils::readFD(connfd, buffer);
        // Bug fix: the old test used `||`, which is always true; a real error
        // is when errno is NEITHER EWOULDBLOCK NOR EAGAIN.
        if (n == -1 && (errno != EWOULDBLOCK && errno != EAGAIN)) {
            status = FD_CLOSE;
            writeHTML("socket read error!");
            log("clients[%d] socket %d read error\n", id, connfd);
            return "";
        }
        else if (n == 0) {
            log("clients[%d] socket %d read EOF\n", id, connfd);
            return "";
        } else {
            log("clients[%d] socket %d read [%s]\n", id, connfd, buffer);
            return buffer;
        }
    }
    // Writes the next command line from cmdfile to the socket.
    // Returns false when the write was partial (remainder is buffered and
    // retried on the next call), true otherwise.
    bool write() {
        if (!ifs.is_open()) {
            ifs.open(cmdfile, std::ifstream::in);
        }
        if (!ifs.good()) {
            // Bug fix: writeHTML takes (string, bool); the old call passed a
            // printf-style format plus c_str(), which was silently converted
            // to the bool flag and never substituted into the message.
            writeHTML("failed to open file " + cmdfile + "\n");
            utils::die("This program should be closed\n");
        }
        if (writeBuffer.empty()) {
            getline(ifs, writeBuffer);
            writeBuffer += "\n";
        }
        writeHTML(writeBuffer, true);
        ssize_t n = utils::writeFD(connfd, (char *)writeBuffer.c_str());
        // Bug fix: same `||` -> `&&` correction as in read().
        if (n == -1 && (errno != EWOULDBLOCK && errno != EAGAIN)) {
            writeHTML("Error: failed to write to server");
            utils::die("This program should be closed\n");
        }
        else if (n != (ssize_t)writeBuffer.size() && n != -1) {
            log("clients[%d] write imcompleted\n", id);
            writeBuffer = writeBuffer.substr(n);
            return false;
        } else if (n == (ssize_t)writeBuffer.size()) {
            log("clients[%d] write completed\n", id);
            writeBuffer.clear();
        }
        return true;
    }
    // Human-readable name of the current state, for logging.
    const char *statusName() {
        switch (status) {
            case FD_CONNECT:
                return "FD_CONNECT";
            case FD_READ:
                return "FD_READ";
            case FD_WRITE:
                return "FD_WRITE";
            case FD_CLOSE:
                return "FD_CLOSE";
            default:
                return "Unknown";
        }
    }
private:
    // Connection life-cycle states driven by the select() loop.
    enum {
        FD_CONNECT,
        FD_READ,
        FD_WRITE,
        FD_CLOSE,
    };
    int id = -1;
    int connfd = -1;
    int status = FD_CONNECT;
    struct sockaddr_in sockAddr;
    std::string hostname;
    std::string ip;       // cached dotted-quad of the peer
    int port = 0;
    std::string cmdfile;  // script file of commands to send
    std::ifstream ifs;
    std::string writeBuffer;  // pending (possibly partial) line to send
};
typedef std::map<int, Client> Clients;
#endif
|
liangzhuo1/projrct
|
heima-leadnews-model/src/main/java/com/heima/model/admin/pojos/AdUser.java
|
<gh_stars>0
package com.heima.model.admin.pojos;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * <p>
 * Administrator user record (table "ad_user").
 * </p>
 *
 * @author itheima
 */
@Data
@TableName("ad_user")
public class AdUser implements Serializable {
    private static final long serialVersionUID = 1L;
    /**
     * Primary key.
     */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;
    /**
     * Login user name.
     */
    @TableField("name")
    private String name;
    /**
     * Login password (hashed).
     */
    @TableField("password")
    private String password;
    /**
     * Password salt.
     */
    @TableField("salt")
    private String salt;
    /**
     * Nickname.
     */
    @TableField("nickname")
    private String nickname;
    /**
     * Avatar image.
     */
    @TableField("image")
    private String image;
    /**
     * Phone number.
     */
    @TableField("phone")
    private String phone;
    /**
     * Status:
     *   0 temporarily unavailable
     *   1 permanently unavailable
     *   9 normal / available
     */
    @TableField("status")
    private Integer status;
    /**
     * Email address.
     */
    @TableField("email")
    private String email;
    /**
     * Time of the most recent login.
     */
    @TableField("login_time")
    private Date loginTime;
    /**
     * Record creation time.
     */
    @TableField("created_time")
    private Date createdTime;
}
|
Mepomep/scala-loci
|
scala-loci-lang/shared/src/main/scala/loci/runtime/RemoteRequest.scala
|
package loci
package runtime
import loci.transmitter.RemoteRef
// A pending remote access: resolves `placedValue` on the remote peers matching
// `peer`/`remotes` through `system` when the transmission is accessed.
final class RemoteRequest[V, R, T, L, M, U](
    arguments: U,
    placedValue: PlacedValue[U, T],
    peer: Peer.Signature,
    remotes: Seq[RemoteRef],
    system: System)
  extends transmitter.Transmission[V, R, T, L, M] {

  // Identity tuple used as part of the cache key below.
  private[this] val remoteId = (placedValue, peer, remotes, system)

  // Memoizes `body` in the system cache, but only for stable placed values;
  // unstable values are re-evaluated on every call.
  @inline private[loci] def cache[B <: AnyRef](id: Any, body: => B): B =
    if (placedValue.stable)
      system.cache((remoteId, id), body)
    else
      body

  // Notification fired when a matching remote joins.
  @inline private[loci] val remoteJoined: Notification[Remote[R]] =
    system.remoteJoined(peer, remotes, earlyAccess = true)

  // Notification fired when a matching remote leaves.
  @inline private[loci] val remoteLeft: Notification[Remote[R]] =
    system.remoteLeft(peer, remotes, earlyAccess = true)

  // Currently connected remotes matching this request.
  @inline private[loci] def remotesReferences: Seq[Remote[R]] =
    system.remoteReferences(peer, remotes, earlyAccess = true)

  // Performs the remote access and collects one result per remote.
  @inline private[loci] def retrieveValues: Seq[T] =
    system.invokeRemoteAccess(arguments, placedValue, peer, remotes, requestResult = true)
}
|
sahibdhanjal/astrobee
|
mobility/planner_qp/traj_opt_pro/include/traj_opt_pro/nonlinear_trajectory.h
|
/* Copyright (c) 2017, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
*
* All rights reserved.
*
* The Astrobee platform is licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include <traj_opt_pro/nonlinear_solver.h>
#include <traj_opt_pro/polynomial_basis.h>
#include <traj_opt_pro/trajectory_solver.h>
#include <boost/algorithm/clamp.hpp>
#include <map>
#include <tuple>
#include <utility>
#include <vector>
#ifndef TRAJ_OPT_PRO_NONLINEAR_TRAJECTORY_H_
#define TRAJ_OPT_PRO_NONLINEAR_TRAJECTORY_H_
namespace traj_opt {
// Solver-variable containers: a trajectory is nested as
// NLTraj[dimension][segment][coefficient]; times hold one variable per segment.
typedef std::vector<Variable *> NLTimes;
typedef std::vector<Variable *> NLPoly;
typedef std::vector<NLPoly> NLSpline;
typedef std::vector<NLSpline> NLTraj;
// to maintain same tensor ordering of (dim,segment) for ball confinement
typedef std::vector<Variable *> NLChain; // (xc,yc,zc,radius)
typedef std::vector<NLChain> NLBeads;
class AxbConstraint;
class BallConstraint;
// Sparse symbolic polynomial over solver variables: terms are either linear
// (a * coeff * t^n) or quadratic (a * coeff0 * coeff1 * t^n) in the
// polynomial coefficients.
class SymbolicPoly {
  typedef std::tuple<Variable *, Variable *, int>
      PolyTriple; // for linear polys in c
  typedef std::tuple<Variable *, Variable *, Variable *, int>
      PolyQuad; // for quadratic polys in c

 public:
  SymbolicPoly() {} // null constructor
  // form of a*coeff*t^n
  SymbolicPoly(Variable *coeff, Variable *time, int n, decimal_t a);
  // form of a*coeff0*coeff1*t^n
  SymbolicPoly(Variable *coeff0, Variable *coeff1, Variable *time, int n,
               decimal_t a);
  SymbolicPoly &operator+=(const SymbolicPoly &rhs);
  friend SymbolicPoly operator*(decimal_t lhs, const SymbolicPoly &rhs);
  // Evaluates the polynomial at the variables' current values.
  decimal_t evaluate();
  // for these derivatives assume only linear terms
  ETV gradient(int u_id);
  ETV hessian();
  // for these derivatives assume only quadratic terms
  ETV quad_gradient(int u_id);
  ETV quad_gradient();
  ETV quad_hessian();
  SymbolicPoly square(); // returns the square of the linear parts
  void add(const SymbolicPoly &rhs);
  friend std::ostream &operator<<(std::ostream &os, const SymbolicPoly &poly);

 private:
  std::map<PolyTriple, decimal_t> poly_map;
  std::map<PolyQuad, decimal_t> quad_map;
};
class PolyCost;
class TimeBound;
// Nonlinear trajectory optimization problem: builds polynomial spline
// variables, cost, and constraints, and solves them with NonlinearSolver.
class NonlinearTrajectory : public Trajectory {
 public:
  // standard construction from waypoints and polyhedral (Ax <= b) constraints
  NonlinearTrajectory(
      const std::vector<Waypoint> &waypoints,
      const std::vector<std::pair<MatD, VecD> > &cons, int deg = 7,
      int min_dim = 3, boost::shared_ptr<std::vector<decimal_t> > ds =
                           boost::shared_ptr<std::vector<decimal_t> >(),
      boost::shared_ptr<VecDVec> path = boost::shared_ptr<VecDVec>());
  // nonconvex pointcloud test
  NonlinearTrajectory(const std::vector<Waypoint> &waypoints,
                      const Vec3Vec &points, int segs, decimal_t dt);
  virtual ~NonlinearTrajectory() {}
  decimal_t getTotalTime() const;
  // Evaluates the `derr`-th derivative of the trajectory at time t into `out`.
  bool evaluate(decimal_t t, uint derr, VecD &out) const;
  decimal_t getCost();
  TrajData serialize();
  bool isSolved() { return solved_; }
  Vec4Vec getBeads();
  void scaleTime(decimal_t ratio);

 protected:
  // Problem-construction helpers (called during construction):
  void allocate_poly(
      boost::shared_ptr<std::vector<decimal_t> > ds =
          boost::shared_ptr<std::vector<decimal_t> >(),
      boost::shared_ptr<VecDVec> path = boost::shared_ptr<VecDVec>());
  void allocate_beads();
  void link_sections(); // note, requires endpoint basis
  void add_boundary(
      const std::vector<Waypoint> &waypoints); // add boundary values
  void add_Axb(const std::vector<std::pair<MatD, VecD> > &cons);
  void addCloudConstraint(const Vec3Vec &points);
  void addPosTime(); // ensures time segments are > 0
  void addTimeBound(boost::shared_ptr<std::vector<decimal_t> > ds =
                        boost::shared_ptr<std::vector<decimal_t> >());
  void make_convex(boost::shared_ptr<std::vector<decimal_t> > ds);
  NonlinearSolver solver{10000};
  NLTraj traj;
  NLTimes times;
  NLBeads beads;
  boost::shared_ptr<PolyCost> cost;
  boost::shared_ptr<BasisTransformer> basisT;
  // sizes
  // dim_ in parent
  int seg_, deg_;
  BasisBundlePro basis;
  bool solved_{false};
  friend class AxbConstraint;
  friend class BallConstraint;
};
// Cost function built as a symbolic polynomial over the trajectory
// coefficients and segment times.
class PolyCost : public CostFunction {
 public:
  PolyCost(const NLTraj &traj, const NLTimes &times, BasisBundlePro &basis,
           int min_dim);
  decimal_t evaluate();
  ETV gradient();
  ETV hessian();

 private:
  SymbolicPoly poly;
  // Constant tables used when assembling the cost; filled by init_constants().
  MatD cost_v_;
  Eigen::Matrix<int, Eigen::Dynamic, Eigen::Dynamic> cost_n_;
  void init_constants();

 public:
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
};
} // namespace traj_opt
#endif // TRAJ_OPT_PRO_NONLINEAR_TRAJECTORY_H_
|
ncultra/Pegasus-2.5
|
src/Providers/ManagedSystem/OperatingSystem/OperatingSystem_Stub.cpp
|
//%2005////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2000, 2001, 2002 BMC Software; Hewlett-Packard Development
// Company, L.P.; IBM Corp.; The Open Group; Tivoli Systems.
// Copyright (c) 2003 BMC Software; Hewlett-Packard Development Company, L.P.;
// IBM Corp.; EMC Corporation, The Open Group.
// Copyright (c) 2004 BMC Software; Hewlett-Packard Development Company, L.P.;
// IBM Corp.; EMC Corporation; VERITAS Software Corporation; The Open Group.
// Copyright (c) 2005 Hewlett-Packard Development Company, L.P.; IBM Corp.;
// EMC Corporation; VERITAS Software Corporation; The Open Group.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// THE ABOVE COPYRIGHT NOTICE AND THIS PERMISSION NOTICE SHALL BE INCLUDED IN
// ALL COPIES OR SUBSTANTIAL PORTIONS OF THE SOFTWARE. THE SOFTWARE IS PROVIDED
// "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//==============================================================================
//
// Author: <NAME> <<EMAIL>>
// <NAME> <<EMAIL>>
//
// Modified By: <NAME> <<EMAIL>>
// <NAME> <<EMAIL>>
// <NAME> <<EMAIL>>
//
//%////////////////////////////////////////////////////////////////////////////
PEGASUS_USING_STD;

// Stub implementation of the OperatingSystem support class, used on
// platforms that have no native implementation of these queries.
// Every accessor returns false, which the provider interprets as
// "property value not available"; the corresponding CIM property is
// then simply omitted from the instance. The output parameters are
// never written by these stubs.

OperatingSystem::OperatingSystem(void)
{
}

OperatingSystem::~OperatingSystem(void)
{
}

// --- Identification / description properties -------------------------------

Boolean OperatingSystem::getCSName(String& csName)
{
    return false;
}

Boolean OperatingSystem::getName(String& osName)
{
    return false;
}

Boolean OperatingSystem::getCaption(String& caption)
{
    return false;
}

Boolean OperatingSystem::getDescription(String& description)
{
    return false;
}

Boolean OperatingSystem::getInstallDate(CIMDateTime& installDate)
{
    return false;
}

Boolean OperatingSystem::getStatus(String& status)
{
    return false;
}

Boolean OperatingSystem::getVersion(String& osVersion)
{
    return false;
}

Boolean OperatingSystem::getOSType(Uint16& osType)
{
    return false;
}

Boolean OperatingSystem::getOtherTypeDescription(String& otherTypeDescription)
{
    return false;
}

// --- Time-related properties -----------------------------------------------

Boolean OperatingSystem::getLastBootUpTime(CIMDateTime& lastBootUpTime)
{
    return false;
}

Boolean OperatingSystem::getLocalDateTime(CIMDateTime& localDateTime)
{
    return false;
}

Boolean OperatingSystem::getCurrentTimeZone(Sint16& currentTimeZone)
{
    return false;
}

// --- User / process accounting ---------------------------------------------

Boolean OperatingSystem::getNumberOfLicensedUsers(Uint32& numberOfLicensedUsers)
{
    return false;
}

Boolean OperatingSystem::getNumberOfUsers(Uint32& numberOfUsers)
{
    return false;
}

Boolean OperatingSystem::getNumberOfProcesses(Uint32& numberOfProcesses)
{
    return false;
}

Boolean OperatingSystem::getMaxNumberOfProcesses(Uint32& mMaxProcesses)
{
    return false;
}

// --- Memory / swap properties ----------------------------------------------

Boolean OperatingSystem::getTotalSwapSpaceSize(Uint64& mTotalSwapSpaceSize)
{
    return false;
}

Boolean OperatingSystem::getTotalVirtualMemorySize(Uint64& total)
{
    return false;
}

Boolean OperatingSystem::getFreeVirtualMemory(Uint64& freeVirtualMemory)
{
    return false;
}

Boolean OperatingSystem::getFreePhysicalMemory(Uint64& total)
{
    return false;
}

Boolean OperatingSystem::getTotalVisibleMemorySize(Uint64& memory)
{
    return false;
}

Boolean OperatingSystem::getSizeStoredInPagingFiles(Uint64& total)
{
    return false;
}

Boolean OperatingSystem::getFreeSpaceInPagingFiles(
    Uint64& freeSpaceInPagingFiles)
{
    return false;
}

Boolean OperatingSystem::getMaxProcessMemorySize(Uint64& maxProcessMemorySize)
{
    return false;
}

// --- Miscellaneous properties ----------------------------------------------

Boolean OperatingSystem::getDistributed(Boolean& distributed)
{
    return false;
}

Boolean OperatingSystem::getMaxProcsPerUser(Uint32& maxProcsPerUser)
{
    return false;
}

Boolean OperatingSystem::getSystemUpTime(Uint64& mUpTime)
{
    return false;
}

Boolean OperatingSystem::getOperatingSystemCapability(String& scapability)
{
    return false;
}

// --- Extrinsic methods ------------------------------------------------------
// These return Uint32 status codes; `false` converts to 0 here, signalling
// "not supported" on this platform (no reboot/shutdown is attempted).

Uint32 OperatingSystem::_reboot()
{
    return false;
}

Uint32 OperatingSystem::_shutdown()
{
    return false;
}
|
qussarah/declare
|
idea/testData/refactoring/extractFunction/parameters/candidateTypes/mutablyFlexibleType.java
|
import java.util.*;
/** Minimal Java fixture exposing a single data accessor. */
class J {
    /** Returns a fresh, empty, mutable list of strings on every call. */
    public List<String> getData() {
        List<String> data = new ArrayList<String>();
        return data;
    }
}
|
eroma2014/seagrid-rich-client
|
src/main/java/org/seagrid/desktop/util/date/LocalDateAdapter.java
|
package org.seagrid.desktop.util.date;
import java.time.LocalDate;
import javax.xml.bind.annotation.adapters.XmlAdapter;
/**
 * JAXB adapter mapping XML date strings to {@link LocalDate}.
 *
 * <p>Fix: JAXB may invoke an {@code XmlAdapter} with {@code null} when the
 * element/attribute is absent; the original implementation threw a
 * {@code NullPointerException} ({@code marshal}) or a parse exception
 * ({@code unmarshal}) in that case. Both directions are now null-safe and
 * pass {@code null} straight through. Non-null behavior is unchanged.
 */
public class LocalDateAdapter extends XmlAdapter<String, LocalDate> {
    /**
     * Parses an ISO-8601 date string (e.g. {@code "2016-03-01"}).
     *
     * @param v date text, or {@code null} when the XML value is absent
     * @return the parsed date, or {@code null} when {@code v} is {@code null}
     * @throws Exception if {@code v} is non-null but not a valid ISO date
     */
    @Override
    public LocalDate unmarshal(String v) throws Exception {
        return v == null ? null : LocalDate.parse(v);
    }

    /**
     * Formats a date as ISO-8601 via {@link LocalDate#toString()}.
     *
     * @param v the date, or {@code null} when the field is unset
     * @return the ISO-8601 text, or {@code null} when {@code v} is {@code null}
     */
    @Override
    public String marshal(LocalDate v) throws Exception {
        return v == null ? null : v.toString();
    }
}
|
njhale/openshift-installer
|
terraform/providers/aws/vendor/github.com/hashicorp/terraform-provider-aws/internal/service/iot/thing_type.go
|
<filename>terraform/providers/aws/vendor/github.com/hashicorp/terraform-provider-aws/internal/service/iot/thing_type.go
package iot
import (
"fmt"
"log"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/iot"
"github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-provider-aws/internal/conns"
tftags "github.com/hashicorp/terraform-provider-aws/internal/tags"
"github.com/hashicorp/terraform-provider-aws/internal/tfresource"
"github.com/hashicorp/terraform-provider-aws/internal/verify"
)
// https://docs.aws.amazon.com/iot/latest/apireference/API_CreateThingType.html
// ResourceThingType returns the Terraform resource schema for an AWS IoT
// thing type (aws_iot_thing_type). "name" and everything under "properties"
// are ForceNew: the AWS API does not allow them to be changed in place, so
// any change recreates the thing type. Only "deprecated" and tags are
// updatable. Import works by thing-type name (the importer copies the ID
// into the "name" attribute).
// https://docs.aws.amazon.com/iot/latest/apireference/API_CreateThingType.html
func ResourceThingType() *schema.Resource {
	return &schema.Resource{
		Create: resourceThingTypeCreate,
		Read:   resourceThingTypeRead,
		Update: resourceThingTypeUpdate,
		Delete: resourceThingTypeDelete,

		Importer: &schema.ResourceImporter{
			// The resource ID is the thing type name; mirror it into "name"
			// so a subsequent Read can describe the right resource.
			State: func(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
				d.Set("name", d.Id())
				return []*schema.ResourceData{d}, nil
			},
		},

		Schema: map[string]*schema.Schema{
			"name": {
				Type:         schema.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validThingTypeName,
			},
			"properties": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				// Suppresses spurious diffs when the optional block is
				// omitted vs. present-but-empty.
				DiffSuppressFunc: verify.SuppressMissingOptionalConfigurationBlock,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"description": {
							Type:         schema.TypeString,
							Optional:     true,
							ForceNew:     true,
							ValidateFunc: validThingTypeDescription,
						},
						// AWS caps searchable attributes at 3 per thing type.
						"searchable_attributes": {
							Type:     schema.TypeSet,
							Optional: true,
							Computed: true,
							ForceNew: true,
							MaxItems: 3,
							Elem: &schema.Schema{
								Type:         schema.TypeString,
								ValidateFunc: validThingTypeSearchableAttribute,
							},
						},
					},
				},
			},
			"deprecated": {
				Type:     schema.TypeBool,
				Optional: true,
				Default:  false,
			},
			"tags":     tftags.TagsSchema(),
			"tags_all": tftags.TagsSchemaComputed(),
			"arn": {
				Type:     schema.TypeString,
				Computed: true,
			},
		},

		CustomizeDiff: verify.SetTagsDiff,
	}
}
// resourceThingTypeCreate creates the IoT thing type, optionally applies the
// "properties" block and default/resource tags, and — when "deprecated" is
// set in the configuration — immediately deprecates the new thing type.
// The resource ID is the thing type name returned by AWS.
//
// Fixes vs. original: configs[0] is now guarded by a length check (avoids a
// panic on an empty "properties" list), and AWS errors are wrapped with
// context via %w instead of being returned bare.
func resourceThingTypeCreate(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*conns.AWSClient).IoTConn
	defaultTagsConfig := meta.(*conns.AWSClient).DefaultTagsConfig
	tags := defaultTagsConfig.MergeTags(tftags.New(d.Get("tags").(map[string]interface{})))

	params := &iot.CreateThingTypeInput{
		ThingTypeName: aws.String(d.Get("name").(string)),
	}

	if v, ok := d.GetOk("properties"); ok {
		configs := v.([]interface{})
		// Guard against an empty list before indexing element 0.
		if len(configs) > 0 {
			if config, ok := configs[0].(map[string]interface{}); ok && config != nil {
				params.ThingTypeProperties = expandThingTypeProperties(config)
			}
		}
	}

	if len(tags) > 0 {
		params.Tags = Tags(tags.IgnoreAWS())
	}

	log.Printf("[DEBUG] Creating IoT Thing Type: %s", params)
	out, err := conn.CreateThingType(params)
	if err != nil {
		return fmt.Errorf("error creating IoT Thing Type: %w", err)
	}

	d.SetId(aws.StringValue(out.ThingTypeName))

	// AWS has no "create deprecated" flag, so deprecation is a second call.
	if v := d.Get("deprecated").(bool); v {
		params := &iot.DeprecateThingTypeInput{
			ThingTypeName: aws.String(d.Id()),
			UndoDeprecate: aws.Bool(false),
		}

		log.Printf("[DEBUG] Deprecating IoT Thing Type: %s", params)
		_, err := conn.DeprecateThingType(params)
		if err != nil {
			return fmt.Errorf("error deprecating IoT Thing Type (%s): %w", d.Id(), err)
		}
	}

	return resourceThingTypeRead(d, meta)
}
// resourceThingTypeRead refreshes state from DescribeThingType plus a
// ListTags call on the thing type ARN.
//
// Fix vs. original: when AWS reports ResourceNotFoundException the resource
// is removed from state AND the function returns nil. The original removed
// it from state but then still returned the error, which made Terraform
// fail the refresh instead of gracefully dropping the vanished resource.
func resourceThingTypeRead(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*conns.AWSClient).IoTConn
	defaultTagsConfig := meta.(*conns.AWSClient).DefaultTagsConfig
	ignoreTagsConfig := meta.(*conns.AWSClient).IgnoreTagsConfig

	params := &iot.DescribeThingTypeInput{
		ThingTypeName: aws.String(d.Id()),
	}
	log.Printf("[DEBUG] Reading IoT Thing Type: %s", params)
	out, err := conn.DescribeThingType(params)

	if err != nil {
		if tfawserr.ErrCodeEquals(err, iot.ErrCodeResourceNotFoundException) {
			// Gone upstream: drop from state and report success.
			log.Printf("[WARN] IoT Thing Type %q not found, removing from state", d.Id())
			d.SetId("")
			return nil
		}
		return fmt.Errorf("error reading IoT Thing Type (%s): %w", d.Id(), err)
	}

	if out.ThingTypeMetadata != nil {
		d.Set("deprecated", out.ThingTypeMetadata.Deprecated)
	}

	d.Set("arn", out.ThingTypeArn)

	tags, err := ListTags(conn, aws.StringValue(out.ThingTypeArn))

	if err != nil {
		return fmt.Errorf("error listing tags for IoT Thing Type (%s): %w", aws.StringValue(out.ThingTypeArn), err)
	}

	tags = tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig)

	//lintignore:AWSR002
	if err := d.Set("tags", tags.RemoveDefaultConfig(defaultTagsConfig).Map()); err != nil {
		return fmt.Errorf("error setting tags: %w", err)
	}

	if err := d.Set("tags_all", tags.Map()); err != nil {
		return fmt.Errorf("error setting tags_all: %w", err)
	}

	if err := d.Set("properties", flattenIoTThingTypeProperties(out.ThingTypeProperties)); err != nil {
		return fmt.Errorf("error setting properties: %w", err)
	}

	return nil
}
// resourceThingTypeUpdate handles the only two updatable pieces of an IoT
// thing type: the "deprecated" flag (via DeprecateThingType / UndoDeprecate)
// and tags. All other attributes are ForceNew in the schema.
//
// Fix vs. original: errors are wrapped with %w (preserving the cause for
// errors.Is/As) and the deprecate error now carries resource context.
func resourceThingTypeUpdate(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*conns.AWSClient).IoTConn

	if d.HasChange("deprecated") {
		params := &iot.DeprecateThingTypeInput{
			ThingTypeName: aws.String(d.Id()),
			// UndoDeprecate is the inverse of the desired "deprecated" state.
			UndoDeprecate: aws.Bool(!d.Get("deprecated").(bool)),
		}

		log.Printf("[DEBUG] Updating IoT Thing Type: %s", params)
		_, err := conn.DeprecateThingType(params)
		if err != nil {
			return fmt.Errorf("error updating IoT Thing Type (%s): %w", d.Id(), err)
		}
	}

	if d.HasChange("tags_all") {
		o, n := d.GetChange("tags_all")

		if err := UpdateTags(conn, d.Get("arn").(string), o, n); err != nil {
			return fmt.Errorf("error updating tags: %w", err)
		}
	}

	return resourceThingTypeRead(d, meta)
}
// resourceThingTypeDelete deletes the IoT thing type. AWS requires a thing
// type to be deprecated for at least 5 minutes before it may be deleted, so
// this: (1) deprecates it, (2) retries DeleteThingType for up to 6 minutes
// while AWS keeps answering "wait 5 minutes", (3) treats a final
// ResourceNotFoundException as success (someone/something else removed it
// during the wait).
func resourceThingTypeDelete(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*conns.AWSClient).IoTConn

	// In order to delete an IoT Thing Type, you must deprecate it first and wait
	// at least 5 minutes.
	deprecateParams := &iot.DeprecateThingTypeInput{
		ThingTypeName: aws.String(d.Id()),
	}
	log.Printf("[DEBUG] Deprecating IoT Thing Type: %s", deprecateParams)
	_, err := conn.DeprecateThingType(deprecateParams)

	if err != nil {
		return err
	}

	deleteParams := &iot.DeleteThingTypeInput{
		ThingTypeName: aws.String(d.Id()),
	}
	log.Printf("[DEBUG] Deleting IoT Thing Type: %s", deleteParams)

	// 6-minute budget: 5-minute mandatory deprecation delay plus slack.
	err = resource.Retry(6*time.Minute, func() *resource.RetryError {
		_, err := conn.DeleteThingType(deleteParams)
		if err != nil {
			// AWS signals "deprecation window not elapsed" via this message;
			// keep retrying until the window passes.
			if tfawserr.ErrMessageContains(err, iot.ErrCodeInvalidRequestException, "Please wait for 5 minutes after deprecation and then retry") {
				return resource.RetryableError(err)
			}

			// As the delay post-deprecation is about 5 minutes, it may have been
			// deleted in between, thus getting a Not Found Exception.
			if tfawserr.ErrCodeEquals(err, iot.ErrCodeResourceNotFoundException) {
				return nil
			}

			return resource.NonRetryableError(err)
		}

		return nil
	})
	// Retry timed out: make one last direct attempt before giving up.
	if tfresource.TimedOut(err) {
		_, err = conn.DeleteThingType(deleteParams)
		if tfawserr.ErrCodeEquals(err, iot.ErrCodeResourceNotFoundException) {
			return nil
		}
	}
	if err != nil {
		return fmt.Errorf("Error deleting IOT thing type: %s", err)
	}
	return nil
}
|
lechium/iOS1351Headers
|
System/Library/PrivateFrameworks/UIFoundation.framework/NSHTMLReader.h
|
<reponame>lechium/iOS1351Headers<gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.5
* on Friday, April 30, 2021 at 11:34:56 AM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/PrivateFrameworks/UIFoundation.framework/UIFoundation
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
#import <UIFoundation/UIFoundation-Structs.h>
@class NSMutableAttributedString, NSMutableDictionary, NSData, NSURL, NSDictionary, WebView, DOMDocument, DOMRange, NSMutableArray, WebDataSource, NSString;
// Private UIFoundation class (dumped with classdump-dyld): converts HTML —
// either raw data or a DOM range backed by WebKit — into an
// NSMutableAttributedString plus document-level attributes. All names and
// semantics below are inferred from the dump; verify against the framework
// before relying on them.
@interface NSHTMLReader : NSObject {

	// Conversion results.
	NSMutableAttributedString* _attrStr;
	NSMutableDictionary* _documentAttrs;

	// Input and load state.
	NSData* _data;
	NSURL* _baseURL;
	NSDictionary* _options;
	WebView* _webView;
	id _webDelegate;
	DOMDocument* _document;
	DOMRange* _domRange;
	NSMutableArray* _domStartAncestors;
	WebDataSource* _dataSource;

	// Typography defaults applied during conversion.
	NSString* _standardFontFamily;
	double _textSizeMultiplier;
	double _webViewTextSizeMultiplier;
	double _defaultTabInterval;
	double _defaultFontSize;
	double _minimumFontSize;

	// Working stacks/maps for lists and tables encountered while walking
	// the DOM tree.
	NSMutableArray* _textLists;
	NSMutableArray* _textBlocks;
	NSMutableArray* _textTables;
	NSMutableDictionary* _textTableFooters;
	NSMutableArray* _textTableSpacings;
	NSMutableArray* _textTablePaddings;
	NSMutableArray* _textTableRows;
	NSMutableArray* _textTableRowArrays;
	NSMutableArray* _textTableRowBackgroundColors;

	// Per-node caches to avoid repeated CSS/style computation.
	NSMutableDictionary* _computedStylesForElements;
	NSMutableDictionary* _specifiedStylesForElements;
	NSMutableDictionary* _stringsForNodes;
	NSMutableDictionary* _floatsForNodes;
	NSMutableDictionary* _colorsForNodes;
	NSMutableDictionary* _attributesForElements;
	NSMutableDictionary* _elementIsBlockLevel;
	NSMutableDictionary* _fontCache;
	NSMutableArray* _writingDirectionArray;

	// Traversal bookkeeping and limits.
	long long _domRangeStartIndex;
	long long _indexingLimit;
	long long _thumbnailLimit;
	long long _errorCode;
	long long _quoteLevel;
	struct {
		unsigned isSoft : 1;
		unsigned reachedStart : 1;
		unsigned reachedEnd : 1;
		unsigned isIndexing : 1;
		unsigned isTesting : 1;
		unsigned hasTrailingNewline : 1;
		unsigned pad : 26;
	}  _flags;

}
+(void)initialize;
+(id)defaultParagraphStyle;
+(BOOL)allowsAttributedStringAgentForOptions:(id)arg1 ;
+(BOOL)_usesLibXML2ForOptions:(id)arg1 ;
-(void)dealloc;
// Designated entry points; results are read back via -attributedString and
// -documentAttributes after the load completes.
-(id)initWithData:(id)arg1 options:(id)arg2 ;
-(id)attributedString;
-(void)setMutableAttributedString:(id)arg1 ;
-(id)documentAttributes;
// Style/value lookup helpers (backed by the per-node caches above).
-(id)_computedStyleForElement:(id)arg1 ;
-(id)_specifiedStyleForElement:(id)arg1 ;
-(id)_stringForNode:(id)arg1 property:(id)arg2 ;
-(id)_computedStringForNode:(id)arg1 property:(id)arg2 ;
-(BOOL)_getFloat:(double*)arg1 forNode:(id)arg2 property:(id)arg3 ;
-(BOOL)_getComputedFloat:(double*)arg1 forNode:(id)arg2 property:(id)arg3 ;
-(BOOL)_elementIsBlockLevel:(id)arg1 ;
-(id)_blockLevelElementForNode:(id)arg1 ;
-(BOOL)_elementHasOwnBackgroundColor:(id)arg1 ;
-(id)_colorForNode:(id)arg1 property:(id)arg2 ;
-(id)_computedColorForNode:(id)arg1 property:(id)arg2 ;
-(id)_computedAttributesForElement:(id)arg1 ;
-(id)_attributesForElement:(id)arg1 ;
-(Class)_DOMHTMLTableCellElementClass;
-(id)_childrenForNode:(id)arg1 ;
// DOM traversal / attributed-string construction.
-(void)_processMetaElementWithName:(id)arg1 content:(id)arg2 ;
-(void)_processHeadElement:(id)arg1 ;
-(void)_newParagraphForElement:(id)arg1 tag:(id)arg2 allowEmpty:(BOOL)arg3 suppressTrailingSpace:(BOOL)arg4 isEntering:(BOOL)arg5 ;
-(void)_fillInBlock:(id)arg1 forElement:(id)arg2 backgroundColor:(id)arg3 extraMargin:(double)arg4 extraPadding:(double)arg5 isTable:(BOOL)arg6 ;
-(void)_addTableCellForElement:(id)arg1 ;
-(void)_addTableForElement:(id)arg1 ;
-(BOOL)_addAttachmentForElement:(id)arg1 URL:(id)arg2 needsParagraph:(BOOL)arg3 usePlaceholder:(BOOL)arg4 ;
-(void)_traverseNode:(id)arg1 depth:(long long)arg2 embedded:(BOOL)arg3 ;
-(void)_newLineForElement:(id)arg1 ;
-(void)_addQuoteForElement:(id)arg1 opening:(BOOL)arg2 level:(long long)arg3 ;
-(void)_addValue:(id)arg1 forElement:(id)arg2 ;
-(void)_newTabForElement:(id)arg1 ;
-(void)_traverseFooterNode:(id)arg1 depth:(long long)arg2 ;
-(void)_addMarkersToList:(id)arg1 range:(NSRange)arg2 ;
-(BOOL)_enterElement:(id)arg1 tag:(id)arg2 display:(id)arg3 depth:(long long)arg4 embedded:(BOOL)arg5 ;
-(BOOL)_processElement:(id)arg1 tag:(id)arg2 display:(id)arg3 depth:(long long)arg4 ;
-(void)_exitElement:(id)arg1 tag:(id)arg2 display:(id)arg3 depth:(long long)arg4 startIndex:(unsigned long long)arg5 ;
-(void)_processText:(id)arg1 ;
// Web-archive sanitization and loading plumbing.
-(BOOL)_sanitizeWebArchiveDictionary:(id)arg1 ;
-(BOOL)_sanitizeWebArchiveArray:(id)arg1 ;
-(Class)_webArchiveClass;
-(Class)_webViewClass;
-(id)_createWebArchiveForData:(id)arg1 ;
-(id)_webPreferences;
-(void)_parseNode:(id)arg1 ;
-(void)_adjustTrailingNewline;
-(void)_loadUsingWebKit;
-(void)_loadFromDOMRange;
-(void)_loadUsingWebKitOnMainThread;
-(void)_load;
-(id)initWithPath:(id)arg1 options:(id)arg2 ;
-(id)initWithDOMRange:(id)arg1 ;
@end
|
atkins126/Easy3D_webGL
|
ver5/core.js
|
// Easy3D_WebGL
// Core container for state and engine
// <NAME> 2017-2020

"use strict"

// Config

// Camera settings (projection frustum)
var E3D_FOV = 45 * DegToRad;
var E3D_NEAR = 0.1;
var E3D_FAR = 500.0;

// Default viewport size (overwritten on resize from the canvas element)
var E3D_WIDTH = 640;
var E3D_HEIGHT = 480;

// Gravitational constant (in this engine's world units, in/s^2)
var E3D_G = 386.22;

// Global members — single shared engine state, replaced during init
var TIMER = new E3D_timing(E3D_onTick_callback);
var CANVAS = null;
var CONTEXT = null;
var SCENE = new E3D_scene_default("scene0");
var CAMERA = new E3D_camera("camera0");
var INPUTS = new E3D_input();

// Content containers
// Those 3 arrays are linked: the animation and body class at a same given index is used to compute the state of this index's entity.
var ENTITIES = []; // E3D_entity, mesh content and information, spatial data, state
var ANIMATIONS = []; // E3D_animation that transforms an entity's position and rotation, or any other properties
var BODIES = []; // E3D_body to interract with other entity's body

// Callbacks (all optional; falsy entries are skipped by the tick handlers)

// base events
var CB_input;
var CB_pointerlockMove;
var CB_pointerlockEvent;
var CB_resize;
var CB_tick;

// engine loop stages, invoked in this order each tick
var CB_processInputs;
var CB_processCamera;
var CB_processAnimations;
var CB_processPreRender;
var CB_processRender;
var CB_processPostRender;
// Engine initialization functions

// One-call engine bootstrap: context, timer, perspective camera, all input
// types, default scene and shaders, lights. `element` is the host DOM node
// (optional; see E3D_InitContext for the fallbacks). On success the timer is
// started and rendering begins; on failure SCENE stays inactive and nothing
// is scheduled.
// Context, timer, perspective camera, all input types, default scene and shaders, lights
function E3D_InitAll(element) {
    E3D_InitScene(element);

    if (SCENE) {
        log("Camera", false);
        // Replace the placeholder camera with a perspective camera.
        CAMERA = new E3D_camera_persp("camera0p");
        E3D_onResize();

        log("Inputs", false);
        INPUTS.supportKeyboard();
        INPUTS.supportMouse();
        INPUTS.supportTouch();
        INPUTS.supportPointerLock();

        log("Timer", false);
        // Use the full default tick (inputs + camera + animations + render).
        TIMER.onTick = E3D_onTick_default;
        TIMER.run();
    }
}
// Context, scene, lights
// Creates the WebGL context (via E3D_InitContext), then compiles the default
// lighting shader program and initializes the global SCENE. On shader/scene
// failure the error is logged and SCENE is left in its previous state.
function E3D_InitScene(element) {
    E3D_InitContext(element);

    if (CONTEXT) {
        log("Scene Initialization", false);
        try {
            log("Shader Program", false);
            SCENE.program = new E3D_program("program_default_lights", programData_default);
            SCENE.initialize();
            // E3D_ACTIVE gates rendering in the tick handlers.
            SCENE.state = E3D_ACTIVE;
        } catch (e) {
            log(e, false);
            return;
        }
    }
}
// Context only
// Resolves/creates the host element and canvas, then acquires the "webgl"
// context into the global CONTEXT. Fallback chain for `element`:
// argument → #E3D_mainDiv → a new full-window div appended to <body>.
// If `element` is itself a <canvas> it is used directly; otherwise a child
// canvas is created. On WebGL failure CONTEXT stays null and the timer is
// paused.
function E3D_InitContext(element) {
    if (element == undefined) {
        log("No target element provided", false);
        element = document.getElementById("E3D_mainDiv");
        if (element == undefined) {
            log("No target element found", false);
            // Last resort: create a full-window container.
            element = document.createElement("div");
            element.id = "E3D_mainDiv";
            element.style.position = "absolute";
            element.style.width = "100%";
            element.style.height = "100%";
            element.style.left = "0px";
            element.style.top = "0px";
            document.body.appendChild(element);
        }
    }

    if (element.tagName == "CANVAS") {
        CANVAS = element;
    } else {
        CANVAS = document.createElement("canvas");
        CANVAS.id = "E3D_canvas";
        element.appendChild(CANVAS);
        CANVAS.style.width = "100%";
        CANVAS.style.height = "100%";
    }

    // Match the drawing-buffer resolution to the CSS layout size.
    CANVAS.width = CANVAS.offsetWidth;
    CANVAS.height = CANVAS.offsetHeight;

    window.addEventListener("resize", E3D_onResize);

    log("Context Initialization", false);
    CONTEXT = CANVAS.getContext("webgl");
    if (!CONTEXT) {
        log("Unable to initialize WebGL. Your browser or machine may not support it.", false);
        TIMER.pause();
        return;
    }
}
// on resize handlers

// Default resize function
// Syncs E3D_WIDTH/E3D_HEIGHT, the canvas drawing buffer, the GL viewport and
// the camera/input state with the canvas's current CSS size, then invokes the
// optional CB_resize callback.
function E3D_onResize() {
    // get new size
    E3D_WIDTH = CANVAS.offsetWidth;
    E3D_HEIGHT = CANVAS.offsetHeight;

    // adjust canvas resolution to fit new size, remove or override to lower the render viewport resolution
    CANVAS.width = E3D_WIDTH;
    CANVAS.height = E3D_HEIGHT;

    // reset viewport and camera
    CONTEXT.viewport(0, 0, E3D_WIDTH, E3D_HEIGHT);
    CAMERA.resize();
    INPUTS.resize();

    if (E3D_DEBUG_VERBOSE) log("Resized to " + E3D_WIDTH + "x" + E3D_HEIGHT);
    if (CB_resize) CB_resize();
}
// on tick handlers

// Default timer tick event handler
// Full engine frame: optional CB_tick hook, input smoothing, camera update
// from smoothed input deltas, one animation pass, then render when the scene
// is active.
function E3D_onTick_default() {
    if (CB_tick) CB_tick();

    // Inputs (smoothing factor 6 — presumably frames of easing; confirm in E3D_input)
    INPUTS.processInputs();
    INPUTS.smoothRotation(6);
    INPUTS.smoothPosition(6);

    // Camera (note the negated X translation delta)
    CAMERA.moveBy(-INPUTS.px_delta_smth, INPUTS.py_delta_smth, INPUTS.pz_delta_smth,
                   INPUTS.rx_delta_smth, INPUTS.ry_delta_smth, INPUTS.rz_delta_smth);
    CAMERA.updateMatrix();

    // Animations
    singlePassAnimator();

    // Render
    if (SCENE.state == E3D_ACTIVE) {
        SCENE.preRender();
        SCENE.render();
        SCENE.postRender();
    }
}
// timer tick handler for scene only, callbacks for the rest
// The engine only drives the render pipeline here; inputs, camera and
// animations are fully delegated to the optional user callbacks.
function E3D_onTick_scene() {
    if (CB_processInputs) CB_processInputs();
    if (CB_processCamera) CB_processCamera();
    if (CB_processAnimations) CB_processAnimations();

    // Render (user callbacks interleaved with the scene's own passes)
    if (SCENE.state == E3D_ACTIVE) {
        SCENE.preRender();
        if (CB_processPreRender) CB_processPreRender();
        SCENE.render();
        if (CB_processRender) CB_processRender();
        SCENE.postRender();
        if (CB_processPostRender) CB_processPostRender();
    }
}
// timer tick handler for callbacks only
// Pure dispatcher: runs each engine-loop stage callback in order, skipping
// any stage that is unset (falsy). The engine itself does no work here.
function E3D_onTick_callback() {
    const runStage = (stage) => { if (stage) stage(); };
    runStage(CB_processInputs);
    runStage(CB_processCamera);
    runStage(CB_processAnimations);
    runStage(CB_processPreRender);
    runStage(CB_processRender);
    runStage(CB_processPostRender);
}
// Default Logger
// Writes "[elapsed_ms] text" to the console, and additionally appends it to
// the #E3D_logDiv element (auto-scrolled) when silent is false. The element
// lookup is lazy and cached in E3D_logElement.
var E3D_logElement = null;
var E3D_logStart = Date.now();
function log(text, silent = true) {
    const stamp = "[" + (Date.now() - E3D_logStart) + "] " + text;
    if (!silent) {
        if (E3D_logElement == null) E3D_logElement = document.getElementById("E3D_logDiv");
        if (E3D_logElement != null) {
            E3D_logElement.innerHTML += stamp + "<br />";
            // Keep the newest entry visible.
            E3D_logElement.scrollTop = E3D_logElement.scrollHeight - E3D_logElement.offsetHeight;
        }
    }
    console.log(stamp);
}
// Entities management

// Add a new entity to the current scene and setup the GPU buffers
// Rejects duplicate ids (returns -1). Creates and fills the vertex/color/
// normal (and optional stroke-index) GL buffers, computes the entity's
// bounding radius for culling, and appends matching slots to the parallel
// ENTITIES/ANIMATIONS/BODIES arrays. Returns the new entity's index.
function E3D_addEntity(ent, animation = false, body = false) {
    if (E3D_getEntityIndexFromId(ent.id) != -1) {
        log("Duplicate entity ID: " + ent.id);
        return -1;
    }

    // Initialize context data buffers
    ent.vertexBuffer = CONTEXT.createBuffer();
    ent.colorBuffer = CONTEXT.createBuffer();
    ent.normalBuffer = CONTEXT.createBuffer();
    ent.strokeIndexBuffer = CONTEXT.createBuffer();

    // Dynamic entities get DYNAMIC_DRAW so later re-uploads are cheap.
    var usage = (ent.isDynamic) ? CONTEXT.DYNAMIC_DRAW : CONTEXT.STATIC_DRAW;

    CONTEXT.bindBuffer(CONTEXT.ARRAY_BUFFER, ent.vertexBuffer);
    CONTEXT.bufferData(CONTEXT.ARRAY_BUFFER, ent.vertexArray, usage);

    CONTEXT.bindBuffer(CONTEXT.ARRAY_BUFFER, ent.colorBuffer);
    CONTEXT.bufferData(CONTEXT.ARRAY_BUFFER, ent.colorArray, usage);

    CONTEXT.bindBuffer(CONTEXT.ARRAY_BUFFER, ent.normalBuffer);
    CONTEXT.bufferData(CONTEXT.ARRAY_BUFFER, ent.normalArray, usage);

    // Stroke indices are element indices, hence ELEMENT_ARRAY_BUFFER.
    if (ent.strokeIndexArray) {
        CONTEXT.bindBuffer(CONTEXT.ELEMENT_ARRAY_BUFFER, ent.strokeIndexBuffer);
        CONTEXT.bufferData(CONTEXT.ELEMENT_ARRAY_BUFFER, ent.strokeIndexArray, usage);
    }

    // Radius of the farthest vertex, used by E3D_check_entity_visible.
    ent.visibilityDistance = v3_length(E3D_calculate_max_pos(ent.vertexArray));
    ent.updateMatrix();

    // update lists (the 3 arrays stay index-aligned)
    ENTITIES.push(ent);
    ANIMATIONS.push(animation ? new E3D_animation() : null);
    BODIES.push(body ? new E3D_body() : null);

    // return new entity's index in the lists
    return ENTITIES.length - 1;
}
// Linear search of the global ENTITIES list by entity id (loose == match,
// as elsewhere in this file). Returns the index of the first entity whose
// id matches, or -1 when none does.
function E3D_getEntityIndexFromId(id) {
    for (const [index, entity] of ENTITIES.entries()) {
        if (entity.id == id) return index;
    }
    return -1;
}
// Marks an already-registered entity's GPU data as stale so the renderer
// re-uploads it, and recomputes its culling radius and matrix. If the id is
// not registered yet, falls back to E3D_addEntity and returns the new index.
// NOTE(review): for the update path the function returns undefined while the
// add path returns an index — callers should not rely on the return value.
function E3D_updateEntityData(ent) {
    let idx = E3D_getEntityIndexFromId(ent.id);
    if (idx > -1) {
        // Flags read by the render path to trigger a buffer re-upload.
        ent.dataContentChanged = true;
        ent.dataSizeChanged = true;
        ent.visibilityDistance = v3_length(E3D_calculate_max_pos(ent.vertexArray));
        ent.updateMatrix();
    } else {
        return E3D_addEntity(ent);
    }
}
// Removes the entity with the given id from the three parallel scene lists,
// optionally releasing its GL buffers first. Unknown ids are ignored.
// Fix: the original called this.E3D_getEntityIndexFromId(id); this file is
// under "use strict", so `this` is undefined in a plain function call and
// the lookup threw a TypeError. Call the module-level function directly.
function E3D_removeEntity(id, deleteBuffers = true) {
    let idx = E3D_getEntityIndexFromId(id);
    if (idx > -1) {
        if (deleteBuffers) {
            CONTEXT.deleteBuffer(ENTITIES[idx].vertexBuffer);
            CONTEXT.deleteBuffer(ENTITIES[idx].colorBuffer);
            CONTEXT.deleteBuffer(ENTITIES[idx].normalBuffer);
            CONTEXT.deleteBuffer(ENTITIES[idx].strokeIndexBuffer);
        }
        // Keep the 3 arrays index-aligned.
        ENTITIES.splice(idx, 1);
        ANIMATIONS.splice(idx, 1);
        BODIES.splice(idx, 1);
    }
}
// Clears the selected aspects of an entity in place (mesh data, animation,
// physics body) without removing its slot from the lists. Unknown ids are
// ignored.
// Fix: the original called this.E3D_getEntityIndexFromId(id); this file is
// under "use strict", so `this` is undefined in a plain function call and
// the lookup threw a TypeError. Call the module-level function directly.
function E3D_clearEntity(id, mesh = true, animation = true, body = true) {
    let idx = E3D_getEntityIndexFromId(id);
    if (idx > -1) {
        if (mesh) ENTITIES[idx].clear();
        if (animation && (ANIMATIONS[idx] != null)) ANIMATIONS[idx] = null;
        if (body && (BODIES[idx] != null)) BODIES[idx].clear();
    }
}
// Clones a registered entity under a new id, duplicating its animation and
// body slots when present, and returns the new entity (or undefined with a
// logged message on a bad/duplicate id). Dynamic entities get their own GL
// buffers; static clones share the source's buffers via cloneData.
// Fix: the stroke index buffer was bound/filled as ARRAY_BUFFER here, which
// is inconsistent with E3D_addEntity — index data must use
// ELEMENT_ARRAY_BUFFER for drawElements to read it.
function E3D_cloneEntity(id, newId) {
    let idx = E3D_getEntityIndexFromId(id);
    if ((idx > -1) && (id != newId)) {
        var ent = new E3D_entity(newId, ENTITIES[idx].isDynamic);
        ent.cloneData(ENTITIES[idx]);

        if (ent.isDynamic) {
            // Dynamic clones need their own buffers (they may be re-uploaded
            // independently of the source entity).
            ent.vertexBuffer = CONTEXT.createBuffer();
            ent.colorBuffer = CONTEXT.createBuffer();
            ent.normalBuffer = CONTEXT.createBuffer();
            ent.strokeIndexBuffer = CONTEXT.createBuffer();

            CONTEXT.bindBuffer(CONTEXT.ARRAY_BUFFER, ent.vertexBuffer);
            CONTEXT.bufferData(CONTEXT.ARRAY_BUFFER, ent.vertexArray, CONTEXT.DYNAMIC_DRAW);

            CONTEXT.bindBuffer(CONTEXT.ARRAY_BUFFER, ent.colorBuffer);
            CONTEXT.bufferData(CONTEXT.ARRAY_BUFFER, ent.colorArray, CONTEXT.DYNAMIC_DRAW);

            CONTEXT.bindBuffer(CONTEXT.ARRAY_BUFFER, ent.normalBuffer);
            CONTEXT.bufferData(CONTEXT.ARRAY_BUFFER, ent.normalArray, CONTEXT.DYNAMIC_DRAW);

            // Index data: ELEMENT_ARRAY_BUFFER (was ARRAY_BUFFER — bug).
            if (ent.strokeIndexArray) {
                CONTEXT.bindBuffer(CONTEXT.ELEMENT_ARRAY_BUFFER, ent.strokeIndexBuffer);
                CONTEXT.bufferData(CONTEXT.ELEMENT_ARRAY_BUFFER, ent.strokeIndexArray, CONTEXT.DYNAMIC_DRAW);
            }

            ent.dataSizeChanged = true;
        }

        ENTITIES.push(ent);

        var anim = null;
        if (ANIMATIONS[idx] != null) {
            anim = new E3D_animation();
            anim.cloneData(ANIMATIONS[idx]);
        }
        ANIMATIONS.push(anim);

        var body = null;
        if (BODIES[idx] != null) {
            body = new E3D_body();
            body.cloneData(BODIES[idx]);
        }
        BODIES.push(body);

        return ent; // return reference to new entity
    } else {
        log("Invalid entity ID (not found or duplicate): " + id);
    }
}
// Scans a flat [x0,y0,z0, x1,y1,z1, ...] vertex array and returns (as a v3)
// the vertex farthest from the origin. Distances are compared squared, so no
// square roots are taken; ties keep the earliest vertex. An empty array
// yields the zero vector.
function E3D_calculate_max_pos(vertArray) {
    const farthest = v3_new();
    const candidate = v3_new();
    let bestDistSq = 0;
    let i = 0;
    while (i < vertArray.length) {
        v3_val_res(candidate, vertArray[i], vertArray[i + 1], vertArray[i + 2]);
        const distSq = v3_lengthsquared(candidate);
        if (distSq > bestDistSq) {
            v3_copy(farthest, candidate);
            bestDistSq = distSq;
        }
        i += 3;
    }
    return farthest;
}
// Scratch vector reused across calls to avoid per-frame allocation.
var _E3D_check_entity_visible_pos = v3_new();

// Basic culling, only if in front of camera plane
// Transforms the entity's position into camera space and tests only the Z
// depth against the near/far planes, padded by the entity's bounding radius
// (visibilityDistance). Entities flagged non-cullable are always visible.
// NOTE(review): no lateral (X/Y) frustum test is done — this can report
// entities beside the frustum as visible; presumably intentional for speed.
function E3D_check_entity_visible(idx) {
    if (ENTITIES[idx].isVisibiltyCullable) {
        v3_sub_res(_E3D_check_entity_visible_pos, ENTITIES[idx].position, CAMERA.position);
        CAMERA.negateCamera_mod(_E3D_check_entity_visible_pos);
        var dist = -_E3D_check_entity_visible_pos[2]; // only check for Z
        return ( ((dist - ENTITIES[idx].visibilityDistance) < E3D_FAR) &&
                 ((dist + ENTITIES[idx].visibilityDistance) > E3D_NEAR) );
    }
    return true;
}
|
iraf-community/stsdas
|
stsdas/pkg/hst_calib/nicmos/calnica/n_photcalc.c
|
<reponame>iraf-community/stsdas<gh_stars>1-10
# include <hstio.h> /* defines HST I/O functions */
# include "calnic.h" /* defines NICMOS data structure */
# include "calnica.h" /* defines CALNICA data structure */
/* N_PHOTCALC: Store the photometry parameter values in the global header
** keywords PHOTMODE, PHOTFLAM, PHOTFNU, PHOTZPT, PHOTPLAM, and PHOTBW.
**
** The input data are NOT modified (only the global header is modified).
**
** Revision history:
** H.Bushouse Sept. 1995 Build 1
** H.Bushouse April 1996 Upgraded for Build 2 (Version 2.0)
** H.Bushouse 09-Feb-1999 Updated use of putKey routines for HSTIO v2.1
** (Version 3.2.2)
**
** <NAME> added temperature dependent scaling for the photometry
*/
int n_photcalc (NicInfo *nic, PhotData *phot, MultiNicmosGroup *input) {

/* Arguments:
**	nic	 i: NICMOS info structure
**	phot	 i: Photometry parameters
**	input	io: input image
**
** Writes the photometry keywords to the global header when the PHOT step is
** set to PERFORM, applies the temperature-dependent zero-point correction to
** every group (see n_photzpt below), then records the calibration step in
** each group's headers. Returns the global `status` (0 on success, 1 on any
** keyword-write failure).
*/

	/* Function definitions */
	int n_calReport (CalStep *, int, Hdr *, Hdr *);
	/* NOTE(review): n_photzpt is called below without a visible prototype
	** here — presumably declared in calnica.h; confirm. */

	if (nic->PHOT.corr == PERFORM) {

	    /* Write the photometry values to header keywords */
	    if (putKeyS (input->group[0].globalhdr, "PHOTMODE", phot->mode, ""))
		return (status = 1);

	    if (putKeyF (input->group[0].globalhdr, "PHOTFLAM", phot->flam, ""))
		return (status = 1);

	    if (putKeyF (input->group[0].globalhdr, "PHOTFNU", phot->fnu, ""))
		return (status = 1);

	    if (putKeyF (input->group[0].globalhdr, "PHOTZPT", phot->zpt, ""))
		return (status = 1);

	    if (putKeyF (input->group[0].globalhdr, "PHOTPLAM", phot->plam, ""))
		return (status = 1);

	    if (putKeyF (input->group[0].globalhdr, "PHOTBW", phot->bw, ""))
		return (status = 1);

	    /* Do the temperature-dependent zero-point correction for each group
	    ** (groups are processed in descending order, as elsewhere in CALNICA) */
	    for (nic->group=nic->ngroups; nic->group >= 1; nic->group--) {
		 if (n_photzpt (nic, &(input->group[nic->group-1]), phot))
		     return (status);
	    }

	    /* ZPSCALE/PHOTFERR reflect the correction computed by n_photzpt */
	    if (putKeyF (input->group[0].globalhdr, "ZPSCALE", phot->zpscale, ""))
		return (status = 1);

	    if (putKeyF (input->group[0].globalhdr, "PHOTFERR", phot->ferr, ""))
		return (status = 1);
	}

	/* Record the PHOT step outcome in each group's science header */
	for (nic->group=nic->ngroups; nic->group >= 1; nic->group--) {
	     n_calReport (&nic->PHOT, nic->group,
			  &input->group[nic->group-1].sci.hdr,
			  input->group[nic->group-1].globalhdr);
	}

	/* Successful return */
	return (status = 0);
}
/* N_PHOTZPT: Correct the NICMOS image for the effects of the
** temperature-dependent photometric zero-point changes. The
** zero-point scaling factor is computed from values read in
** from the PHOTTAB reference table. This scaling factor gets
** applied in-place to the science image. The scaling factor
** image error is combined with the science
** data errors. The input DQ, SAMP and TIME arrays
** are unchanged.
** NOTE(review): the in-place scaling is delegated to n_amulk; whether the
** error combination described above happens inside n_amulk cannot be
** confirmed from this file.
**
** Revision history:
** <NAME>	Oct 2008	Build 1
**
*/

int n_photzpt (NicInfo *nic, SingleNicmosGroup *input,
	       PhotData *phot) {

/* Arguments:
**	nic	 i: NICMOS info structure
**	phot	 i: photometry table structure
**	input	io: image to be zero-point corrected
**
** Returns 0 in all paths; when the correction is skipped (zero exposure,
** missing bias temperature, missing coefficients, or temperature out of
** range) a warning is logged and the image is left untouched.
*/

	float zpscale;	/* multiplicative zero-point correction factor */

	/* Function definitions */
	void n_amulk (SingleNicmosGroup *, float);

	/* If the exposure time is zero, don't bother with the correction */
	if (nic->exptime[nic->group-1] == 0)
	    return (status = 0);

	/* If tfbtemp is invalid/not computed, then skip this correction
	** (-1 is the sentinel for "no valid temperature-from-bias") */
	if (nic->tfbtemp == -1) {
	    sprintf(MsgText,"Photometric zero-point NOT corrected.");
	    n_warn(MsgText);
	    sprintf(MsgText,"No valid temperature from bias found.");
	    n_message(MsgText);
	    return (status=0);
	}

	/* Check to see if PHOTTAB had temperature-dependent zero-point
	   coefficients and limits. If not, skip this correction. */
	if (phot->ferr == -1 || phot->tfblow == -1) {
	    sprintf(MsgText,"Photometric zero-point NOT corrected.");
	    n_warn(MsgText);
	    sprintf(MsgText,"No Photometric zero-point coefficients found.");
	    n_message(MsgText);
	    return (status = 0);
	}

	/* Perform temperature bounds checking to see whether the image
	   temperature is within the range which can be corrected by the
	   relationship found in the PHOTTAB.
	*/
	if (nic->tfbtemp < phot->tfblow || nic->tfbtemp > phot->tfbhigh) {
	    sprintf(MsgText,"Photometric zero-point NOT corrected.");
	    n_warn(MsgText);
	    sprintf(MsgText,"Temperature from bias of %.3f out of limits: %.3f to %.3f",nic->tfbtemp, phot->tfblow, phot->tfbhigh);
	    n_message(MsgText);
	    return (status=0);
	}

	/* Linear model about the reference temperature:
	** zpscale = c1 * (T - Tref) + c0 */
	zpscale = (phot->f_c1 * (nic->tfbtemp - phot->reft)) + phot->f_c0;
	phot->zpscale = zpscale;

	/* Do the zero-point correction in-place in input */
	n_amulk (input, zpscale);

	return (status=0);
}
|
giantswarm/happa
|
testUtils/mockHttpCalls/status.js
|
<filename>testUtils/mockHttpCalls/status.js
import { Providers } from 'shared/constants';
import { V4_CLUSTER } from './constantsAndHelpers';
// Mock response for a v4 AWS cluster status endpoint: one master and three
// workers in eu-central-1, desired capacity 3.
// NOTE(review): in this fixture the `ip` key sits inside each node's
// `labels` map (not as a sibling of `labels`) — presumably mirroring the
// real API payload; confirm against a live status response before changing.
export const v4AWSClusterStatusResponse = {
  aws: {
    availabilityZones: [
      {
        name: 'eu-central-1a',
        subnet: {
          private: { cidr: '10.1.2.0/25' },
          public: { cidr: '10.1.2.128/25' },
        },
      },
    ],
    autoScalingGroup: { name: '' },
  },
  cluster: {
    conditions: [
      {
        lastTransitionTime: '2019-11-15T15:54:05.711696992Z',
        status: 'True',
        type: 'Created',
      },
    ],
    network: { cidr: '10.1.2.0/24' },
    nodes: [
      {
        labels: {
          'aws-operator.giantswarm.io/version': '5.5.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AWSInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'eu-central-1',
          'failure-domain.beta.kubernetes.io/zone': 'eu-central-1c',
          'giantswarm.io/provider': Providers.AWS,
          ip: '10.1.2.18',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname':
            'ip-10-1-2-18.eu-central-1.compute.internal',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-11-14T05:28:11.958410127Z',
        name: 'ip-10-1-2-18.eu-central-1.compute.internal',
        version: '5.5.0',
      },
      {
        labels: {
          'aws-operator.giantswarm.io/version': '5.5.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AWSInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'eu-central-1',
          'failure-domain.beta.kubernetes.io/zone': 'eu-central-1c',
          'giantswarm.io/provider': Providers.AWS,
          ip: '10.1.2.49',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname':
            'ip-10-1-2-49.eu-central-1.compute.internal',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'master',
          'node-role.kubernetes.io/master': '',
          'node.kubernetes.io/master': '',
          role: 'master',
        },
        lastTransitionTime: '2019-11-14T05:28:11.958410751Z',
        name: 'ip-10-1-2-49.eu-central-1.compute.internal',
        version: '5.5.0',
      },
      {
        labels: {
          'aws-operator.giantswarm.io/version': '5.5.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AWSInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'eu-central-1',
          'failure-domain.beta.kubernetes.io/zone': 'eu-central-1c',
          'giantswarm.io/provider': Providers.AWS,
          ip: '10.1.2.52',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname':
            'ip-10-1-2-52.eu-central-1.compute.internal',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-11-14T05:28:11.958411157Z',
        name: 'ip-10-1-2-52.eu-central-1.compute.internal',
        version: '5.5.0',
      },
      {
        labels: {
          'aws-operator.giantswarm.io/version': '5.5.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AWSInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'eu-central-1',
          'failure-domain.beta.kubernetes.io/zone': 'eu-central-1c',
          'giantswarm.io/provider': Providers.AWS,
          ip: '10.1.2.85',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname':
            'ip-10-1-2-85.eu-central-1.compute.internal',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-11-14T05:28:11.958411586Z',
        name: 'ip-10-1-2-85.eu-central-1.compute.internal',
        version: '5.5.0',
      },
    ],
    resources: null,
    scaling: { desiredCapacity: 3 },
    versions: [],
  },
};
// Mock API response: v4 cluster status for an Azure cluster (test fixture).
// Shape mirrors the Giant Swarm v4 status endpoint: one master node plus three
// workers, and an in-progress 'instancev11' resource in stage InstancesUpgrading.
// NOTE(review): the `aws` key is present (empty) even on this Azure response —
// presumably the real API always includes it; confirm before removing.
export const v4AzureClusterStatusResponse = {
  aws: { availabilityZones: null, autoScalingGroup: { name: '' } },
  cluster: {
    conditions: [
      {
        lastTransitionTime: '2019-11-29T16:03:28.054594841Z',
        status: 'True',
        type: 'Created',
      },
    ],
    network: { cidr: '' },
    nodes: [
      {
        labels: {
          'azure-operator.giantswarm.io/version': '2.7.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AzureInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'westeurope',
          'failure-domain.beta.kubernetes.io/zone': '0',
          'giantswarm.io/provider': Providers.AZURE,
          ip: '10.1.0.5',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'o7oyb-master-000000',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'master',
          'node-role.kubernetes.io/master': '',
          'node.kubernetes.io/master': '',
          role: 'master',
        },
        lastTransitionTime: '2019-11-29T16:03:27.992976986Z',
        name: 'o7oyb-master-000000',
        version: '2.7.0',
      },
      {
        labels: {
          'azure-operator.giantswarm.io/version': '2.7.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AzureInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'westeurope',
          'failure-domain.beta.kubernetes.io/zone': '4',
          'giantswarm.io/provider': Providers.AZURE,
          ip: '10.1.1.8',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'o7oyb-worker-000004',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-11-29T16:03:27.992978386Z',
        name: 'o7oyb-worker-000004',
        version: '2.7.0',
      },
      {
        labels: {
          'azure-operator.giantswarm.io/version': '2.7.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AzureInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'westeurope',
          'failure-domain.beta.kubernetes.io/zone': '0',
          'giantswarm.io/provider': Providers.AZURE,
          ip: '10.1.1.9',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'o7oyb-worker-000005',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-11-29T16:03:27.992978986Z',
        name: 'o7oyb-worker-000005',
        version: '2.7.0',
      },
      {
        labels: {
          'azure-operator.giantswarm.io/version': '2.7.0',
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/instance-type': V4_CLUSTER.AzureInstanceType,
          'beta.kubernetes.io/os': 'linux',
          'failure-domain.beta.kubernetes.io/region': 'westeurope',
          'failure-domain.beta.kubernetes.io/zone': '1',
          'giantswarm.io/provider': Providers.AZURE,
          ip: '10.1.1.10',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'o7oyb-worker-000006',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-11-29T16:03:27.992979386Z',
        name: 'o7oyb-worker-000006',
        version: '2.7.0',
      },
    ],
    resources: [
      {
        conditions: [
          {
            lastTransitionTime: '0001-01-01T00:00:00Z',
            status: 'InstancesUpgrading',
            type: 'Stage',
          },
        ],
        name: 'instancev11',
      },
    ],
    scaling: { desiredCapacity: 0 },
    versions: [
      {
        date: '0001-01-01T00:00:00Z',
        lastTransitionTime: '2019-11-29T16:03:28.105736089Z',
        semver: '2.7.0',
      },
    ],
  },
};
// Mock API response: v4 cluster status for a KVM cluster (test fixture).
// One master and three workers; KVM nodes carry no failure-domain or
// instance-type labels, resources is null, and scaling.desiredCapacity is 0.
// NOTE(review): the `aws` key is present (empty) even on this KVM response —
// presumably the real API always includes it; confirm before removing.
export const v4KVMClusterStatusResponse = {
  aws: { availabilityZones: null, autoScalingGroup: { name: '' } },
  cluster: {
    conditions: [
      {
        lastTransitionTime: '2019-11-27T09:22:45.131944981Z',
        status: 'True',
        type: 'Created',
      },
    ],
    network: { cidr: '' },
    nodes: [
      {
        labels: {
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/os': 'linux',
          'giantswarm.io/provider': Providers.KVM,
          ip: '172.23.9.10',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'master-w0je0-7fcb8856b6-4r8jj',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'master',
          'kvm-operator.giantswarm.io/version': '3.10.0',
          'node-role.kubernetes.io/master': '',
          'node.kubernetes.io/master': '',
          role: 'master',
        },
        lastTransitionTime: '2019-12-02T04:36:46.868446664Z',
        name: 'master-w0je0-7fcb8856b6-4r8jj',
        version: '3.10.0',
      },
      {
        labels: {
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/os': 'linux',
          'giantswarm.io/provider': Providers.KVM,
          ip: '172.23.9.194',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'worker-3c8x8-85c74757f5-x95d5',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'kvm-operator.giantswarm.io/version': '3.10.0',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-12-02T04:36:46.868447575Z',
        name: 'worker-3c8x8-85c74757f5-x95d5',
        version: '3.10.0',
      },
      {
        labels: {
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/os': 'linux',
          'giantswarm.io/provider': Providers.KVM,
          ip: '172.23.9.186',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'worker-87jfz-bf8df564c-jnst9',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'kvm-operator.giantswarm.io/version': '3.10.0',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-12-02T04:36:46.868448272Z',
        name: 'worker-87jfz-bf8df564c-jnst9',
        version: '3.10.0',
      },
      {
        labels: {
          'beta.kubernetes.io/arch': 'amd64',
          'beta.kubernetes.io/os': 'linux',
          'giantswarm.io/provider': Providers.KVM,
          ip: '172.23.9.234',
          'kubernetes.io/arch': 'amd64',
          'kubernetes.io/hostname': 'worker-r0e8m-696fd6c7bc-slgsh',
          'kubernetes.io/os': 'linux',
          'kubernetes.io/role': 'worker',
          'kvm-operator.giantswarm.io/version': '3.10.0',
          'node-role.kubernetes.io/worker': '',
          'node.kubernetes.io/worker': '',
          role: 'worker',
        },
        lastTransitionTime: '2019-12-02T04:36:46.868448804Z',
        name: 'worker-r0e8m-696fd6c7bc-slgsh',
        version: '3.10.0',
      },
    ],
    resources: null,
    scaling: { desiredCapacity: 0 },
    versions: [
      {
        date: '0001-01-01T00:00:00Z',
        lastTransitionTime: '2019-11-27T09:22:45.332250352Z',
        semver: '3.10.0',
      },
    ],
  },
};
|
liuyanmin/spring-boot-web
|
src/main/java/com/lym/springboot/web/util/HttpServletResponseUtil.java
|
package com.lym.springboot.web.util;
import com.alibaba.fastjson.JSON;
import javax.servlet.http.HttpServletResponse;
import java.io.PrintWriter;
/**
 * Helpers for writing a JSON (or raw string) body directly to an
 * {@link HttpServletResponse}.
 *
 * Created by liuyanmin on 2019/9/29.
 */
public class HttpServletResponseUtil {
    /** Character encoding applied to every response written by this utility. */
    private static final String UTF8 = "UTF-8";
    /** Content type applied to every response written by this utility. */
    private static final String CONTENT_TYPE = "application/json";
    private HttpServletResponseUtil() {
        // Utility class: forbid instantiation, even via reflection.
        throw new AssertionError();
    }
    /**
     * Serializes {@code object} to JSON with fastjson and writes it as the
     * response body.
     *
     * @param response the servlet response to write to
     * @param object   any fastjson-serializable value
     * @throws Exception if obtaining or writing to the response writer fails
     */
    public static void printJSON(HttpServletResponse response, Object object) throws Exception {
        String str = JSON.toJSONString(object);
        printStr(response, str);
    }
    /**
     * Writes {@code str} verbatim as an application/json, UTF-8 response body.
     *
     * @param response the servlet response to write to
     * @param str      the exact body to emit
     * @throws Exception if obtaining or writing to the response writer fails
     */
    public static void printStr(HttpServletResponse response, String str) throws Exception {
        response.setCharacterEncoding(UTF8);
        response.setContentType(CONTENT_TYPE);
        // try-with-resources guarantees the writer is flushed and closed even
        // when write() throws; the original leaked it on failure.
        try (PrintWriter printWriter = response.getWriter()) {
            printWriter.write(str);
            printWriter.flush();
        }
    }
}
|
gitXugx/design-mode
|
src/main/java/design/mode/dm/structural/pp/v1/IStar.java
|
<filename>src/main/java/design/mode/dm/structural/pp/v1/IStar.java
package design.mode.dm.structural.pp.v1;
/**
 * A celebrity/star role in the proxy-pattern demo (v1): the common interface
 * shared by the real subject and its proxy.
 *
 * @author :ex-xugaoxiang001
 * @description : Common subject interface for the static proxy example.
 * @copyright : Copyright 2018 yowits Corporation. All rights reserved.
 * @create :2018/12/27 17:58
 */
public interface IStar {
    /** Performs the concert; implemented by the real star and by its proxy. */
    void openingAConcert();
}
|
mbarkley/uberfire
|
uberfire-testing-utils/src/main/java/org/uberfire/mocks/CallerMock.java
|
package org.uberfire.mocks;
import java.lang.reflect.Proxy;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.ErrorCallback;
import org.jboss.errai.common.client.api.RemoteCallback;
/**
 * Test double for Errai's {@link Caller}: every {@code call(...)} overload
 * installs the given callbacks on the underlying {@link CallerProxy} and
 * returns the proxied service so the invocation happens synchronously.
 *
 * Note: the original duplicated the proxy-configuration code in all three
 * overloads and declared two callback fields that were never read; both are
 * consolidated/removed here.
 */
public class CallerMock<T> implements Caller<T> {

    private T callerProxy;

    public CallerMock( T t ) {
        callerProxy = (T) CallerProxy.newInstance( t );
    }

    @Override
    public T call() {
        return configureCallbacks( null, null );
    }

    @Override
    public T call( RemoteCallback<?> remoteCallback ) {
        return configureCallbacks( remoteCallback, null );
    }

    @Override
    public T call( RemoteCallback<?> remoteCallback,
                   ErrorCallback<?> errorCallback ) {
        return configureCallbacks( remoteCallback, errorCallback );
    }

    /** Installs the callbacks (either may be null) on the proxy and returns it. */
    @SuppressWarnings("unchecked")
    private T configureCallbacks( RemoteCallback<?> remoteCallback,
                                  ErrorCallback<?> errorCallback ) {
        final CallerProxy localProxy = ( (CallerProxy) Proxy.getInvocationHandler( callerProxy ) );
        localProxy.setSuccessCallBack( (RemoteCallback<Object>) remoteCallback );
        localProxy.setErrorCallBack( (ErrorCallback<Object>) errorCallback );
        return callerProxy;
    }
}
|
goline/lapi
|
handler.go
|
<gh_stars>1-10
package lapi
import "github.com/goline/errors"
// Handler handles a single request: it receives the connection and returns
// either a result payload or an error.
type Handler interface {
	// Handle performs the logic for solving the request. Exactly one of the
	// return values is expected to be meaningful.
	Handle(connection Connection) (interface{}, errors.Error)
}
// IOHandler describes a handler's input and output types.
// It exists only to support generating documentation; it carries no runtime
// behavior of its own.
type IOHandler interface {
	// IO returns example/placeholder input and output values for documentation.
	IO() (input interface{}, output interface{})
}
|
UltiRequiem/daily-python-practice
|
finished/edabit/medium/length_of_number.py
|
# Edabit challenge: return how many characters a number's decimal form has.
def number_lenght(num: int) -> int:
    """Return the length of ``num``'s decimal representation.

    Note: a leading minus sign counts as a character (matches str()).
    """
    return len(f"{num}")


if __name__ == "__main__":
    print(number_lenght(25000))
|
nataliadelmar/momentum-react-v2
|
src/legacy/InputSearch/examples/Loading.js
|
<reponame>nataliadelmar/momentum-react-v2<gh_stars>1-10
import React from 'react';
import { InputSearch } from '@momentum-ui/react-collaboration';
export default function LoadingInputSearch() {
return (
<InputSearch
clear
htmlId="loadingSearchInput"
containerSize="medium-6"
isLoading
label="Loading Search Input"
name="loadingSearchInput"
/>
);
}
|
raoxy5/huogou
|
web/js/jquery.page.js
|
<filename>web/js/jquery.page.js
// jQuery pager plugin: $(el).createPage({pageCount, current, downPage, gotoPage})
// renders prev/next links plus a windowed list of page numbers (current ± 2,
// with '...' ellipses and the first/last page pinned), and wires clicks to a
// global navigation function named by `gotoPage`.
(function($) {
    var ms = {
        init:function(obj, args) {
            return (function() {
                ms.fillHtml(obj, args);
                ms.bindEvent(obj, args);
            })();
        },
        // Render the pager HTML into `obj` (emptied first).
        fillHtml:function(obj, args) {
            return (function() {
                obj.empty();
                // "previous page" link (label: 上一页)
                obj.append('<a href="javascript:;" class="prev">上一页</a>');
                // Pinned first-page link, shown once the window moves away from page 1.
                // NOTE(review): the `pageCount != 4` special case looks like a tweak
                // for 4-page pagers — confirm the intended behavior.
                if (args.current != 1 && args.current >= 4 && args.pageCount != 4) {
                    obj.append('<a href="javascript:;" class="tcdNumber">'+1+'</a>');
                }
                // Leading ellipsis when pages are hidden between 1 and the window.
                if(args.current - 2 > 2 && args.current <= args.pageCount && args.pageCount > 5){
                    obj.append('<i>...</i>');
                }
                // Window of page numbers around the current page (current ± 2),
                // widened by one at either end of the range.
                var start = args.current - 2, end = args.current + 2;
                if ((start > 1 && args.current < 4) || args.current == 1) {
                    end++;
                }
                if (args.current > args.pageCount - 4 && args.current >= args.pageCount) {
                    start--;
                }
                for (; start <= end; start++) {
                    if (start <= args.pageCount && start >= 1) {
                        if (start != args.current) {
                            obj.append('<a href="javascript:;" class="tcdNumber">'+ start +'</a>');
                        } else {
                            // Current page is rendered inactive (class "act").
                            obj.append('<a href="javascript:;" class="act"><b></b>'+ start +'</a>');
                        }
                    }
                }
                // Trailing ellipsis when pages are hidden between the window and the end.
                if (args.current + 2 < args.pageCount - 1 && args.current >= 1 && args.pageCount > 5) {
                    obj.append('<i>...</i>');
                }
                // Pinned last-page link, shown while the window is away from the end.
                if (args.current != args.pageCount && args.current < args.pageCount - 2 && args.pageCount != 4) {
                    obj.append('<a href="javascript:;" class="tcdNumber">' + args.pageCount + '</a>');
                }
                // "next page" link (label: 下一页)
                obj.append('<a href="javascript:;" class="next">下一页</a>');
            })();
        },
        // Wire click handlers; each navigation calls the global function named by
        // args.gotoPage with a 1-based record offset: (page - 1) * downPage + 1.
        bindEvent:function(obj,args) {
            return (function() {
                $('a.tcdNumber').click(function(){
                    var current = parseInt($(this).text());
                    window[args.gotoPage]((current - 1) * args.downPage + 1);
                });
                // Previous page: no-op when already on page 1.
                $('a.prev').click(function(){
                    var current = parseInt(obj.children("a.act").text());
                    if (current <= 1) return;
                    window[args.gotoPage]((current - 2) * args.downPage + 1);
                });
                // Next page: no-op when already on the last page.
                $('a.next').click(function(){
                    var current = parseInt(obj.children("a.act").text());
                    if (current >= args.pageCount) return;
                    window[args.gotoPage](current * args.downPage + 1);
                });
            })();
        }
    }
    // Public entry point; options are merged over these defaults.
    $.fn.createPage = function(options){
        var args = $.extend({
            pageCount : 10,
            current : 10,
            downPage: 10,
            gotoPage: 'gotoPage',
            backFn : function(){}
        },options);
        ms.init(this,args);
    }
})(jQuery);
|
vcebollada/dali-core
|
dali/public-api/actors/draw-mode.h
|
#ifndef DALI_DRAW_MODE_H
#define DALI_DRAW_MODE_H
/*
 * Copyright (c) 2019 Samsung Electronics Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
#include <dali/public-api/common/dali-common.h>
namespace Dali
{
/**
 * @addtogroup dali_core_actors
 * @{
 */
/**
 * @brief DrawMode to determine how the Actor and its children will be drawn.
 *
 * @SINCE_1_0.0
 */
namespace DrawMode
{
/**
 * @brief Enumeration for the instance of how the actor and its children will be drawn.
 *
 * @SINCE_1_0.0
 * @see Dali::Actor::SetDrawMode()
 */
enum Type
{
  NORMAL     = 0, ///< @brief binary 00. The default draw-mode @SINCE_1_0.0
  OVERLAY_2D = 1  ///< @brief binary 01. Draw the actor and its children as an overlay @SINCE_1_0.0
};
} // namespace DrawMode
/**
 * @}
 */
} // namespace Dali
#endif // DALI_DRAW_MODE_H
|
SairajK19/PaperHouse
|
thepaperhousegraph/node_modules/gluegun/semver.js
|
<filename>thepaperhousegraph/node_modules/gluegun/semver.js<gh_stars>10-100
// Thin wrapper: re-export the compiled semver implementation from the build output.
module.exports = require('./build/semver')
|
xiamoweinuan/GKitComponent
|
GKitComponent/Classes/BaseComponent/Component/GRouter.h
|
<gh_stars>0
//
// FiserMan.h
// CCRouter
//
// Created by <NAME> on 2018/11/8.
// Copyright © 2018 keruyun. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "GModProtocol.h"
#import "GServiceProtocol.h"
NS_ASSUME_NONNULL_BEGIN
/// Central router: registers "mod" components, looks up services, and drives
/// push/pop/present/dismiss navigation by module name.
@interface GRouter : NSObject
/// Root view controller of the application (read-only).
@property (nonatomic,strong,readonly)UIViewController *appRootViewController;
/// Shared singleton accessor.
/// NOTE(review): the selector is spelled `shard` — presumably a typo for
/// `shared`, but renaming would break every caller; leaving as-is.
+ (instancetype)shard;
/** Look up a registered service by name. */
- (id)findServiceWithName:(NSString *)serviceName;
- (void)applicationDidEnterBackground:(UIApplication *)application;
/// Register a mod (module) object with the router.
-(void)addMod:(id)modeObj;
/** Look up a registered mod by name. */
- (id)findModWithName:(NSString *)modeName;
/// Push the canvas (view controller) for the named mod onto the nav stack.
-(void)pushCanvas:(NSString *)modeName;
/// Push the named canvas, letting the block configure the VC before display.
-(void)pushCanvas:(NSString *)modeName withBlock:(void (^)(UIViewController* vc))block;
/// Pop the top canvas.
-(void)popCanvas;
/// Pop back to the named canvas; `comPlete` receives the target VC.
-(void)popToCanvas:(NSString *)modeName withComplete:(void(^)(UIViewController* vc))comPlete;
/// Present the named canvas modally. (Selector keeps the original `wiht` typo.)
-(void)presentCanvas:(NSString *)modeName wihtCompletion: (void (^)(void))completion;
/// Dismiss the named canvas. (Selector keeps the original `wiht` typo.)
-(void)dismissCanvas:(NSString *)modeName wihtCompletion: (void (^)(void))completion;
@end
NS_ASSUME_NONNULL_END
|
zhoupb/springBootTest
|
src/main/java/com/example/designPattern/structural_type/decorator/FriedNoodles.java
|
package com.example.designPattern.structural_type.decorator;
/**
 * @author: zhoupb
 * @Description: Fried-noodles dish — the concrete component role in the
 *               decorator pattern example (decorators wrap it to add cost).
 * @since: version 1.0
 */
public class FriedNoodles extends FastFood {
    public FriedNoodles() {
        // Base price 12, display name "炒面" (fried noodles).
        super(12f, "炒面");
    }
    @Override
    public float cost() {
        // An undecorated dish costs exactly its base price.
        return getPrice();
    }
}
|
pfeairheller/canis
|
pkg/credential/engine/indy/mocks/VDRClient.go
|
// Code generated by mockery v1.0.0. DO NOT EDIT.
package mocks
import (
vdr "github.com/hyperledger/indy-vdr/wrappers/golang/vdr"
mock "github.com/stretchr/testify/mock"
)
// VDRClient is an autogenerated mock type for the VDRClient type
// NOTE(review): generated by mockery — do not hand-edit; regenerate instead.
// Each method records its arguments via _m.Called and returns whatever the
// test configured (a literal value or a function of the arguments).
type VDRClient struct {
	mock.Mock
}
// CreateClaimDef provides a mock function with given fields: from, ref, pubKey, revocation, signer
func (_m *VDRClient) CreateClaimDef(from string, ref uint32, pubKey map[string]interface{}, revocation map[string]interface{}, signer vdr.Signer) (string, error) {
	ret := _m.Called(from, ref, pubKey, revocation, signer)
	var r0 string
	if rf, ok := ret.Get(0).(func(string, uint32, map[string]interface{}, map[string]interface{}, vdr.Signer) string); ok {
		r0 = rf(from, ref, pubKey, revocation, signer)
	} else {
		r0 = ret.Get(0).(string)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string, uint32, map[string]interface{}, map[string]interface{}, vdr.Signer) error); ok {
		r1 = rf(from, ref, pubKey, revocation, signer)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// CreateNym provides a mock function with given fields: did, verkey, role, from, signer
func (_m *VDRClient) CreateNym(did string, verkey string, role string, from string, signer vdr.Signer) error {
	ret := _m.Called(did, verkey, role, from, signer)
	var r0 error
	if rf, ok := ret.Get(0).(func(string, string, string, string, vdr.Signer) error); ok {
		r0 = rf(did, verkey, role, from, signer)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}
// CreateSchema provides a mock function with given fields: issuerDID, name, version, attrs, signer
func (_m *VDRClient) CreateSchema(issuerDID string, name string, version string, attrs []string, signer vdr.Signer) (string, error) {
	ret := _m.Called(issuerDID, name, version, attrs, signer)
	var r0 string
	if rf, ok := ret.Get(0).(func(string, string, string, []string, vdr.Signer) string); ok {
		r0 = rf(issuerDID, name, version, attrs, signer)
	} else {
		r0 = ret.Get(0).(string)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string, string, string, []string, vdr.Signer) error); ok {
		r1 = rf(issuerDID, name, version, attrs, signer)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetCredDef provides a mock function with given fields: credDefID
func (_m *VDRClient) GetCredDef(credDefID string) (*vdr.ReadReply, error) {
	ret := _m.Called(credDefID)
	var r0 *vdr.ReadReply
	if rf, ok := ret.Get(0).(func(string) *vdr.ReadReply); ok {
		r0 = rf(credDefID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*vdr.ReadReply)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string) error); ok {
		r1 = rf(credDefID)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetNym provides a mock function with given fields: did
func (_m *VDRClient) GetNym(did string) (*vdr.ReadReply, error) {
	ret := _m.Called(did)
	var r0 *vdr.ReadReply
	if rf, ok := ret.Get(0).(func(string) *vdr.ReadReply); ok {
		r0 = rf(did)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*vdr.ReadReply)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string) error); ok {
		r1 = rf(did)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetSchema provides a mock function with given fields: schemaID
func (_m *VDRClient) GetSchema(schemaID string) (*vdr.ReadReply, error) {
	ret := _m.Called(schemaID)
	var r0 *vdr.ReadReply
	if rf, ok := ret.Get(0).(func(string) *vdr.ReadReply); ok {
		r0 = rf(schemaID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*vdr.ReadReply)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(string) error); ok {
		r1 = rf(schemaID)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// SetEndpoint provides a mock function with given fields: did, from, ep, signer
func (_m *VDRClient) SetEndpoint(did string, from string, ep string, signer vdr.Signer) error {
	ret := _m.Called(did, from, ep, signer)
	var r0 error
	if rf, ok := ret.Get(0).(func(string, string, string, vdr.Signer) error); ok {
		r0 = rf(did, from, ep, signer)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}
|
UnderJollyRoger/storymap-crowdsource
|
src/app/store/reducers/app/map/Map.babel.js
|
import { combineReducers } from 'redux';
import {
UPDATE_MAP_REFERENCES,
UPDATE_MAP_FEATURES_IN_EXTENT,
UPDATE_MAP_SELECTED_FEATURES,
UPDATE_MAP_HIGHLIGHTED_FEATURES,
UPDATE_MAP_MOVING,
UPDATE_MAP_ON_TOP
} from 'babel/constants/actionsTypes/Map';
// Factory for reducers that mirror one named entry out of an
// UPDATE_MAP_REFERENCES action. The stored reference is replaced only when
// the action actually carries a truthy value for that key; otherwise the
// previous state is kept. Initial state `false` means "not available yet".
// (This replaces four copy-pasted reducers; it also removes the unreachable
// `break;` statements that followed each `return` in the originals.)
const referenceReducer = function(key) {
  return function(state = false, action) {
    switch (action.type) {
      case UPDATE_MAP_REFERENCES:
        return action.references[key] ? action.references[key] : state;
      default:
        return state;
    }
  };
};

const itemInfo = referenceReducer('itemInfo');
const layer = referenceReducer('layer');
const clusterLayer = referenceReducer('clusterLayer');
// The raw map object lives under the `map` key of the references payload.
const originalObject = referenceReducer('map');

// Features currently within the map extent.
const featuresInExtent = function(state = [], action) {
  switch (action.type) {
    case UPDATE_MAP_FEATURES_IN_EXTENT:
      return action.features;
    default:
      return state;
  }
};

// Id of the currently selected feature (false = none selected).
const selectedFeatureId = function(state = false, action) {
  switch (action.type) {
    case UPDATE_MAP_SELECTED_FEATURES:
      return action.id;
    default:
      return state;
  }
};

// Id of the currently highlighted (hovered) feature; selecting any feature
// clears the highlight.
const highlightedFeatureId = function(state = false, action) {
  switch (action.type) {
    case UPDATE_MAP_HIGHLIGHTED_FEATURES:
      return action.id;
    case UPDATE_MAP_SELECTED_FEATURES:
      return false;
    default:
      return state;
  }
};

// Whether the map is currently panning/zooming (coerced to a strict boolean).
const mapMoving = function(state = false, action) {
  switch (action.type) {
    case UPDATE_MAP_MOVING:
      return action.moving ? true : false;
    default:
      return state;
  }
};

// Whether the map should be forced above other UI (coerced to a strict boolean).
const forceToTop = function(state = false, action) {
  switch (action.type) {
    case UPDATE_MAP_ON_TOP:
      return action.showOnTop ? true : false;
    default:
      return state;
  }
};

export const map = combineReducers({
  itemInfo,
  clusterLayer,
  layer,
  originalObject,
  featuresInExtent,
  selectedFeatureId,
  highlightedFeatureId,
  mapMoving,
  forceToTop
});
export default map;
|
ahmedengu/h2o-3
|
h2o-algos/src/main/java/hex/schemas/AggregatorModelV99.java
|
package hex.schemas;
import hex.aggregator.AggregatorModel;
import water.api.API;
import water.api.schemas3.KeyV3;
import water.api.schemas3.ModelOutputSchemaV3;
import water.api.schemas3.ModelSchemaV3;
/**
 * REST schema (v99) for an Aggregator model: binds the model's parameters and
 * output to their API representations.
 */
public class AggregatorModelV99 extends ModelSchemaV3<AggregatorModel, AggregatorModelV99, AggregatorModel.AggregatorParameters, AggregatorV99.AggregatorParametersV99, AggregatorModel.AggregatorOutput, AggregatorModelV99.AggregatorModelOutputV99> {
  /** API view of the Aggregator model's output frames. */
  public static final class AggregatorModelOutputV99 extends ModelOutputSchemaV3<AggregatorModel.AggregatorOutput, AggregatorModelOutputV99> {
    @API(help = "Aggregated Frame of Exemplars")
    public KeyV3.FrameKeyV3 output_frame;
    @API(help ="Aggregated Frame mapping to the rows in the original data")
    public KeyV3.FrameKeyV3 mapping_frame;
  }
  // TODO: I think we can implement the following two in ModelSchemaV3, using reflection on the type parameters.
  /** Creates the empty parameters schema this model schema pairs with. */
  public AggregatorV99.AggregatorParametersV99 createParametersSchema() { return new AggregatorV99.AggregatorParametersV99(); }
  /** Creates the empty output schema this model schema pairs with. */
  public AggregatorModelOutputV99 createOutputSchema() { return new AggregatorModelOutputV99(); }
  // Version&Schema-specific filling into the impl
  /** Builds the backing AggregatorModel from this schema's id and parameters (output left null). */
  @Override public AggregatorModel createImpl() {
    AggregatorModel.AggregatorParameters parms = parameters.createImpl();
    return new AggregatorModel( model_id.key(), parms, null );
  }
}
|
Elyahu41/Terasology
|
engine/src/main/java/org/terasology/engine/utilities/modifiable/ModifiableValue.java
|
<gh_stars>1000+
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.utilities.modifiable;
import org.terasology.gestalt.module.sandbox.API;
/**
 * A helper type that carries a base value together with additive and
 * multiplicative modifiers, so the effective value of a component can be
 * adjusted without changing its stored value.
 * <p>
 * Components using this type must mention so in their javadoc so all modifiers
 * are added correctly.
 * </p>
 */
@API
public class ModifiableValue {
    private final float baseValue;
    // Modifier applied before the multiplier (sum of all preAdd() calls).
    private float preModifier = 0;
    // Product of all multiply() calls.
    private float multiplier = 1;
    // Modifier applied after the multiplier (sum of all postAdd() calls).
    private float postModifier = 0;

    public ModifiableValue(float baseValue) {
        this.baseValue = baseValue;
    }

    /** Folds {@code amount} into the multiplier; chainable. */
    public ModifiableValue multiply(float amount) {
        multiplier *= amount;
        return this;
    }

    /** Adds {@code amount} to the pre-multiplier modifier; chainable. */
    public ModifiableValue preAdd(float amount) {
        preModifier += amount;
        return this;
    }

    /** Adds {@code amount} to the post-multiplier modifier; chainable. */
    public ModifiableValue postAdd(float amount) {
        postModifier += amount;
        return this;
    }

    /**
     * Computes the effective value:
     * <pre> {@code
     * result = (<baseValue> + Σ <preModifier>) * Π <multiplier> + Σ <postModifier>
     * } </pre>
     *
     * <emph>non-negativity of the value is not ensured and must be checked by the system if needed</emph>
     */
    public float getValue() {
        return (baseValue + preModifier) * multiplier + postModifier;
    }

    public float getBaseValue() {
        return baseValue;
    }

    public float getPreModifier() {
        return preModifier;
    }

    public float getMultiplier() {
        return multiplier;
    }

    public float getPostModifier() {
        return postModifier;
    }

    /**
     * Overwrites the preModifier wholesale. Intended only for deserialization;
     * use {@link #preAdd(float)} for normal modification.
     *
     * @param preModifier the preModifier to set for the component data.
     */
    public void setPreModifier(float preModifier) {
        this.preModifier = preModifier;
    }

    /**
     * Overwrites the multiplier wholesale. Intended only for deserialization;
     * use {@link #multiply(float)} for normal modification.
     *
     * @param multiplier the multiplier to set for the component data.
     */
    public void setMultiplier(float multiplier) {
        this.multiplier = multiplier;
    }

    /**
     * Overwrites the postModifier wholesale. Intended only for deserialization;
     * use {@link #postAdd(float)} for normal modification.
     *
     * @param postModifier the postModifier to set for the component data.
     */
    public void setPostModifier(float postModifier) {
        this.postModifier = postModifier;
    }
}
|
xing5/DynamicS4
|
subprojects/s4-core/src/main/java/org/apache/s4/core/DefaultRemoteSenders.java
|
<gh_stars>0
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.s4.core;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import org.apache.s4.base.Event;
import org.apache.s4.base.Hasher;
import org.apache.s4.base.SerializerDeserializer;
import org.apache.s4.comm.serialize.SerializerDeserializerFactory;
import org.apache.s4.comm.tcp.RemoteEmitters;
import org.apache.s4.comm.topology.Clusters;
import org.apache.s4.comm.topology.RemoteStreams;
import org.apache.s4.comm.topology.StreamConsumer;
import org.apache.s4.core.staging.RemoteSendersExecutorServiceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
 * Default {@link RemoteSenders} implementation for sending events to nodes of a remote cluster.
 * Lazily creates one {@link RemoteSender} per remote cluster and dispatches
 * sends asynchronously through the injected executor.
 */
@Singleton
public class DefaultRemoteSenders implements RemoteSenders {
    Logger logger = LoggerFactory.getLogger(DefaultRemoteSenders.class);
    final RemoteEmitters remoteEmitters;
    final RemoteStreams remoteStreams;
    final Clusters remoteClusters;
    final SerializerDeserializer serDeser;
    final Hasher hasher;
    private App app;
    // Cache of per-cluster senders, created on first use (see send()).
    ConcurrentMap<String, RemoteSender> sendersByTopology = new ConcurrentHashMap<String, RemoteSender>();
    private final ExecutorService executorService;
    @Inject
    public DefaultRemoteSenders(RemoteEmitters remoteEmitters, RemoteStreams remoteStreams, Clusters remoteClusters,
            SerializerDeserializerFactory serDeserFactory, Hasher hasher,
            RemoteSendersExecutorServiceFactory senderExecutorFactory) {
        this.remoteEmitters = remoteEmitters;
        this.remoteStreams = remoteStreams;
        this.remoteClusters = remoteClusters;
        this.hasher = hasher;
        executorService = senderExecutorFactory.create();
        serDeser = serDeserFactory.createSerializerDeserializer(Thread.currentThread().getContextClassLoader());
    }
    /*
     * (non-Javadoc)
     *
     * @see org.apache.s4.core.RemoteSenders#send(java.lang.String, org.apache.s4.base.Event)
     */
    @Override
    public void send(String hashKey, Event event) {
        Set<StreamConsumer> consumers = remoteStreams.getConsumers(event.getStreamId());
        for (StreamConsumer consumer : consumers) {
            // NOTE: even though there might be several ephemeral znodes for the same app and topology, they are
            // represented by a single stream consumer
            RemoteSender sender = sendersByTopology.get(consumer.getClusterName());
            if (sender == null) {
                // Fixed log-message typos from the original ("Creat", "CLuster").
                logger.debug("Create RemoteSender. Cluster=[{}](" + remoteClusters.getCluster(consumer.getClusterName())
                        + ") [{}]", consumer.getClusterName(), remoteClusters.getCluster(consumer.getClusterName())
                        .getPhysicalCluster().getNodes());
                RemoteSender newSender = new RemoteSender(remoteEmitters.getEmitter(remoteClusters.getCluster(consumer
                        .getClusterName())), hasher, consumer.getClusterName());
                // TODO cleanup when remote topologies die
                // putIfAbsent resolves the race where two threads build a sender concurrently.
                sender = sendersByTopology.putIfAbsent(consumer.getClusterName(), newSender);
                if (sender == null) {
                    sender = newSender;
                }
            }
            // NOTE: this implies multiple serializations, there might be an optimization
            executorService.execute(new SendToRemoteClusterTask(hashKey, event, sender));
        }
    }
    /**
     * Records the owning {@link App}, first caller wins.
     *
     * BUG FIX: the original guard was {@code if (this.app != null)}, which made
     * assignment impossible — {@code app} starts out null and therefore could
     * never be set. The intent (assign only once) requires {@code == null}.
     */
    public void setApp(App app) {
        if (this.app == null) {
            this.app = app;
        }
    }
    public App getApp() {
        return this.app;
    }
    /** Runnable that serializes the event and pushes it to one remote cluster. */
    class SendToRemoteClusterTask implements Runnable {
        String hashKey;
        Event event;
        RemoteSender sender;
        public SendToRemoteClusterTask(String hashKey, Event event, RemoteSender sender) {
            super();
            this.hashKey = hashKey;
            this.event = event;
            this.sender = sender;
        }
        @Override
        public void run() {
            try {
                sender.send(hashKey, serDeser.serialize(event));
            } catch (InterruptedException e) {
                logger.error("Interrupted blocking send operation for event {}. Event is lost.", event);
                // Restore the interrupt flag so upstream executors can observe it.
                Thread.currentThread().interrupt();
            }
        }
    }
    @Override
    public void sendPE(String key, byte[] peState, String streamName, int peIndex, String destClusterName) {
        // TODO Auto-generated method stub
    }
    @Override
    public void sendAllCachedPE(String string, String newConsumerClusterName) {
        // TODO Auto-generated method stub
    }
}
|
iTitus/PDXTools
|
dds-reader/src/main/java/io/github/ititus/dds/D3dFormat.java
|
<reponame>iTitus/PDXTools
package io.github.ititus.dds;
import java.io.IOException;
import java.util.NoSuchElementException;
/**
 * Direct3D 9 pixel formats (D3DFMT_*) as found in DDS headers. Each constant
 * carries its numeric D3DFORMAT code (FourCC for the packed/compressed ones).
 */
public enum D3dFormat {
    UNKNOWN(0),
    R8G8B8(20),
    A8R8G8B8(21),
    X8R8G8B8(22),
    R5G6B5(23),
    X1R5G5B5(24),
    A1R5G5B5(25),
    A4R4G4B4(26),
    R3G3B2(27),
    A8(28),
    A8R3G3B2(29),
    X4R4G4B4(30),
    A2B10G10R10(31),
    A8B8G8R8(32),
    X8B8G8R8(33),
    G16R16(34),
    A2R10G10B10(35),
    A16B16G16R16(36),
    A8P8(40),
    P8(41),
    L8(50),
    A8L8(51),
    A4L4(52),
    V8U8(60),
    L6V5U5(61),
    X8L8V8U8(62),
    Q8W8V8U8(63),
    V16U16(64),
    A2W10V10U10(67),
    UYVY(DdsConstants.D3DFMT_UYVY),
    R8G8_B8G8(DdsConstants.D3DFMT_R8G8_B8G8),
    YUY2(DdsConstants.D3DFMT_YUY2),
    G8R8_G8B8(DdsConstants.D3DFMT_G8R8_G8B8),
    DXT1(DdsConstants.D3DFMT_DXT1),
    DXT2(DdsConstants.D3DFMT_DXT2),
    DXT3(DdsConstants.D3DFMT_DXT3),
    DXT4(DdsConstants.D3DFMT_DXT4),
    DXT5(DdsConstants.D3DFMT_DXT5),
    D16_LOCKABLE(70),
    D32(71),
    D15S1(73),
    D24S8(75),
    D24X8(77),
    D24X4S4(79),
    D16(80),
    D32F_LOCKABLE(82),
    D24FS8(83),
    D32_LOCKABLE(84),
    S8_LOCKABLE(85),
    L16(81),
    VERTEXDATA(100),
    INDEX16(101),
    INDEX32(102),
    Q16W16V16U16(110),
    MULTI2_ARGB8(DdsConstants.D3DFMT_MULTI2_ARGB8),
    R16F(111),
    G16R16F(112),
    A16B16G16R16F(113),
    R32F(114),
    G32R32F(115),
    A32B32G32R32F(116),
    CxV8U8(117),
    A1(118),
    A2B10G10R10_XR_BIAS(119),
    BINARYBUFFER(199),
    FORCE_DWORD(0x7fffffff);
    private static final D3dFormat[] VALUES = values();
    private final int value;
    D3dFormat(int value) {
        this.value = value;
    }
    /** Reads one dword from {@code r} and maps it to a format constant. */
    public static D3dFormat load(DataReader r) throws IOException {
        try {
            return get(r.readDword());
        } catch (NoSuchElementException e) {
            throw new IOException(e);
        }
    }
    /**
     * Maps a raw D3DFORMAT code to its constant. FORCE_DWORD is deliberately
     * excluded: it is a sizing sentinel, not a real format.
     */
    public static D3dFormat get(int value) {
        for (D3dFormat f : VALUES) {
            if (f != FORCE_DWORD && f.value == value) {
                return f;
            }
        }
        throw new NoSuchElementException("unknown d3d format");
    }
    /**
     * Returns this format's D3DFORMAT code.
     *
     * BUG FIX: the original returned {@code ordinal()}, ignoring the {@code value}
     * field that the constructors populate and that {@link #get(int)} matches
     * against — e.g. DXT1's code is its FourCC, not its declaration index.
     */
    public int value() {
        return value;
    }
    /** Bits per pixel for the format; throws for formats with no defined bpp. */
    public int getBitsPerPixel() {
        return switch (this) {
            case A1 -> 1;
            case DXT1 -> 4;
            case R3G3B2, A8, P8, L8, A4L4, S8_LOCKABLE, DXT2, DXT3, DXT4, DXT5 -> 8;
            case R5G6B5, X1R5G5B5, A1R5G5B5, A4R4G4B4, A8R3G3B2, X4R4G4B4, A8P8, A8L8, V8U8, L6V5U5, UYVY, YUY2, R8G8_B8G8, G8R8_G8B8, D16, D16_LOCKABLE, D15S1, L16, INDEX16, R16F, CxV8U8 -> 16;
            case R8G8B8 -> 24;
            case A8R8G8B8, X8R8G8B8, A2B10G10R10, A8B8G8R8, X8B8G8R8, G16R16, A2R10G10B10, X8L8V8U8, V16U16, A2W10V10U10, D32, D24S8, D24X8, D24X4S4, D24FS8, INDEX32, G16R16F, R32F, A2B10G10R10_XR_BIAS, MULTI2_ARGB8 -> 32;
            case A16B16G16R16, A16B16G16R16F, Q16W16V16U16, G32R32F -> 64;
            case A32B32G32R32F -> 128;
            default -> throw new IllegalStateException("unknown bpp for format " + this);
        };
    }
    /** True for the block-compressed (DXTn) formats. */
    public boolean isBlockCompressed() {
        return switch (this) {
            case DXT1, DXT2, DXT3, DXT4, DXT5 -> true;
            default -> false;
        };
    }
}
|
Benjamindavid03/JBlazeEngine
|
Engine/src/com/blaze/engine/rendering/Material.java
|
<gh_stars>1-10
package com.blaze.engine.rendering;
import com.blaze.engine.rendering.resourceManagement.MappedValues;
import java.util.HashMap;
/**
 * A renderable surface description: named texture slots plus float shading
 * parameters, both stored via the {@link MappedValues} base class.
 */
public class Material extends MappedValues {

    /** Texture slots by name ("diffuse", "normalMap", "dispMap", ...). */
    private final HashMap<String, Texture> m_textureHashMap;

    /** Kept public for backward compatibility; not read inside this class. */
    public float DEFAULT_COLOUR = 1.0f;

    /**
     * Diffuse-only material with default specular (intensity 0.5, power 0.2)
     * and displacement settings.
     */
    public Material(Texture diffuse) {
        super();
        m_textureHashMap = new HashMap<>();
        AddTexture("diffuse", diffuse);
        AddFloat("specularIntensity", 0.5f);
        AddFloat("specularPower", 0.2f);
        // NOTE(review): baseBias is derived from 0.2f although the
        // dispMapScale stored below is 0.1f; the formula in the other
        // constructors would use 0.1f / 2 — confirm which is intended.
        float baseBias = 0.2f / 2.0f;
        AddFloat("dispMapScale", 0.1f);
        AddFloat("dispMapBias", -baseBias + baseBias * 0.1f);
    }

    /** Diffuse material with explicit specular and displacement parameters. */
    public Material(Texture diffuse, float specularIntensity, float specularPower, float dispMapScale, float dispMapOffset) {
        super();
        m_textureHashMap = new HashMap<>();
        AddTexture("diffuse", diffuse);
        AddFloat("specularIntensity", specularIntensity);
        AddFloat("specularPower", specularPower);
        float baseBias = dispMapScale / 2.0f;
        AddFloat("dispMapScale", dispMapScale);
        AddFloat("dispMapBias", -baseBias + baseBias * dispMapOffset);
    }

    /** Full material including normal and displacement maps. */
    public Material(Texture diffuse, float specularIntensity, float specularPower, Texture normal,
                    Texture dispMap, float dispMapScale, float dispMapOffset) {
        super();
        m_textureHashMap = new HashMap<>();
        AddTexture("diffuse", diffuse);
        AddFloat("specularIntensity", specularIntensity);
        AddFloat("specularPower", specularPower);
        AddTexture("normalMap", normal);
        AddTexture("dispMap", dispMap);
        float baseBias = dispMapScale / 2.0f;
        AddFloat("dispMapScale", dispMapScale);
        AddFloat("dispMapBias", -baseBias + baseBias * dispMapOffset);
    }

    /** Stores (or replaces) the texture under the given slot name. */
    public void AddTexture(String name, Texture texture) {
        m_textureHashMap.put(name, texture);
    }

    /**
     * Returns the texture for the slot, or a freshly loaded "test.png"
     * placeholder when the slot is unset (allocated on every miss).
     */
    public Texture GetTexture(String name) {
        Texture result = m_textureHashMap.get(name);
        if (result != null) {
            return result;
        }
        return new Texture("test.png");
    }
}
|
GigaNova/CrossWars
|
Cross Wars vs Onslaught/Character.cpp
|
<gh_stars>1-10
#include "Character.h"
// Default constructor: members keep their default-initialized values.
Character::Character()
{
}

// NOTE(review): empty destructor — the HUDSprite allocated with `new` in the
// pointer copy constructor below is never deleted here; confirm ownership.
Character::~Character()
{
}
// Copies stats and sprite references from an existing Character.
// NOTE(review): all sprite pointers except HUDSprite are copied shallowly
// (both characters share the same sprite objects), while HUDSprite is
// deep-copied; confirm this asymmetry is intentional.
Character::Character(Character* _Character)
{
    ATKGrowth = _Character->GetATKGrowth();
    DEFGrowth = _Character->GetDEFGrowth();
    SPDGrowth = _Character->GetSPDGrowth();
    LCKGrowth = _Character->GetLCKGrowth();
    HPGrowth = _Character->GetHPGrowth();
    Name = _Character->GetNAME();
    AttackSprite = _Character->AttackSprite;
    BattleSprite = _Character->BattleSprite;
    DialougeSprite = _Character->DialougeSprite;
    SupportSprite = _Character->SupportSprite;
    HUDSprite = new sf::Sprite(*_Character->HUDSprite);
}
|
kapiak/ware_prod
|
assistant/warehouse/wagtail_hooks.py
|
<filename>assistant/warehouse/wagtail_hooks.py
from django.utils.translation import gettext_lazy as _
from wagtail.admin.edit_handlers import (
FieldPanel,
InlinePanel,
MultiFieldPanel,
)
from wagtail.contrib.modeladmin.options import (
ModelAdmin,
ModelAdminGroup,
modeladmin_register,
)
from wagtail.snippets.edit_handlers import SnippetChooserPanel
from .models import Warehouse, Stock, Allocation
class WarehouseWagtailAdmin(ModelAdmin):
    """Wagtail admin for warehouses: contact fields plus a snippet-chosen address."""
    model = Warehouse
    menu_icon = "home"
    menu_order = 100
    list_display = ("name", "company_name", "email")
    search_fields = ("name", "company_name", "email")
    list_export = ("name", "company_name", "email")
    panels = [
        FieldPanel("name"),
        FieldPanel("company_name"),
        FieldPanel("email"),
        SnippetChooserPanel("address"),
    ]
class StockWagtailAdmin(ModelAdmin):
    """Wagtail admin for per-warehouse stock levels of product variants."""
    model = Stock
    menu_icon = "table"
    menu_order = 200
    list_display = ("warehouse", "product_variant", "quantity")
    search_fields = ("warehouse", "product_variant")
    list_export = ("warehouse", "product_variant", "quantity")
    panels = [
        FieldPanel("warehouse"),
        FieldPanel("product_variant"),
        FieldPanel("quantity"),
    ]
class AllocationWagtailAdmin(ModelAdmin):
    """Wagtail admin for order-line stock allocations.

    Unlike its siblings it defines no search_fields/list_export/panels,
    so Wagtail's defaults apply.
    """
    model = Allocation
    menu_icon = "chain-broken"
    list_display = ("order_line", "stock", "quantity_allocated")
    menu_order = 300
class InventoryWagtailAdminGroup(ModelAdminGroup):
    """Groups the warehouse/stock/allocation admins under one "Inventory" menu."""
    menu_label = _("Inventory")
    menu_icon = "cogs"
    menu_order = 100
    items = (
        WarehouseWagtailAdmin,
        StockWagtailAdmin,
        AllocationWagtailAdmin,
    )


# Register the whole group with the Wagtail admin site.
modeladmin_register(InventoryWagtailAdminGroup)
|
Chinchu-Thambi/YellowWorks
|
src/scenes/Onboarding/components/CustomWidgets/EstimatedPerformance/index.js
|
// Barrel file: re-export the EstimatedPerformance widget as this directory's default.
export { default } from './EstimatedPerformance';
|
manoldonev/algo-challenges
|
src/test_next_greater_element_ii.py
|
<filename>src/test_next_greater_element_ii.py<gh_stars>0
import unittest
from next_greater_element_ii import next_greater_element
class NextGreaterElementIITests(unittest.TestCase):
    """Tests for next greater element ii challenge."""

    def test_case_1(self):
        # Circular array: the trailing 1 wraps around to the 2 at index 1;
        # the 2 itself has no greater element, hence -1.
        self.assertEqual(next_greater_element([1, 2, 1]), [2, -1, 2])


if __name__ == "__main__":
    unittest.main(verbosity=2)
|
polart/vagrant-registry
|
client/src/components/MyPagination.js
|
import React, {Component} from "react";
import {Pagination} from "react-bootstrap";
export default class MyPagination extends Component {
onPageChange = (page) => {
const location = this.props.router.createLocation({
pathname: this.location.pathname,
query: Object.assign({}, this.location.query, { page }),
});
this.props.router.push(location);
};
render() {
this.location = this.props.router.location;
this.activePage = parseInt(this.location.query.page || 1, 10);
this.perPage = this.props.perPage || 10;
if (this.props.itemsCount <= this.perPage) {
return null;
}
const items = Math.ceil(this.props.itemsCount / this.perPage);
return (
<div className="text-center">
<Pagination
prev
next
first
last
ellipsis
boundaryLinks
items={items}
maxButtons={5}
activePage={this.activePage}
onSelect={this.onPageChange}
/>
</div>
);
}
}
|
GiulianaPola/select_repeats
|
venv/lib/python3.8/site-packages/setuptools/errors.py
|
/home/runner/.cache/pip/pool/b7/84/66/f397979bbd538b4f8f3b580040c44add5ecd377c75b5c6dcc34fb11922
|
a-samir97/E-commerce-django-API
|
products/views.py
|
from rest_framework.viewsets import ModelViewSet
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import permissions, status
from payment.views import get_client_ip
from payment import secrets
import json , requests , hashlib
from rest_framework.generics import (
ListAPIView,
CreateAPIView,
GenericAPIView
)
from .serializers import(
ProductSerializer,
CreateRateProductSerializer,
ProductDetailSerializer,
ListRateProductSerializer
)
from .models import (
Product,
ProductImage,
ProductRateImage,
RateProduct
)
from .pagination import ProductPagination
from categories.models import Category, SubCategory
from users.serializers import UserDataSerializer
from cart.models import Cart, CartItem
from utils import send_sms_messages, send_single_message
import datetime
import asyncio
#######################
#### Product APIS #####
#######################
class ProductAPIViewSet(ModelViewSet):
    """CRUD endpoints for in-stock products, newest first."""
    serializer_class = ProductSerializer
    queryset = Product.objects.all().order_by('-created_at').filter(in_stock__gte=1)
    pagination_class = ProductPagination

    def get_serializer_class(self):
        # Read actions use the list serializer; write actions the detail one.
        if self.action == 'list' or self.action == 'retrieve':
            return ProductSerializer
        else:
            return ProductDetailSerializer

    def get_permissions(self):
        # Anyone may browse; only authenticated users may create/update/delete.
        if self.action == 'list' or self.action == 'retrieve':
            permission_classes = (permissions.AllowAny,)
        else:
            permission_classes = (permissions.IsAuthenticated,)
        return [permission() for permission in permission_classes]

    def perform_create(self, serializer):
        product = serializer.save(owner=self.request.user)
        # QueryDict.lists() keeps every uploaded file under the 'images' key.
        # NOTE(review): raises KeyError when no 'images' key is posted —
        # confirm images are mandatory for product creation.
        images = dict((self.request.data).lists())['images']
        for image in images:
            ProductImage.objects.create(
                product=product,
                img=image
            )
        # Presumably notifies the owner's followers by SMS — confirm.
        send_sms_messages(self.request.user.following.all())
class ToggleFavoriteProductAPI(APIView):
    """Add or remove a product from the requesting user's favorites.

    Message fixes: "favroite" -> "favorite", "is not exist" -> "does not exist".
    """

    def post(self, request, product_id):
        get_product = Product.objects.filter(id=product_id).first()
        if not get_product:
            return Response(
                {'error': 'product id does not exist'},
                status=status.HTTP_404_NOT_FOUND
            )
        current_user = request.user
        if get_product in current_user.favorite_products.all():
            # Already a favorite: toggle it off.
            current_user.favorite_products.remove(get_product)
            return Response(
                {'data': 'product is removed from your favorite products list'},
                status=status.HTTP_200_OK
            )
        # Not a favorite yet: toggle it on.
        current_user.favorite_products.add(get_product)
        return Response(
            {'data': 'product is added to your favorite products list'},
            status=status.HTTP_200_OK
        )
class GetUserFavoriteProductsAPI(ListAPIView):
    """List the authenticated user's favorite products."""
    serializer_class = ProductSerializer

    def get_queryset(self):
        # Favorites live on the user model as a many-to-many relation.
        return self.request.user.favorite_products.all()
class BiddingProductAPI(APIView):
    """Place a bid on an auction (non-fixed-price) product.

    The body must contain either:
      * ``new_price`` -- a direct bid, or
      * ``limit``     -- an automatic-bidding ceiling ('0' cancels it).
    """

    def post(self, request, product_id):
        get_product = Product.objects.filter(id=product_id).first()
        if not get_product:
            return Response(
                {'error': 'product is not exist'},
                status=status.HTTP_404_NOT_FOUND
            )
        if get_product.is_fixed:
            return Response(
                {'error': 'this product has a fixed price'},
                status=status.HTTP_400_BAD_REQUEST
            )
        # normal bidding
        if request.data.get('new_price'):
            new_price = request.data['new_price']
            # check if the new price is greater than current price
            # and check also if the new price is greater than the current bidding limit
            # NOTE(review): new_price comes straight from the request body;
            # whether these comparisons are numeric depends on the payload
            # type vs. the price field's type -- confirm.
            if new_price <= get_product.price:
                return Response(
                    {'error': 'new price should be higher than the current price'},
                    status=status.HTTP_400_BAD_REQUEST
                )
            elif new_price > get_product.price and new_price >= get_product.bidding_limit:
                # Bid clears the automatic-bidding ceiling: bidder takes the lead.
                get_product.price = new_price
                get_product.last_user_bid = request.user
                get_product.save()
            else:
                # Above the price but below the ceiling: the price is pushed one
                # unit past this bid. NOTE(review): last_user_bid is NOT updated
                # here -- presumably the ceiling holder keeps the lead; confirm.
                get_product.price = new_price + 1
                get_product.save()
            # user serializer
            user_serializer = UserDataSerializer(get_product.last_user_bid)
            return Response(
                {
                    'new_price': get_product.price,
                    'user': user_serializer.data
                },
                status=status.HTTP_201_CREATED
            )
        # automatic bidding
        elif request.data.get('limit'):
            limit = request.data['limit']
            # check if the limit is greater than the current price
            # and also greater than the bidding limit
            # NOTE(review): compared against the string '0' here, so limit is
            # apparently sent as a string -- confirm the payload format.
            if limit == '0':
                if request.user != get_product.last_user_bid:
                    return Response(
                        {'error': 'last user bid only should stop the automatic bidding'},
                        status=status.HTTP_400_BAD_REQUEST
                    )
                get_product.bidding_limit = 0
                get_product.save()
                # user serializer
                user_serializer = UserDataSerializer(get_product.last_user_bid)
                return Response(
                    {
                        'price': get_product.price,
                        'limit': get_product.bidding_limit,
                        'user': user_serializer.data
                    }
                )
            elif limit <= get_product.price or limit <= get_product.bidding_limit:
                return Response(
                    {'error': 'your limit should be greater than product price and product bidding limit'},
                    status=status.HTTP_400_BAD_REQUEST
                )
            else:
                # limit > get_product.price and limit > get_product.bidding_limit:
                get_product.last_user_bid = request.user
                get_product.price += 1
                get_product.bidding_limit = limit
                get_product.save()
                # user serializer
                user_serializer = UserDataSerializer(get_product.last_user_bid)
                return Response(
                    {
                        'new_price': get_product.price,
                        'limit': limit,
                        'user': user_serializer.data
                    },
                    status=status.HTTP_201_CREATED
                )
        else:
            return Response(
                {'error': 'user should put new price or bidding limit'},
                status=status.HTTP_400_BAD_REQUEST
            )
class AutomaticBiddingProductAPI(APIView):
    """Advance an automatic bid by one unit while below the bidding limit."""
    permission_classes = (permissions.AllowAny,)

    def post(self, request, product_id):
        get_product = Product.objects.filter(id=product_id).first()
        if not get_product:
            return Response(
                {'error': 'product is not exists'},
                status=status.HTTP_404_NOT_FOUND
            )
        if get_product.bidding_limit > get_product.price:
            get_product.price += 1
            get_product.save()
        # Bug fix: the original returned None (an HTTP 500) once the price
        # reached the bidding limit; always report the current price instead.
        return Response(
            {"data": get_product.price},
            status=status.HTTP_200_OK
        )
class FixedPriceProducts(ListAPIView):
    """Public list of in-stock products sold at a fixed price."""
    queryset = Product.objects.filter(is_fixed=True, in_stock__gte=1)
    serializer_class = ProductSerializer
    permission_classes = (permissions.AllowAny,)
class VariablePriceProducts(ListAPIView):
    """Public list of in-stock auction (non-fixed-price) products."""
    queryset = Product.objects.filter(is_fixed=False, in_stock__gte=1)
    serializer_class = ProductSerializer
    permission_classes = (permissions.AllowAny,)
class LatestProducts(ListAPIView):
    """Public list of in-stock products, newest first."""
    queryset = Product.objects.order_by('-created_at').filter(in_stock__gte=1)
    serializer_class = ProductSerializer
    permission_classes = (permissions.AllowAny,)
class HighPriceProducsts(ListAPIView):
    """Public list of in-stock products, most expensive first.

    NOTE(review): class name typo ("Producsts") kept as-is because it is
    likely referenced from URL configuration elsewhere.
    """
    queryset = Product.objects.order_by('-price').filter(in_stock__gte=1)
    serializer_class = ProductSerializer
    permission_classes = (permissions.AllowAny,)
class LowPriceProducts(ListAPIView):
    """Public list of in-stock products, cheapest first."""
    queryset = Product.objects.order_by('price').filter(in_stock__gte=1)
    serializer_class = ProductSerializer
    # Bug fix: the original assigned `permissions = (...)`, an attribute DRF
    # never reads, so the view silently used the default permissions. The
    # correct attribute (used by every sibling view) is `permission_classes`.
    permission_classes = (permissions.AllowAny,)
class SearchByCategory(APIView):
    """Return in-stock products belonging to the posted category id."""
    permission_classes = (permissions.AllowAny,)

    def post(self, request):
        # Robustness fix: a missing 'id' key previously raised KeyError (500).
        category_id = request.data.get('id')
        if category_id is None:
            return Response(
                {'error': 'id is required'},
                status=status.HTTP_400_BAD_REQUEST
            )
        try:
            category = Category.objects.get(id=category_id)
        except Category.DoesNotExist:
            return Response(
                {'error': 'category does not exist'},
                status=status.HTTP_404_NOT_FOUND
            )
        queryset = category.products.filter(in_stock__gte=1)
        product_serializer = ProductSerializer(queryset, many=True)
        return Response(product_serializer.data, status=status.HTTP_200_OK)
class SearchBySubCategory(APIView):
    """Return in-stock products belonging to the posted sub-category id."""
    permission_classes = (permissions.AllowAny,)

    def post(self, request):
        # Robustness fix: a missing 'id' key previously raised KeyError (500).
        sub_category_id = request.data.get('id')
        if sub_category_id is None:
            return Response(
                {'error': 'id is required'},
                status=status.HTTP_400_BAD_REQUEST
            )
        try:
            sub_category = SubCategory.objects.get(id=sub_category_id)
        except SubCategory.DoesNotExist:
            return Response(
                {'error': 'sub category does not exist'},
                status=status.HTTP_404_NOT_FOUND
            )
        queryset = sub_category.products.filter(in_stock__gte=1)
        product_serializer = ProductSerializer(queryset, many=True)
        return Response(product_serializer.data, status=status.HTTP_200_OK)
class SearchByName(APIView):
    """Case-insensitive product-name search over in-stock products."""
    permission_classes = (permissions.AllowAny,)

    def post(self, request):
        if request.data.get('name'):
            products = Product.objects.filter(name__icontains=request.data['name'], in_stock__gte=1)
            product_serializer = ProductSerializer(products, many=True)
            return Response(product_serializer.data, status=status.HTTP_200_OK)
        else:
            # Message fix: the original asked for a "category name" although
            # this endpoint searches by product name.
            return Response(
                {'error': 'please add param for product name'},
                status=status.HTTP_400_BAD_REQUEST
            )
class EndProductDuration(APIView):
    """Close an auction immediately; only the product owner may do so."""

    def post(self, request, product_id):
        # check if the product exist
        try:
            product = Product.objects.get(id=product_id)
        except Product.DoesNotExist:
            return Response(
                {'error': 'product is not exist'},
                status=status.HTTP_404_NOT_FOUND
            )
        if request.user == product.owner:
            # End the auction now by setting duration to the current time.
            # NOTE(review): naive local time -- confirm against the field's
            # timezone handling (Django USE_TZ).
            product.duration = datetime.datetime.now()
            product.save()
            if product.last_user_bid:
                # SMS to the winning bidder (Arabic; roughly: "congratulations,
                # you won the auction -- go to the cart in your account").
                asyncio.run(send_single_message(product.last_user_bid, 'تهانينا لك, لقد فزت بالمزاد ويجب عليك التوجه الي سلة المشتريات الخاصة بحسابك الشخصي'))
                get_cart, _ = Cart.objects.get_or_create(user=product.last_user_bid)
                # add product to the cart item
                CartItem.objects.create(
                    product=product,
                    quantity=1,
                    cart=get_cart
                )
            product_serializer = ProductSerializer(product)
            return Response(
                product_serializer.data,
                status=status.HTTP_200_OK
            )
        else:
            return Response(
                {'error': 'user should be the owner of the product'},
                status=status.HTTP_400_BAD_REQUEST
            )
###########################
#### RateProduct APIS #####
###########################
class RequestRateProduct(CreateAPIView):
    '''
    Create a rate-product request and start an URWAY payment for it.

    Request data:
        name: name of the product
        description: description of the product
        images : one or more than one images
        uploaded_photo: "True" or "False"
        category: category id

    Returns the serialized rate product together with the payment redirect
    URL, or the serializer/payment errors.
    '''
    serializer_class = CreateRateProductSerializer

    def post(self, request):
        rate_product = self.serializer_class(data=request.data)
        ip_address = get_client_ip(request)
        data = []
        if rate_product.is_valid(raise_exception=True):
            rate_product.save(owner=self.request.user)
            get_rate_product = RateProduct.objects.get(id=int(rate_product.data['id']))
            # NOTE(review): raises KeyError when no 'images' key is posted --
            # confirm images are mandatory for this endpoint.
            images = dict(self.request.data)['images']
            for image in images:
                ProductRateImage.objects.create(
                    rate_product=get_rate_product,
                    img=image
                )
            # Price = category base price + 4% processing fee; the base depends
            # on whether the customer uploaded their own photo.
            if get_rate_product.uploaded_photo == True:
                base_price = get_rate_product.category.uploaded_price
            else:
                base_price = get_rate_product.category.msawm_team_price
            get_rate_product.price = base_price + (base_price * 0.04)
            get_rate_product.save()
            # Payment operation for the rate product request.
            posted = {
                'terminalId': secrets.TERMINAL_ID,
                # Fix: this value was a redaction artifact (`<PASSWORD>`) that
                # did not parse; restored from the secrets module, which
                # supplies every other credential in this dict.
                'password': secrets.PASSWORD,
                'secret': secrets.MERCHANT_SECRET_KEY,
                'currency': secrets.CURRENCY,
                'country': secrets.COUNTRY,
                'action': secrets.ACTION,
                'trackid': str(get_rate_product.id),
                'customerEmail': request.user.email,
                'merchantIp': ip_address,
                'amount': str(get_rate_product.price),
                'udf1': "Request Rate Product",
                # TODO(review): dev tunnel URL -- move to settings/secrets.
                'udf2': "http://581f6f004c50.ngrok.io/payment/payment_receipt/",
                "udf3": request.user.id,
            }
            # Request hash: sha256 over the pipe-joined credential/amount fields.
            hash_sequence = "|".join([
                posted['trackid'], posted['terminalId'], posted['password'],
                posted['secret'], posted['amount'], posted['currency'],
            ])
            posted["requestHash"] = hashlib.sha256(hash_sequence.encode()).hexdigest()
            api_url = "https://payments-dev.urway-tech.com/URWAYPGService/transaction/jsonProcess/JSONrequest"
            response = requests.request("POST", api_url, data=json.dumps(posted))
            res = response.json()
            payment_id = json.dumps(res["payid"])
            target_url = json.dumps(res["targetUrl"])
            redirect_url = (target_url + "?paymentid=" + payment_id).replace('"', '')
            if 'null' in redirect_url:
                return Response(
                    {'error': 'there is something wrong, please try again'},
                    status=status.HTTP_400_BAD_REQUEST
                )
            data.append({"data": rate_product.data, "payment_url": redirect_url})
            return Response(
                data,
                status=status.HTTP_200_OK
            )
        else:
            return Response(rate_product.errors, status=status.HTTP_400_BAD_REQUEST)
class GetAllRatedProduct(APIView):
    """Return the current user's rate-product requests that have been rated."""

    def get(self, request):
        rated = RateProduct.objects.filter(owner=self.request.user, is_rated=True)
        payload = ListRateProductSerializer(rated, many=True).data
        return Response(
            {'data': payload},
            status=status.HTTP_200_OK
        )
|
kenjin/DSAlgo
|
LeetCode/0268_Missing-Number/0268_Missing-Number.c
|
<filename>LeetCode/0268_Missing-Number/0268_Missing-Number.c
/*
 * Returns the single value missing from nums, which holds numsSize distinct
 * numbers drawn from the range [0, numsSize].
 *
 * XOR-folds the indices 0..numsSize with the array elements: every present
 * value cancels against its matching index, leaving only the missing number.
 * Unlike the original arithmetic-sum formula ((1+n)*n/2), this cannot
 * overflow int for large numsSize.
 */
int missingNumber(int* nums, int numsSize)
{
    int ret = numsSize; /* accounts for the index value numsSize itself */
    for (int i = 0; i < numsSize; i++)
    {
        ret ^= i ^ nums[i];
    }
    return ret;
}
|
kupci/team-explorer-everywhere
|
source/com.microsoft.tfs.client.common.ui/src/com/microsoft/tfs/client/common/ui/buildmanager/BuildManager.java
|
<filename>source/com.microsoft.tfs.client.common.ui/src/com/microsoft/tfs/client/common/ui/buildmanager/BuildManager.java
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the repository root.
package com.microsoft.tfs.client.common.ui.buildmanager;
import com.microsoft.tfs.core.clients.build.IBuildDetail;
import com.microsoft.tfs.core.clients.build.IQueuedBuild;
import com.microsoft.tfs.util.listeners.SingleListenerFacade;
/**
 * Central dispatcher for build lifecycle events. Listeners register here and
 * are notified through the fire* methods.
 */
public class BuildManager {
    /** Multiplexes all registered listeners behind a single facade. */
    private final SingleListenerFacade listeners = new SingleListenerFacade(BuildManagerListener.class);

    public final void addBuildManagerListener(final BuildManagerListener listener) {
        listeners.addListener(listener);
    }

    public final void removeBuildManagerListener(final BuildManagerListener listener) {
        listeners.removeListener(listener);
    }

    /** Returns the facade's aggregate listener, cast once for all fire methods. */
    private BuildManagerListener listener() {
        return (BuildManagerListener) listeners.getListener();
    }

    /** Notifies listeners that a single property of a build changed. */
    public final void fireBuildPropertyChangedEvent(
        final Object source,
        final String buildUri,
        final String buildProperty) {
        listener().onBuildPropertyChanged(new BuildPropertyChangedEvent(source, buildUri, buildProperty));
    }

    /** Notifies listeners that details of the given builds changed. */
    public final void fireBuildDetailsChangedEvent(final Object source, final IBuildDetail[] buildDetails) {
        listener().onBuildDetailsChanged(new BuildManagerEvent(source, buildDetails));
    }

    /** Notifies listeners that a build was queued. */
    public final void fireBuildQueuedEvent(final Object source, final IQueuedBuild queuedBuild) {
        listener().onBuildQueued(new BuildManagerEvent(source, queuedBuild));
    }

    /** Notifies listeners that the build with the given URI was stopped. */
    public final void fireBuildStoppedEvent(final Object source, final String buildUri) {
        listener().onBuildStopped(new BuildManagerEvent(source, buildUri));
    }

    /** Notifies listeners that the build was postponed or resumed. */
    public final void fireBuildPostponedOrResumedEvent(final Object source, final String buildUri) {
        listener().onBuildPostponedOrResumed(new BuildManagerEvent(source, buildUri));
    }

    /** Notifies listeners that queued-build priorities changed. */
    public final void fireBuildPrioritiesChangedEvent(final Object source, final IQueuedBuild[] affectedBuilds) {
        listener().onBuildPrioritiesChanged(new BuildManagerEvent(source, affectedBuilds));
    }

    /** Notifies listeners that a single build was deleted. */
    public final void fireBuildDeletedEvent(final Object source, final String buildUri) {
        listener().onBuildDeleted(new BuildManagerEvent(source, buildUri));
    }

    /** Notifies listeners that several builds were deleted. */
    public final void fireBuildsDeletedEvent(final Object source, final IBuildDetail[] deletedBuilds) {
        listener().onBuildsDeleted(new BuildManagerEvent(source, deletedBuilds));
    }
}
|
Webiny/Core
|
Js/Ui/Vendors/DateTimePicker/index.js
|
import './styles.scss?extract';
import 'eonasdan-bootstrap-datetimepicker';
|
zhaoyupeng/prm
|
src/test/java/RBAC_RoleTest.java
|
import com.yonyou.dto.Role;
import com.yonyou.service.RoleService;
import org.junit.Test;
/**
 * Permission (RBAC) system tests for the role service.
 * Created by shidl on 2017/6/7.
 */
public class RBAC_RoleTest extends BaseTest {

    @Test
    public void save_role() throws Exception {
        // Resolve the service from the Spring context supplied by BaseTest.
        RoleService roleService = (RoleService) this.wac.getBean("roleService");
        Role role = new Role();
        role.setRole_code("admin");
        role.setRole_name("admin");
        // NOTE(review): no assertions -- only verifies save_role does not throw.
        roleService.save_role(role);
    }

    @Test
    public void remove_role () throws Exception {
        RoleService roleService = (RoleService) this.wac.getBean("roleService");
        // NOTE(review): hard-coded UUID from a specific database snapshot --
        // this test is environment-dependent.
        roleService.remove_role("537df13b-4b87-11e7-874a-28d24480b297");
    }
}
|
TrustedBSD/sebsd
|
usr.sbin/ppp/prompt.h
|
/*-
* Copyright (c) 1998 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $FreeBSD: src/usr.sbin/ppp/prompt.h,v 1.10 2002/08/27 20:11:58 brian Exp $
*/
#define LOCAL_AUTH 0x01
#define LOCAL_NO_AUTH 0x02
#define LOCAL_DENY 0x03
#define LOCAL_CX 0x04 /* OR'd value - require a context */
#define LOCAL_CX_OPT 0x08 /* OR'd value - optional context */
struct server;
struct datalink;
struct bundle;
struct cmdargs;
/*
 * State for one interactive control session. The embedded descriptor must
 * stay the first member: descriptor2prompt() (below) casts a
 * struct fdescriptor * back to a struct prompt *.
 */
struct prompt {
  struct fdescriptor desc;   /* must remain the first member (see above) */
  int fd_in, fd_out;         /* underlying input/output file descriptors */
  struct datalink *TermMode; /* The modem we're talking directly to */
  FILE *Term;                /* sits on top of fd_out */
  u_char auth;               /* Local Authorized status */
  struct server *owner;      /* who created me */
  struct bundle *bundle;     /* who I'm controlling */
  unsigned nonewline : 1;    /* need a newline before our prompt ? */
  unsigned needprompt : 1;   /* Show a prompt at the next UpdateSet() */
  unsigned active : 1;       /* Is the prompt active (^Z) */
  unsigned readtilde : 1;    /* We've read a ``~'' from fd_in */
  struct {
    const char *type;        /* Type of connection */
    char from[40];           /* Source of connection */
  } src;                     /* where this session came from */
  struct prompt *next;       /* Maintained in log.c */
  u_long logmask;            /* Maintained in log.c */
  struct termios oldtio;     /* Original tty mode */
  struct termios comtio;     /* Command level tty mode */
};
#define descriptor2prompt(d) \
((d)->type == PROMPT_DESCRIPTOR ? (struct prompt *)(d) : NULL)
#define PROMPT_STD (-1)
extern struct prompt *prompt_Create(struct server *, struct bundle *, int);
extern void prompt_Destroy(struct prompt *, int);
extern void prompt_Required(struct prompt *);
#ifdef __GNUC__
extern void prompt_Printf(struct prompt *, const char *, ...)
__attribute__ ((format (printf, 2, 3)));
#else
extern void prompt_Printf(struct prompt *, const char *, ...);
#endif
#ifdef __GNUC__
extern void prompt_vPrintf(struct prompt *, const char *, va_list)
__attribute__ ((format (printf, 2, 0)));
#else
extern void prompt_vPrintf(struct prompt *, const char *, va_list);
#endif
#define PROMPT_DONT_WANT_INT 1
#define PROMPT_WANT_INT 0
extern void prompt_TtyInit(struct prompt *);
extern void prompt_TtyCommandMode(struct prompt *);
extern void prompt_TtyTermMode(struct prompt *, struct datalink *);
extern void prompt_TtyOldMode(struct prompt *);
extern pid_t prompt_pgrp(struct prompt *);
extern int PasswdCommand(struct cmdargs const *);
extern void prompt_Suspend(struct prompt *);
extern void prompt_Continue(struct prompt *);
#define prompt_IsTermMode(p, dl) ((p)->TermMode == (dl) ? 1 : 0)
#define prompt_IsController(p) (!(p) || (p)->owner ? 0 : 1)
#define prompt_Required(p) ((p)->needprompt = 1)
|
arock121/pocketnet.core
|
src/reindexer/core/type_consts.h
|
<filename>src/reindexer/core/type_consts.h
#pragma once
#include <stdint.h>
enum { TAG_VARINT, TAG_DOUBLE, TAG_STRING, TAG_BOOL, TAG_NULL, TAG_ARRAY, TAG_OBJECT, TAG_END };
typedef enum KeyValueType {
KeyValueInt64 = TAG_VARINT,
KeyValueDouble = TAG_DOUBLE,
KeyValueString = TAG_STRING,
KeyValueBool = TAG_BOOL,
KeyValueNull = TAG_NULL,
KeyValueInt = TAG_END + 1,
KeyValueUndefined,
KeyValueComposite,
KeyValueTuple,
} KeyValueType;
typedef enum IndexType {
IndexStrHash = 0,
IndexStrBTree = 1,
IndexIntBTree = 2,
IndexIntHash = 3,
IndexInt64BTree = 4,
IndexInt64Hash = 5,
IndexDoubleBTree = 6,
IndexFastFT = 7,
IndexFuzzyFT = 8,
IndexCompositeBTree = 9,
IndexCompositeHash = 10,
IndexCompositeFastFT = 11,
IndexBool = 12,
IndexIntStore = 13,
IndexInt64Store = 14,
IndexStrStore = 15,
IndexDoubleStore = 16,
IndexCompositeFuzzyFT = 17,
} IndexType;
typedef enum QueryItemType {
QueryCondition,
QueryDistinct,
QuerySortIndex,
QueryJoinOn,
QueryLimit,
QueryOffset,
QueryReqTotal,
QueryDebugLevel,
QueryAggregation,
QuerySelectFilter,
QuerySelectFunction,
QueryEnd,
QueryExplain,
QueryEqualPosition,
} QueryItemType;
typedef enum QuerySerializeMode {
Normal = 0x0,
SkipJoinQueries = 0x01,
SkipMergeQueries = 0x02,
SkipLimitOffset = 0x04
} QuerySerializeMode;
typedef enum CondType {
CondAny = 0,
CondEq = 1,
CondLt = 2,
CondLe = 3,
CondGt = 4,
CondGe = 5,
CondRange = 6,
CondSet = 7,
CondAllSet = 8,
CondEmpty = 9,
} CondType;
enum ErrorCode {
errOK = 0,
errParseSQL = 1,
errQueryExec = 2,
errParams = 3,
errLogic = 4,
errParseJson = 5,
errParseDSL = 6,
errConflict = 7,
errParseBin = 8,
errForbidden = 9,
errWasRelock = 10,
errNotValid = 11,
errNetwork = 12,
errNotFound = 13,
errStateInvalidated = 14
};
enum OpType { OpOr = 1, OpAnd = 2, OpNot = 3 };
enum AggType { AggSum, AggAvg, AggFacet, AggMin, AggMax };
enum JoinType { LeftJoin, InnerJoin, OrInnerJoin, MergeR };
enum CalcTotalMode { ModeNoTotal, ModeCachedTotal, ModeAccurateTotal };
enum DataFormat { FormatJson, FormatCJson };
enum QueryResultItemType { QueryResultEnd, QueryResultAggregation, QueryResultExplain };
enum CacheMode { CacheModeOn = 0, CacheModeAggressive = 1, CacheModeOff = 2 };
typedef int IdType;
typedef unsigned SortType;
static const SortType SortIdUnfilled = -1;
static const SortType SortIdUnexists = -2;
typedef enum LogLevel { LogNone, LogError, LogWarning, LogInfo, LogTrace } LogLevel;
enum {
kResultsFormatMask = 0xF,
kResultsPure = 0x0,
kResultsPtrs = 0x1,
kResultsCJson = 0x2,
kResultsJson = 0x3,
kResultsWithPayloadTypes = 0x10,
kResultsWithItemID = 0x20,
kResultsWithPercents = 0x40,
kResultsWithNsID = 0x80,
kResultsWithJoined = 0x100
};
typedef enum IndexOpt { kIndexOptPK = 1 << 7, kIndexOptArray = 1 << 6, kIndexOptDense = 1 << 5, kIndexOptSparse = 1 << 3 } IndexOpt;
/*
 * Bit flags combined into StorageOpts.options below.
 * NOTE(review): the enum tag is spelled "StotageOpt" while the typedef name
 * is "StorageOpt"; the tag typo is kept as-is since external code may
 * reference it.
 */
typedef enum StotageOpt {
  kStorageOptEnabled = 1 << 0,
  kStorageOptDropOnFileFormatError = 1 << 1,
  kStorageOptCreateIfMissing = 1 << 2,
  kStorageOptVerifyChecksums = 1 << 3,
  kStorageOptFillCache = 1 << 4,
  kStorageOptSync = 1 << 5
} StorageOpt;
enum CollateMode { CollateNone = 0, CollateASCII, CollateUTF8, CollateNumeric, CollateCustom };
enum ItemModifyMode { ModeUpdate = 0, ModeInsert = 1, ModeUpsert = 2, ModeDelete = 3 };
/*
 * Storage option bitmask with C++-only accessors and chainable setters
 * (the struct stays a plain one-byte POD for C callers).
 */
typedef struct StorageOpts {
#ifdef __cplusplus
  /* All options start cleared. */
  StorageOpts() : options(0) {}

  /* Flag queries. */
  bool IsEnabled() const { return options & kStorageOptEnabled; }
  bool IsDropOnFileFormatError() const { return options & kStorageOptDropOnFileFormatError; }
  bool IsCreateIfMissing() const { return options & kStorageOptCreateIfMissing; }
  bool IsVerifyChecksums() const { return options & kStorageOptVerifyChecksums; }
  bool IsFillCache() const { return options & kStorageOptFillCache; }
  bool IsSync() const { return options & kStorageOptSync; }

  /* Chainable setters: set or clear one flag and return *this. */
  StorageOpts& Enabled(bool value = true) {
    options = value ? options | kStorageOptEnabled : options & ~(kStorageOptEnabled);
    return *this;
  }
  StorageOpts& DropOnFileFormatError(bool value = true) {
    options = value ? options | kStorageOptDropOnFileFormatError : options & ~(kStorageOptDropOnFileFormatError);
    return *this;
  }
  StorageOpts& CreateIfMissing(bool value = true) {
    options = value ? options | kStorageOptCreateIfMissing : options & ~(kStorageOptCreateIfMissing);
    return *this;
  }
  StorageOpts& VerifyChecksums(bool value = true) {
    options = value ? options | kStorageOptVerifyChecksums : options & ~(kStorageOptVerifyChecksums);
    return *this;
  }
  StorageOpts& FillCache(bool value = true) {
    options = value ? options | kStorageOptFillCache : options & ~(kStorageOptFillCache);
    return *this;
  }
  StorageOpts& Sync(bool value = true) {
    options = value ? options | kStorageOptSync : options & ~(kStorageOptSync);
    return *this;
  }
#endif
  uint8_t options; /* bitwise OR of StotageOpt flags */
} StorageOpts;
|
Dzuming/home-finance
|
src/actions/apiActions.spec.js
|
// Snapshot tests for the API-request action creators: each test records the
// action object produced by the creator and compares it against the stored
// Jest snapshot.
import * as actions from './apiActions';
import expect from 'expect';

describe('api actions', function () {
  it('should create API_REQUEST_PENDING action with snapshot', function () {
    expect(actions.apiRequestPending()).toMatchSnapshot();
  });

  it('should create API_REQUEST_SUCCESS action with snapshot', function () {
    expect(actions.apiRequestSuccess()).toMatchSnapshot();
  });
});
|
ashutoshcipher/oozie
|
sharelib/sqoop/src/test/java/org/apache/oozie/action/hadoop/TestSqoopMain.java
|
<reponame>ashutoshcipher/oozie
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.action.hadoop;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.oozie.test.XTestCase;
import org.junit.Assert;
/**
 * Unit tests for Sqoop action support: extraction of Hadoop job IDs from
 * Sqoop console output, and Tez delegation-token propagation into the
 * configuration produced by {@code SqoopMain.setUpSqoopSite()}.
 */
public class TestSqoopMain extends XTestCase {
/**
 * Feeds representative log lines through
 * {@code LauncherMain.extractJobIDs} with {@code SqoopMain.SQOOP_JOB_IDS_PATTERNS}
 * and verifies that exactly the matching IDs are collected.
 */
public void testJobIDPattern() {
List<String> lines = new ArrayList<String>();
lines.add("Job complete: job_001");
lines.add("Job job_002 has completed successfully");
lines.add("Submitted application application_003");
// Non-matching ones
lines.add("Job complete: application_004");
lines.add("Job application_005 has completed successfully");
lines.add("Submitted application job_006");
Set<String> jobIds = new LinkedHashSet<String>();
for (String line : lines) {
LauncherMain.extractJobIDs(line, SqoopMain.SQOOP_JOB_IDS_PATTERNS,
jobIds);
}
Set<String> expected = new LinkedHashSet<String>();
expected.add("job_001");
expected.add("job_002");
// NOTE(review): "application_003" is expected to surface as "job_003" --
// presumably extractJobIDs normalizes application IDs to job IDs; confirm
// against LauncherMain's implementation.
expected.add("job_003");
assertEquals(expected, jobIds);
}
/**
 * Verifies that when a Hadoop delegation-token file location is present in
 * the environment (mocked below), setUpSqoopSite() adds the Tez credentials
 * path property to the generated configuration.
 */
public void testIfDelegationTokenForTezAdded() throws Exception {
final File actionXml = new File(LauncherAM.ACTION_CONF_XML);
// Replace the process environment lookup with the mock so the token file
// location appears to be set.
LauncherMain.sysenv = new MockedSystemEnvironment();
setSystemProperty(LauncherAM.OOZIE_ACTION_CONF_XML, LauncherAM.ACTION_CONF_XML);
try {
actionXml.createNewFile();
final Configuration conf = SqoopMain.setUpSqoopSite();
Assert.assertNotNull(
String.format("Property [%s] shall be part of configuration", SqoopMain.TEZ_CREDENTIALS_PATH),
conf.get(SqoopMain.TEZ_CREDENTIALS_PATH));
} finally {
actionXml.delete();
// sqoop-site.xml stays there after test run so that shall be cleaned up explicitly
new File(SqoopMain.SQOOP_SITE_CONF).delete();
}
}
/**
 * System-environment stub that reports the HADOOP_TOKEN_FILE_LOCATION
 * variable as set (to its own name) and delegates everything else to the
 * real environment.
 */
class MockedSystemEnvironment extends SystemEnvironment {
@Override
public String getenv(final String name) {
if(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION.equals(name)) {
return UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION;
}
return super.getenv(name);
}
}
}
|
maguangyan/BaslerCamera-1
|
baslerSDK/pylon/include/clprotocol/CLProtocol.h
|
//-----------------------------------------------------------------------------
// (c) 2008 by Basler Vision Technologies
// Section: Vision Components
// Project: GenApi
// Author: <NAME>
// $Header$
//
// License: This file is published under the license of the EMVA GenICam Standard Group.
// A text file describing the legal terms is included in your installation as 'GenICam_license.pdf'.
// If for some reason you are missing this file please contact the EMVA or visit the website
// (http://www.genicam.org) for a full copy.
//
// THIS SOFTWARE IS PROVIDED BY THE EMVA GENICAM STANDARD GROUP "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE EMVA GENICAM STANDARD GROUP
// OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//-----------------------------------------------------------------------------
/*!
\file
\brief Exported C functions for camera link protocol drivers
*/
#ifndef CLPROTOCOL_CLPROTOCOL_H
#define CLPROTOCOL_CLPROTOCOL_H
#include <CLProtocol/ClSerialTypes.h>
#include <stdarg.h>
//==============================================================================
// Macros
//==============================================================================
// USER_EXTERN_C* wrap declarations in C linkage when compiled as C++ and
// expand to nothing for plain C translation units.
#if (defined(__cplusplus) || defined(__cplusplus__))
#define USER_EXTERN_C extern "C"
#define USER_EXTERN_C_START extern "C" {
#define USER_EXTERN_C_END }
#else
#define USER_EXTERN_C
#define USER_EXTERN_C_START
#define USER_EXTERN_C_END
#endif
// CLPROTOCOLEXPORT / CLPROTOCOL_DECL: symbol export decoration. When the
// library itself is being built (CLPROTOCOL_EXPORTS defined) symbols are
// exported; otherwise they are imported (Windows) or given default
// visibility (ELF platforms).
#ifdef CLPROTOCOL_EXPORTS
#if defined(_WIN32)
#define CLPROTOCOLEXPORT USER_EXTERN_C __declspec(dllexport)
#define CLPROTOCOL_DECL __declspec(dllexport)
#else
#define CLPROTOCOLEXPORT USER_EXTERN_C __attribute__((visibility("default")))
#define CLPROTOCOL_DECL __attribute__((visibility("default")))
#endif
#else
#if defined(_WIN32)
#define CLPROTOCOLEXPORT USER_EXTERN_C __declspec(dllimport)
#define CLPROTOCOL_DECL __declspec(dllimport)
#else
#define CLPROTOCOLEXPORT USER_EXTERN_C __attribute__((visibility("default")))
#define CLPROTOCOL_DECL __attribute__((visibility("default")))
#endif
#endif
// CLPROTOCOL: calling convention for the exported functions -- cdecl where
// the ABI supports it, otherwise the platform default.
#ifndef CLPROTOCOL
#if defined(_WIN32)
#define CLPROTOCOL __cdecl
#else
#if !defined(__x86_64) && !defined(__arm__) && !defined(VXWORKS)
#define CLPROTOCOL __attribute__((cdecl))
#else
#define CLPROTOCOL /* use default calling convention */
#endif
#endif
#endif
//! Signature of the client-supplied logging callback: receives a severity
//! (a CLP_LOG_LEVEL_VALUE) and a printf-style format plus its arguments.
typedef void (CLPROTOCOL *clp_logger_t)( CLINT32 level, const char* stringFormat, va_list argptr);
USER_EXTERN_C_START
//! Descriptor passed through clpSetParam(CLP_LOG_CALLBACK) to install a logger.
typedef struct clpLogCallback_t
{
CLUINT32 id; //!< Must be set to 0xeb6e1ca9 (magic identifying this struct)
clp_logger_t logger; //!< The address of the logging function
}clpLogCallback_t;
USER_EXTERN_C_END
//! Types of parameters to be accessed by clpGetParam/clpSetParam
enum CLP_PARAMS
{
// Global parameters that do not use the Cookie when called.
CLP_LOG_LEVEL = 1, //!< A 32-bit value that contains the current log level.
//!< This parameter expects a value represented by the
//!< CLP_LOG_LEVEL_VALUE constants.
CLP_LOG_CALLBACK = 2, //!< A clpLogCallback_t pointer to the logger function.
//!< NOTE(review): original doc said "32-bit value" -- pointer-sized on
//!< 64-bit platforms; confirm the intended ABI.
CLP_STOP_PROBE_DEVICE = 3, //!< A 32-bit value that contains a signal to stop all running ProbeDevice.
// Device related parameters that require a valid cookie to execute properly.
CLP_DEVICE_BAUDERATE = -1, //!< A 32-bit value that contains the baudrate used to communicate with the device.
//!< This parameter expects a value represented by the
//!< CL_BAUDRATE_XXX constants.
CLP_DEVICE_SUPPORTED_BAUDERATES = -2, //!< A 32-bit value that contains the baudrates
//!< supported by the device and the host.
//!< This read only parameter returns one or more value(s) represented by the
//!< CL_BAUDRATE_XXX constants.
};
//! Severity levels for the logging callback; larger values are more verbose.
enum CLP_LOG_LEVEL_VALUE
{
CLP_LOG_FATAL = 0,
CLP_LOG_ALERT = 100,
CLP_LOG_CRIT = 200,
CLP_LOG_ERROR = 300,
CLP_LOG_WARN = 400,
CLP_LOG_NOTICE = 500,
CLP_LOG_INFO = 600,
CLP_LOG_DEBUG = 700,
CLP_LOG_NOTSET = 800
};
//==============================================================================
// Exported Functions
//==============================================================================
class ISerial;
/*! \ingroup PublicInterfaces */
/*@{*/
/*! \defgroup CLProtocolInterface CLProtocol C-Function Interface */
/*@{*/
/*!
\brief This function is called to initialize the library after it is loaded.
\param[in] logger A function pointer that contains a logging function.
\param[in] logLevel The log level default value. This argument expects a value represented
by the CLP_LOG_LEVEL_VALUE constants.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_INVALID_PTR The initialisation parameters are invalid.
\retval CL_ERR_IN_USE The library is already open
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpInitLib( clp_logger_t logger,
CLP_LOG_LEVEL_VALUE logLevel);
/*!
\brief This function is called to close the library before unloading it.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpCloseLib( void );
/*!
\brief This function returns a list of short device templates coded into a single string
Short device templates can have the following form:
- <CODE>"CameraManufacturer"</CODE>
- <CODE>"CameraManufacturer#CameraFamily"</CODE>
- <CODE>"CameraManufacturer#CameraFamily#CameraModel"</CODE>
As compared to the long version, the protocol driver DLL's directory and file name is missing at the beginning.
The device templates are separated by tabulators.
The whole list is contained in a NULL terminated string.
\param[out] pShortDeviceTemplates A pointer to a user-allocated buffer into which the function
copies the list of device template names.
\param[in,out] pBufferSize As an input, the value contains the size of the buffer that
is passed through the pShortDeviceTemplates parameter.
On successful return, this parameter contains the number of bytes written into the buffer,
including the NULL termination character.
On CL_ERR_BUFFER_TOO_SMALL, this parameter contains the size of the buffer needed to write
the data text.
\return At completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_BUFFER_TOO_SMALL The pShortDeviceTemplates buffer is too small
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL clpGetShortDeviceIDTemplates ( CLINT8* pShortDeviceTemplates, CLUINT32* pBufferSize );
/*!
\brief Connects and identifies the device and returns a DeviceID
This function connects to a device attached to the serial port and attempts to identify it using the DeviceID template as hint.
If the function is successful it returns a DeviceID as well as a Cookie. The DeviceID has the form:
<CODE>"CameraManufacturer#CameraFamily#CameraModel#Version.2.3.4#SerialNumber"</CODE>
The connection is made with 9600 baud which is the power up baud rate as defined by the CameraLink standard.
This function attempts to re-connect the device if a DeviceID is passed instead of a template as pDeviceIDTemplate.
\param[in] pSerial Pointer to the interface giving access to the serial port
\param[in] pDeviceIDTemplate A pointer to NULL-terminated string containing a device ID template
(in the probe use case) or a device ID (in the connect use case)
\param[in, out] pDeviceID A pointer to a user-allocated buffer. If DeviceIDTemplate is valid
and a device is found and identified the DLL copies a device ID to the buffer.
\param[in,out] pBufferSize As an input, the value contains the size of the buffer that
is passed through the pDeviceID parameter.
On successful return, this parameter contains the number of bytes written into the buffer,
including the NULL termination character.
On CL_ERR_BUFFER_TOO_SMALL, this parameter contains the size of the buffer needed to write
the data text.
\param[out] pCookie A token supplied by the driver dll which must be handed in by all functions
called later to access the device. The dll's implementation can use this token to
cache information about the connected device thus avoiding to have to parse the deviceID again.
Note that the cookie value must not be 0.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
Note that the execution of the function itself can take longer than TimeOut because more than one
access might be necessary.
\return At completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_BUFFER_TOO_SMALL The pDeviceID buffer is too small
\retval CL_ERR_INVALID_DEVICEID The DeviceID is not recognized as valid by this dll
\retval CL_ERR_NO_DEVICE_FOUND There was no device found matching the DeviceID
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL clpProbeDevice( ISerial *pSerial, const CLINT8* pDeviceIDTemplate, CLINT8* pDeviceID, CLUINT32 *pBufferSize, CLUINT32 *pCookie, const CLUINT32 TimeOut );
/*!
\brief This function returns a list of XML IDs coded into a single string
XML IDs have the following form:
<CODE>"SchemaVersion.1.1@CameraManufacturer#CameraFamily#CameraModel@XMLVersion.2.3.4"</CODE>
The first part of the XML ID denotes the major and minor version number of the XML schema.
The last part of the XML ID denotes the major, minor, and subminor version number of the camera.
Both version numbers must be the same as given in the attributes of the RegisterDescription node
of the corresponding XML file.
The middle part of the XML ID must be a complete DeviceID.
The device templates are separated by tabulators.
The whole list is contained in a NULL terminated string.
\param[in] pSerial Pointer to the interface giving access to the serial port
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\param[out] pXMLIDs A pointer to a user-allocated buffer into which the function
copies the list of XML IDs. The list may be empty, in which case it contains only a NUL character.
\param[in,out] pBufferSize As an input, the value contains the size of the buffer that
is passed through the pXMLIDs parameter.
On successful return, this parameter contains the number of bytes written into the buffer,
including the NULL termination character.
On CL_ERR_BUFFER_TOO_SMALL, this parameter contains the size of the buffer needed to write
the data text.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
Note that the execution of the function itself can take longer than TimeOut because more than one
access might be necessary.
\return At completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_BUFFER_TOO_SMALL The pXMLIDs buffer is too small
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL clpGetXMLIDs ( ISerial *pSerial, const CLUINT32 Cookie, CLINT8* pXMLIDs, CLUINT32* pBufferSize, const CLUINT32 TimeOut );
/*!
\brief This function retrieves an XML file from the camera if possible
The DLL may ask the camera for the XML code, or may have the code as a resource compiled in
itself, or may even have its own private registry mechanism installed.
In order to support the latter use case the long DeviceID is supplied which contains
the driver dll's directory.
\param[in] pSerial Pointer to the interface giving access to the serial port
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\param[in] pXMLID A pointer to a NULL terminated string containing the connected device's XML ID
\param[out] pXMLBuffer A pointer to a user-allocated buffer. Can be NULL if only the size of the
buffer is requested (see pBufferSize)
\param[in,out] pBufferSize As an input, the value contains the size of the buffer that
is passed through the pXMLBuffer parameter.
On successful return, this parameter contains the number of bytes written into the buffer,
including the NULL termination character.
On CL_ERR_BUFFER_TOO_SMALL, this parameter contains the size of the buffer needed to write
the data text.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
Note that the execution of the function itself can take longer than TimeOut because more than one
access might be necessary.
\return At completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_BUFFER_TOO_SMALL The pXMLBuffer buffer is too small
\retval CL_ERR_NO_XMLDESCRIPTION_FOUND No XML description could be retrieved for the given XML ID
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpGetXMLDescription( ISerial *pSerial, const CLUINT32 Cookie, const CLINT8* pXMLID, CLINT8* pXMLBuffer, CLUINT32* pBufferSize, const CLUINT32 TimeOut );
/*!
\brief This function reads BufferSize bytes of register data from the device, starting at Address.
The call returns when the requested data is available or when the TimeOut period has passed.
Upon success, the data read is copied into pBuffer.
In the case of any error, including CL_ERR_TIMEOUT, no data is copied into pBuffer.
\param[in] pSerial The interface giving access to the serial port
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\param[in] Address Address the data is to be read from
\param[out] pBuffer Points to a user-allocated buffer. Upon a successful call,
the buffer contains the data read from the device.
Caller should ensure that the buffer is at least BufferSize bytes in size.
\param[in] BufferSize This is the number of bytes requested by the caller.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
Note that there may be specific commands which may take longer than TimeOut to be
accessed. Since clpReadRegister is missing a CL_ERR_PENDING_WRITE mechanism the
CLProtocol DLL may be forced for these commands to delay the return for more than TimeOut.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpReadRegister(ISerial *pSerial, const CLUINT32 Cookie, const CLINT64 Address, CLINT8* pBuffer, const CLINT64 BufferSize, const CLUINT32 TimeOut );
/*!
\brief This function writes BufferSize bytes of register data to the device, starting at Address.
The call returns when the data has been written or when the TimeOut period has passed
(but see the CL_ERR_PENDING_WRITE mechanism below).
\param[in] pSerial The interface giving access to the serial port
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\param[in] Address Address the data is to be written to
\param[in] pBuffer Points to a buffer containing the data to be written.
Caller should ensure that the buffer is at least BufferSize bytes in size.
\param[in] BufferSize This is the number of bytes to be written.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
If the CLProtocol DLL knows for specific registers that accessing them will take longer
than TimeOut for a good reason, e.g., because some re-calibration is triggered, it can
use the \ref CL_ERR_PENDING_WRITE mechanism to inform the client about this and prolong
the waiting time.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
\retval CL_ERR_PENDING_WRITE The timeout has expired but the write is still pending for a good reason.
The next call must be \ref clpContinueWriteRegister
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpWriteRegister(ISerial *pSerial, const CLUINT32 Cookie, const CLINT64 Address, const CLINT8* pBuffer, const CLINT64 BufferSize, const CLUINT32 TimeOut );
/*!
\brief This function is called after a write function returned CL_ERR_PENDING_WRITE. It either issues another wait-for-write cycle or cancels the write action
This function must be called if clpWriteRegister or clpContinueWriteRegister returns CL_ERR_PENDING_WRITE.
Note that the CLProtocol DLL is responsible for preventing an endless number of calls to this
function by tracking the total time elapsed since the original call and returning CL_ERR_TIMEOUT
if this time is larger than the write operation should normally take.
\param[in] pSerial The interface giving access to the serial port
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\param[in] ContinueWaiting If true the function will block waiting for the previous call to clpWriteRegister to finish. If false the call will be cancelled.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
\retval CL_ERR_PENDING_WRITE The timeout has expired but the write is still pending for a good reason. The next call must be \ref clpContinueWriteRegister
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpContinueWriteRegister(ISerial *pSerial, const CLUINT32 Cookie, const BOOL8 ContinueWaiting, const CLUINT32 TimeOut );
/*!
\brief This function converts an error code to error text for display in a
dialog box or in a standard I/O window.
Note: clGetErrorText first looks for the error code in CCTserial.dll.
If the error code is not found in CCTserial.dll, it is not a standard Camera Link error.
clGetErrorText then passes the error code to the manufacturer-specific .dll,
which returns the manufacturer-specific error text.
\param[in] errorCode The error code used to find the appropriate error text.
An error code is returned by every function in this library.
\param[out] errorText A caller-allocated buffer which contains the
NULL-terminated error text on function return.
\param[in,out] errorTextSize On success, contains the number of bytes written
into the buffer, including the NULL-termination character.
This value should be the size in bytes of the error text
buffer passed in. On CL_ERR_BUFFER_TOO_SMALL,
contains the size of the buffer needed to write the error text.
\param[in] Cookie Used only for CL_ERR_GET_LAST_ERROR. Returns the last error
fired on the connection identified by the Cookie
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_BUFFER_TOO_SMALL The errorText buffer is too small
\retval CL_ERR_ERROR_NOT_FOUND No error string could be retrieved for the given error number
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpGetErrorText( CLINT32 errorCode, CLINT8* errorText, CLUINT32* errorTextSize, const CLUINT32 Cookie = 0);
/*!
\brief This function disconnects the DLL from the device
When this function is called all resources allocated in the driver DLL and associated
with the Cookie are deleted and the Cookie becomes invalid.
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpDisconnect( const CLUINT32 Cookie );
/*!
\brief This function returns the version of the CLProtocol interface
The function is required to ensure this interface can be extended.
\param[out] pVersionMajor If this parameter changes backward compatibility is broken.
\param[out] pVersionMinor If this parameter changes backward compatibility is maintained.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpGetCLProtocolVersion( CLUINT32 *pVersionMajor, CLUINT32 *pVersionMinor );
/*!
\brief This function is called to read a CLProtocol parameter value.
\param[in] pSerial The interface giving access to the serial port
\param[in] param The parameter to read.
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
Some parameters ignore the Cookie.
\param[out] pBuffer Points to a user-allocated buffer. Upon a successful call,
the buffer contains the value of the parameter.
\param[in] BufferSize The size in bytes of the pBuffer buffer.
Caller must ensure that the buffer size matches the parameter's size.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_PARAM_NOT_SUPPORTED The parameter is not supported
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpGetParam( ISerial *pSerial, CLP_PARAMS param, const CLUINT32 Cookie, CLINT8* pBuffer, const CLINT64 BufferSize, const CLUINT32 TimeOut);
/*!
\brief This function is called to change a CLProtocol parameter value.
\param[in] pSerial The interface giving access to the serial port
\param[in] param The parameter to be changed.
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
Some parameters ignore the Cookie.
\param[in] pBuffer Points to a buffer containing the data to be written
\param[in] BufferSize This is the number of bytes in the buffer.
Caller must ensure that the buffer size matches the parameter's size.
\param[in] TimeOut A timeout value in [ms] used during accessing the device.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR No error occurred
\retval CL_ERR_TIMEOUT A timeout occurred
\retval CL_ERR_PARAM_NOT_SUPPORTED The parameter is not supported
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpSetParam( ISerial *pSerial, CLP_PARAMS param, const CLUINT32 Cookie, const CLINT8* pBuffer, const CLINT64 BufferSize, const CLUINT32 TimeOut);
/*!
\brief This function is called to determine whether a parameter is supported by a CLProtocol.
\param[in] param The parameter to probe.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR The parameter is supported
\retval CL_ERR_PARAM_NOT_SUPPORTED The parameter is not supported
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpIsParamSupported( CLP_PARAMS param);
/*!
\brief This function is called to fetch a pending event from a CLProtocol.
NOTE(review): the original documentation listed a pSerial parameter, but the
signature below takes no ISerial pointer -- confirm whether the doc or the
signature is out of date.
\param[in] Cookie Token supplied by the driver dll via the clpProbeDevice function.
\param[out] pBuffer A pointer to a user-allocated buffer into which the function
copies the event data.
\param[in,out] pBufferSize As an input, the value contains the size of the buffer that
is passed through the pBuffer parameter.
On successful return, this parameter contains the number of bytes written into the buffer,
including the NULL termination character.
On CL_ERR_BUFFER_TOO_SMALL, this parameter contains the size of the buffer needed to write
the data text.
\return On completion, this function returns one of the following status codes:
\retval CL_ERR_NO_ERR The next event is copied to pBuffer
\retval CL_ERR_TIMEOUT There was no new event received
\retval CL_ERR_BUFFER_TOO_SMALL The pBuffer is too small
\retval CL_ERR_INVALID_COOKIE The cookie supplied is invalid
*/
CLPROTOCOLEXPORT CLINT32 CLPROTOCOL
clpGetEventData(const CLUINT32 Cookie, CLINT8* pBuffer, CLUINT32* pBufferSize);
/*@}*/ // CLProtocolInterface
/*@}*/ // PublicInterfaces
#endif // CLPROTOCOL_CLPROTOCOL_H
|
iwave-git/imx-optee-os-iWave
|
core/drivers/crypto/caam/acipher/caam_dsa.c
|
<reponame>iwave-git/imx-optee-os-iWave
// SPDX-License-Identifier: BSD-2-Clause
/*
* Copyright 2019-2020 NXP
*
* Implementation of DSA functions
*/
#include <caam_acipher.h>
#include <caam_common.h>
#include <caam_hal_ctrl.h>
#include <caam_jr.h>
#include <caam_utils_mem.h>
#include <caam_utils_status.h>
#include <drvcrypt.h>
#include <drvcrypt_acipher.h>
#include <mm/core_memprot.h>
#include <tee/cache.h>
#include <string.h>
#include "local.h"
/*
 * Definition of the local DSA Keypair
 * Domain Parameters (p, q, g)
 * Private Key format (x)
 * Public Key format (y)
 *
 * All members are CAAM DMA-able buffers holding big-endian integers.
 */
struct caam_dsa_keypair {
struct caambuf g; /* Generator */
struct caambuf p; /* Prime Number (L bits) */
struct caambuf q; /* Subprime Number (N bits) */
struct caambuf x; /* Private key */
struct caambuf y; /* Public key */
};
/*
* Free local DSA keypair
*
* @key DSA keypair
*/
static void do_keypair_free(struct caam_dsa_keypair *key)
{
caam_free_buf(&key->g);
caam_free_buf(&key->p);
caam_free_buf(&key->q);
caam_free_buf(&key->x);
caam_free_buf(&key->y);
}
/*
 * If all DSA parameters p, q and g are present, convert them from bignumbers
 * to local buffers (via keypair object).
 * Otherwise generate them.
 *
 * @outkey  [out] Output keypair in local format
 * @key     Input key in TEE Crypto format
 * @l_bytes Prime p size in bytes
 * @n_bytes Subprime q size in bytes
 *
 * NOTE(review): on error, buffers already allocated into @outkey are not
 * freed here -- presumably the caller releases them via do_keypair_free();
 * confirm against the call sites.
 */
static TEE_Result get_keypair_domain_params(struct caam_dsa_keypair *outkey,
const struct dsa_keypair *key,
size_t l_bytes, size_t n_bytes)
{
enum caam_status retstatus = CAAM_OUT_MEMORY;
size_t in_q_size = 0;
size_t in_p_size = 0;
size_t in_g_size = 0;
struct prime_dsa prime = {};
DSA_TRACE("DSA conv key param (p, g) of %zu bytes and (q) of %zu bytes",
l_bytes, n_bytes);
/* Allocate zeroed buffers: q is N bits wide, g and p are L bits wide */
retstatus = caam_calloc_buf(&outkey->q, n_bytes);
if (retstatus != CAAM_NO_ERROR)
return caam_status_to_tee_result(retstatus);
retstatus = caam_calloc_buf(&outkey->g, l_bytes);
if (retstatus != CAAM_NO_ERROR)
return caam_status_to_tee_result(retstatus);
retstatus = caam_calloc_buf(&outkey->p, l_bytes);
if (retstatus != CAAM_NO_ERROR)
return caam_status_to_tee_result(retstatus);
/*
 * Get all inputs parameters size, if one of them is not
 * define generate new parameters
 */
in_g_size = crypto_bignum_num_bytes(key->g);
in_p_size = crypto_bignum_num_bytes(key->p);
in_q_size = crypto_bignum_num_bytes(key->q);
if (!in_q_size || !in_g_size || !in_p_size) {
/* Generate DSA parameters: Generator G and Primes P/Q */
prime.g = &outkey->g;
prime.p = &outkey->p;
prime.q = &outkey->q;
retstatus = caam_prime_dsa_gen(&prime);
DSA_TRACE("Generate G and Primes P/Q returned 0x%" PRIx32,
retstatus);
if (retstatus != CAAM_NO_ERROR)
return caam_status_to_tee_result(retstatus);
/* Copy Generated DSA Parameter */
crypto_bignum_bin2bn(outkey->q.data, outkey->q.length, key->q);
crypto_bignum_bin2bn(outkey->g.data, outkey->g.length, key->g);
crypto_bignum_bin2bn(outkey->p.data, outkey->p.length, key->p);
} else {
/*
 * All parameters are present: copy each right-aligned into its
 * zero-padded buffer, then flush to memory for CAAM DMA access.
 */
DSA_TRACE("Prime Q is defined");
crypto_bignum_bn2bin(key->q,
outkey->q.data + n_bytes - in_q_size);
cache_operation(TEE_CACHECLEAN, outkey->q.data,
outkey->q.length);
DSA_TRACE("Prime G is defined");
crypto_bignum_bn2bin(key->g,
outkey->g.data + l_bytes - in_g_size);
cache_operation(TEE_CACHECLEAN, outkey->g.data,
outkey->g.length);
DSA_TRACE("Prime P is defined");
crypto_bignum_bn2bin(key->p,
outkey->p.data + l_bytes - in_p_size);
cache_operation(TEE_CACHECLEAN, outkey->p.data,
outkey->p.length);
}
return TEE_SUCCESS;
}
/*
 * Convert Crypto DSA Private Key to local Keypair Key.
 * Each bignum is written right-aligned into a zero-padded buffer, then the
 * buffers are cache-cleaned so the key is visible to the CAAM in physical
 * memory.
 *
 * @outkey  [out] Output keypair in local format
 * @inkey   Input key in TEE Crypto format
 * @l_bytes Prime p size in bytes
 * @n_bytes Subprime q size in bytes
 *
 * NOTE(review): buffers already allocated into @outkey are not freed on
 * early error returns -- presumably the caller cleans up via
 * do_keypair_free(); confirm against the call sites.
 */
static enum caam_status do_keypriv_conv(struct caam_dsa_keypair *outkey,
const struct dsa_keypair *inkey,
size_t l_bytes, size_t n_bytes)
{
enum caam_status retstatus = CAAM_OUT_MEMORY;
size_t field_size = 0;
DSA_TRACE("DSA Convert Key Private size l=%zu bytes, n=%zu bytes",
l_bytes, n_bytes);
/* Generator */
retstatus = caam_calloc_buf(&outkey->g, l_bytes);
if (retstatus != CAAM_NO_ERROR)
return retstatus;
/* Get the number of bytes of g to pad with 0's */
field_size = crypto_bignum_num_bytes(inkey->g);
crypto_bignum_bn2bin(inkey->g, outkey->g.data + l_bytes - field_size);
/* Prime Number Modulus */
retstatus = caam_calloc_buf(&outkey->p, l_bytes);
if (retstatus != CAAM_NO_ERROR)
return retstatus;
/* Get the number of bytes of p to pad with 0's */
field_size = crypto_bignum_num_bytes(inkey->p);
crypto_bignum_bn2bin(inkey->p, outkey->p.data + l_bytes - field_size);
/* Subprime Number Modulus */
retstatus = caam_calloc_buf(&outkey->q, n_bytes);
if (retstatus != CAAM_NO_ERROR)
return retstatus;
/* Get the number of bytes of q to pad with 0's */
field_size = crypto_bignum_num_bytes(inkey->q);
crypto_bignum_bn2bin(inkey->q, outkey->q.data + n_bytes - field_size);
/* Private key is only scalar x of n bytes */
retstatus = caam_calloc_buf(&outkey->x, n_bytes);
if (retstatus != CAAM_NO_ERROR)
return retstatus;
/* Get the number of bytes of x to pad with 0's */
field_size = crypto_bignum_num_bytes(inkey->x);
crypto_bignum_bn2bin(inkey->x, outkey->x.data + n_bytes - field_size);
/* Flush all key buffers to physical memory for CAAM DMA access */
cache_operation(TEE_CACHECLEAN, outkey->g.data, outkey->g.length);
cache_operation(TEE_CACHECLEAN, outkey->p.data, outkey->p.length);
cache_operation(TEE_CACHECLEAN, outkey->q.data, outkey->q.length);
cache_operation(TEE_CACHECLEAN, outkey->x.data, outkey->x.length);
return CAAM_NO_ERROR;
}
/*
 * Convert a Crypto DSA Public Key into the local CAAM keypair format.
 * Each field is left padded with zeros up to the CAAM expected field
 * size and cache-cleaned so the CAAM engine reads it from memory.
 *
 * @outkey  [out] Output keypair in local format
 * @inkey   Input key in TEE Crypto format
 * @l_bytes Prime p size in bytes
 * @n_bytes Subprime q size in bytes
 */
static enum caam_status do_keypub_conv(struct caam_dsa_keypair *outkey,
				       const struct dsa_public_key *inkey,
				       size_t l_bytes, size_t n_bytes)
{
	enum caam_status status = CAAM_OUT_MEMORY;
	size_t bn_size = 0;

	DSA_TRACE("DSA Convert Public Key size l=%zu bytes, n=%zu bytes",
		  l_bytes, n_bytes);

	/* Generator g, left padded with zeros up to l_bytes */
	status = caam_calloc_buf(&outkey->g, l_bytes);
	if (status != CAAM_NO_ERROR)
		return status;

	bn_size = crypto_bignum_num_bytes(inkey->g);
	crypto_bignum_bn2bin(inkey->g, outkey->g.data + l_bytes - bn_size);

	/* Prime modulus p, left padded with zeros up to l_bytes */
	status = caam_calloc_buf(&outkey->p, l_bytes);
	if (status != CAAM_NO_ERROR)
		return status;

	bn_size = crypto_bignum_num_bytes(inkey->p);
	crypto_bignum_bn2bin(inkey->p, outkey->p.data + l_bytes - bn_size);

	/* Subprime modulus q, left padded with zeros up to n_bytes */
	status = caam_calloc_buf(&outkey->q, n_bytes);
	if (status != CAAM_NO_ERROR)
		return status;

	bn_size = crypto_bignum_num_bytes(inkey->q);
	crypto_bignum_bn2bin(inkey->q, outkey->q.data + n_bytes - bn_size);

	/* Public key is only the scalar y of l bytes */
	status = caam_calloc_buf(&outkey->y, l_bytes);
	if (status != CAAM_NO_ERROR)
		return status;

	bn_size = crypto_bignum_num_bytes(inkey->y);
	crypto_bignum_bn2bin(inkey->y, outkey->y.data + l_bytes - bn_size);

	/* Push every converted field into physical memory for the CAAM */
	cache_operation(TEE_CACHECLEAN, outkey->g.data, outkey->g.length);
	cache_operation(TEE_CACHECLEAN, outkey->p.data, outkey->p.length);
	cache_operation(TEE_CACHECLEAN, outkey->q.data, outkey->q.length);
	cache_operation(TEE_CACHECLEAN, outkey->y.data, outkey->y.length);

	return CAAM_NO_ERROR;
}
/*
 * Allocate a TEE DSA keypair.
 * On any allocation failure, every field allocated so far is released
 * and TEE_ERROR_OUT_OF_MEMORY is returned.
 * NOTE(review): the error path frees fields that may still be NULL
 * (zeroed by the initial memset) - assumes crypto_bignum_free() accepts
 * NULL, to be confirmed.
 *
 * @key    Keypair
 * @l_bits L bits size (prime p size)
 * @n_bits N bits size (subprime q size)
 */
static TEE_Result do_allocate_keypair(struct dsa_keypair *key, size_t l_bits,
				      size_t n_bits)
{
	DSA_TRACE("DSA allocate Keypair of L=%zu bits and N=%zu bits", l_bits,
		  n_bits);

	/* Initialize the key fields to NULL */
	memset(key, 0, sizeof(*key));

	/* Allocate Generator Scalar */
	key->g = crypto_bignum_allocate(l_bits);
	if (!key->g)
		goto err_alloc_keypair;

	/* Allocate Prime Number Modulus */
	key->p = crypto_bignum_allocate(l_bits);
	if (!key->p)
		goto err_alloc_keypair;

	/* Allocate Subprime Number Modulus */
	key->q = crypto_bignum_allocate(n_bits);
	if (!key->q)
		goto err_alloc_keypair;

	/* Allocate Private key X */
	key->x = crypto_bignum_allocate(n_bits);
	if (!key->x)
		goto err_alloc_keypair;

	/* Allocate Public Key Y */
	key->y = crypto_bignum_allocate(l_bits);
	if (!key->y)
		goto err_alloc_keypair;

	return TEE_SUCCESS;

err_alloc_keypair:
	DSA_TRACE("Allocation error");

	/* y is either the failed (NULL) field or not yet allocated here */
	crypto_bignum_free(key->g);
	crypto_bignum_free(key->p);
	crypto_bignum_free(key->q);
	crypto_bignum_free(key->x);

	return TEE_ERROR_OUT_OF_MEMORY;
}
/*
 * Allocate a DSA Public Key.
 * On any allocation failure, release whatever was already allocated and
 * return TEE_ERROR_OUT_OF_MEMORY.
 *
 * @key    Public Key
 * @l_bits L bits size (prime p size)
 * @n_bits N bits size (subprime q size)
 */
static TEE_Result do_allocate_publickey(struct dsa_public_key *key,
					size_t l_bits, size_t n_bits)
{
	DSA_TRACE("DSA Allocate Public of L=%zu bits and N=%zu bits", l_bits,
		  n_bits);

	/* Start from a fully zeroed key so error cleanup only sees NULLs */
	memset(key, 0, sizeof(*key));

	/* Generator g */
	key->g = crypto_bignum_allocate(l_bits);
	if (!key->g)
		goto err;

	/* Prime modulus p */
	key->p = crypto_bignum_allocate(l_bits);
	if (!key->p)
		goto err;

	/* Subprime modulus q */
	key->q = crypto_bignum_allocate(n_bits);
	if (!key->q)
		goto err;

	/* Public scalar y */
	key->y = crypto_bignum_allocate(l_bits);
	if (!key->y)
		goto err;

	return TEE_SUCCESS;

err:
	DSA_TRACE("Allocation error");

	crypto_bignum_free(key->g);
	crypto_bignum_free(key->p);
	crypto_bignum_free(key->q);

	return TEE_ERROR_OUT_OF_MEMORY;
}
/*
 * Generates an DSA keypair.
 * Keypair @key contains the input primes p, q and generator g values
 * (when defined - see get_keypair_domain_params()).
 * The function calculates private x and public y with the CAAM PK
 * keypair generation protocol.
 *
 * @key    [in/out] Keypair
 * @l_bits L bits size (prime p size)
 * @n_bits N bits size (subprime q size)
 */
static TEE_Result do_gen_keypair(struct dsa_keypair *key, size_t l_bits,
				 size_t n_bits)
{
	TEE_Result ret = TEE_ERROR_GENERIC;
	enum caam_status retstatus = CAAM_FAILURE;
	struct caam_dsa_keypair caam_dsa_key = {};
	struct caam_jobctx jobctx = {};
	uint32_t *desc = NULL;
	uint32_t desclen = 0;
	/* NOTE(review): assumes l_bits/n_bits are multiples of 8 - confirm */
	size_t l_bytes = l_bits / 8;
	size_t n_bytes = n_bits / 8;

	/* Maximum number of descriptor words needed for this job */
#ifdef CFG_CAAM_64BIT
#define MAX_DESC_KEY_GEN 14
#else
#define MAX_DESC_KEY_GEN 9
#endif

	DSA_TRACE("Generate Key - Private (%zu bits) and Public (%zu bits)",
		  n_bits, l_bits);

	/* Allocate the job used to prepare the operation */
	desc = caam_calloc_desc(MAX_DESC_KEY_GEN);
	if (!desc) {
		ret = TEE_ERROR_OUT_OF_MEMORY;
		goto exit_gen_keypair;
	}

	/* Allocate Private Key to be generated */
	retstatus = caam_calloc_align_buf(&caam_dsa_key.x, n_bytes);
	if (retstatus != CAAM_NO_ERROR) {
		ret = caam_status_to_tee_result(retstatus);
		goto exit_gen_keypair;
	}
	/* Flush so stale cache lines do not mask what the CAAM writes */
	cache_operation(TEE_CACHEFLUSH, caam_dsa_key.x.data,
			caam_dsa_key.x.length);

	/* Allocate Public Key to be generated */
	retstatus = caam_calloc_align_buf(&caam_dsa_key.y, l_bytes);
	if (retstatus != CAAM_NO_ERROR) {
		ret = caam_status_to_tee_result(retstatus);
		goto exit_gen_keypair;
	}
	cache_operation(TEE_CACHEFLUSH, caam_dsa_key.y.data,
			caam_dsa_key.y.length);

	/* Generator and Prime domain parameters (g, p, q) */
	ret = get_keypair_domain_params(&caam_dsa_key, key, l_bytes, n_bytes);
	if (ret != TEE_SUCCESS)
		goto exit_gen_keypair;

	/*
	 * Build the descriptor using the PDB Public Key generation
	 * block (PD=0). The pointer order (p, q, g, x, y) is fixed by
	 * the PDB layout - do not reorder.
	 */
	caam_desc_init(desc);
	caam_desc_add_word(desc, DESC_HEADER(0));
	caam_desc_add_word(desc, PDB_DL_KEY_L_SIZE(l_bytes) |
				 PDB_DL_KEY_N_SIZE(n_bytes));
	caam_desc_add_ptr(desc, caam_dsa_key.p.paddr);
	caam_desc_add_ptr(desc, caam_dsa_key.q.paddr);
	caam_desc_add_ptr(desc, caam_dsa_key.g.paddr);
	caam_desc_add_ptr(desc, caam_dsa_key.x.paddr);
	caam_desc_add_ptr(desc, caam_dsa_key.y.paddr);
	caam_desc_add_word(desc, PK_KEYPAIR_GEN(DL));
	desclen = caam_desc_get_len(desc);
	caam_desc_update_hdr(desc, DESC_HEADER_IDX(desclen, desclen - 1));
	DSA_DUMPDESC(desc);

	jobctx.desc = desc;
	retstatus = caam_jr_enqueue(&jobctx, NULL);

	if (retstatus == CAAM_NO_ERROR) {
		/* Invalidate to read back what the CAAM wrote to memory */
		cache_operation(TEE_CACHEINVALIDATE, caam_dsa_key.x.data,
				caam_dsa_key.x.length);
		cache_operation(TEE_CACHEINVALIDATE, caam_dsa_key.y.data,
				caam_dsa_key.y.length);

		/* Copy Private and Public keypair */
		ret = crypto_bignum_bin2bn(caam_dsa_key.x.data,
					   caam_dsa_key.x.length, key->x);
		if (ret != TEE_SUCCESS)
			goto exit_gen_keypair;
		ret = crypto_bignum_bin2bn(caam_dsa_key.y.data,
					   caam_dsa_key.y.length, key->y);
		if (ret != TEE_SUCCESS)
			goto exit_gen_keypair;

		DSA_DUMPBUF("X", caam_dsa_key.x.data, caam_dsa_key.x.length);
		DSA_DUMPBUF("Y", caam_dsa_key.y.data, caam_dsa_key.y.length);
	} else {
		DSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
		ret = job_status_to_tee_result(jobctx.status);
	}

exit_gen_keypair:
	caam_free_desc(&desc);
	do_keypair_free(&caam_dsa_key);

	return ret;
}
/*
 * Signature of DSA message.
 * Note the message to sign is already hashed.
 *
 * @sdata   [in/out] DSA data to sign / Signature
 * @l_bytes L bytes size (prime p size)
 * @n_bytes N bytes size (subprime q size)
 */
static TEE_Result do_sign(struct drvcrypt_sign_data *sdata, size_t l_bytes,
			  size_t n_bytes)
{
	TEE_Result ret = TEE_ERROR_GENERIC;
	enum caam_status retstatus = CAAM_FAILURE;
	struct dsa_keypair *inkey = sdata->key;
	struct caam_dsa_keypair dsakey = {};
	struct caam_jobctx jobctx = {};
	uint32_t *desc = NULL;
	uint32_t desclen = 0;
	struct caamdmaobj msg = {};
	size_t sign_len = 0;
	struct caamdmaobj sign_c = {};
	struct caamdmaobj sign_d = {};
	uint32_t pdb_sgt_flags = 0;

	/* Maximum number of descriptor words needed for this job */
#ifdef CFG_CAAM_64BIT
#define MAX_DESC_SIGN 19
#else
#define MAX_DESC_SIGN 12
#endif

	DSA_TRACE("DSA Signature");

	/* Allocate the job descriptor */
	desc = caam_calloc_desc(MAX_DESC_SIGN);
	if (!desc) {
		ret = TEE_ERROR_OUT_OF_MEMORY;
		goto exit_sign;
	}

	/* Convert the private key to a local key */
	retstatus = do_keypriv_conv(&dsakey, inkey, l_bytes, n_bytes);
	if (retstatus != CAAM_NO_ERROR) {
		ret = caam_status_to_tee_result(retstatus);
		goto exit_sign;
	}

	/* Prepare the input message CAAM Descriptor entry */
	ret = caam_dmaobj_input_sgtbuf(&msg, sdata->message.data,
				       sdata->message.length);
	if (ret)
		goto exit_sign;
	if (msg.sgtbuf.sgt_type)
		pdb_sgt_flags |= PDB_SGT_PKSIGN_MSG;

	caam_dmaobj_cache_push(&msg);

	DSA_DUMPBUF("Message", sdata->message.data, sdata->message.length);

	/*
	 * ReAllocate the signature result buffer with a maximum size
	 * of the roundup to 16 bytes of the secure size in bytes if
	 * the signature buffer is not aligned or too short.
	 *
	 *  - 1st Part: size_sec
	 *  - 2nd Part: size_sec roundup to 16 bytes
	 */
	sign_len = ROUNDUP(sdata->size_sec, 16) + sdata->size_sec;

	ret = caam_dmaobj_output_sgtbuf(&sign_c, sdata->signature.data,
					sdata->signature.length, sign_len);
	if (ret)
		goto exit_sign;
	if (sign_c.sgtbuf.sgt_type)
		pdb_sgt_flags |= PDB_SGT_PKSIGN_SIGN_C;

	/* Prepare the 2nd Part of the signature. Derive from sign_c */
	ret = caam_dmaobj_derive_sgtbuf(&sign_d, &sign_c, sdata->size_sec,
					ROUNDUP(sdata->size_sec, 16));
	if (ret)
		goto exit_sign;
	if (sign_d.sgtbuf.sgt_type)
		pdb_sgt_flags |= PDB_SGT_PKSIGN_SIGN_D;

	caam_dmaobj_cache_push(&sign_c);

	/*
	 * Build the DSA signature descriptor with its PDB.
	 * (Comment previously said "Predefined ECC curve" - this is the
	 * DSA/DL sign protocol, not ECC.)
	 */
	caam_desc_init(desc);
	caam_desc_add_word(desc, DESC_HEADER(0));
	caam_desc_add_word(desc, PDB_DSA_SIGN_N(n_bytes) |
				 PDB_DSA_SIGN_L(l_bytes) | pdb_sgt_flags);
	/* Prime number modulus p */
	caam_desc_add_ptr(desc, dsakey.p.paddr);
	/* Subprime modulus q */
	caam_desc_add_ptr(desc, dsakey.q.paddr);
	/* Generator */
	caam_desc_add_ptr(desc, dsakey.g.paddr);
	/* Secret key */
	caam_desc_add_ptr(desc, dsakey.x.paddr);
	/* Input message */
	caam_desc_add_ptr(desc, msg.sgtbuf.paddr);
	/* Signature 1st part */
	caam_desc_add_ptr(desc, sign_c.sgtbuf.paddr);
	/* Signature 2nd part */
	caam_desc_add_ptr(desc, sign_d.sgtbuf.paddr);
	/* Message length */
	caam_desc_add_word(desc, sdata->message.length);
	caam_desc_add_word(desc, DSA_SIGN(DL));
	desclen = caam_desc_get_len(desc);
	caam_desc_update_hdr(desc, DESC_HEADER_IDX(desclen, desclen - 1));
	/* Fix: was ECC_DUMPDESC - copy/paste slip, this is the DSA module */
	DSA_DUMPDESC(desc);

	jobctx.desc = desc;
	retstatus = caam_jr_enqueue(&jobctx, NULL);

	if (retstatus == CAAM_NO_ERROR) {
		/* Limit the copy to 2 * sdata->size_sec */
		sign_c.orig.length = 2 * sdata->size_sec;
		sdata->signature.length = caam_dmaobj_copy_to_orig(&sign_c);

		DSA_DUMPBUF("Signature", sdata->signature.data,
			    sdata->signature.length);

		ret = caam_status_to_tee_result(retstatus);
	} else {
		DSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
		ret = job_status_to_tee_result(jobctx.status);
	}

exit_sign:
	caam_free_desc(&desc);
	do_keypair_free(&dsakey);
	caam_dmaobj_free(&msg);
	caam_dmaobj_free(&sign_c);
	caam_dmaobj_free(&sign_d);

	return ret;
}
/*
 * Verification of the Signature of DSA message.
 * Note the message is already hashed.
 *
 * @sdata   [in/out] DSA Signature to verify
 * @l_bytes L bytes size (prime p size)
 * @n_bytes N bytes size (subprime q size)
 */
static TEE_Result do_verify(struct drvcrypt_sign_data *sdata, size_t l_bytes,
			    size_t n_bytes)
{
	TEE_Result ret = TEE_ERROR_GENERIC;
	enum caam_status retstatus = CAAM_FAILURE;
	struct dsa_public_key *inkey = sdata->key;
	struct caam_dsa_keypair dsakey = {};
	struct caambuf tmp = {};
	struct caam_jobctx jobctx = {};
	uint32_t *desc = NULL;
	uint32_t desclen = 0;
	struct caamdmaobj msg = {};
	struct caamdmaobj sign_c = {};
	struct caamdmaobj sign_d = {};
	uint32_t pdb_sgt_flags = 0;

	/* Maximum number of descriptor words needed for this job */
#ifdef CFG_CAAM_64BIT
#define MAX_DESC_VERIFY 21
#else
#define MAX_DESC_VERIFY 13
#endif

	DSA_TRACE("DSA Verify");

	/* Allocate the job descriptor */
	desc = caam_calloc_desc(MAX_DESC_VERIFY);
	if (!desc) {
		ret = TEE_ERROR_OUT_OF_MEMORY;
		goto exit_verify;
	}

	/* Convert the Public key to local key */
	retstatus = do_keypub_conv(&dsakey, inkey, l_bytes, n_bytes);
	if (retstatus != CAAM_NO_ERROR) {
		ret = caam_status_to_tee_result(retstatus);
		goto exit_verify;
	}

	/* Prepare the input message CAAM Descriptor entry */
	ret = caam_dmaobj_input_sgtbuf(&msg, sdata->message.data,
				       sdata->message.length);
	if (ret)
		goto exit_verify;
	if (msg.sgtbuf.sgt_type)
		pdb_sgt_flags |= PDB_SGT_PKVERIF_MSG;

	caam_dmaobj_cache_push(&msg);

	/*
	 * Prepare the 1st Part of the signature
	 * Handle the full signature in case signature buffer needs to
	 * be reallocated.
	 */
	ret = caam_dmaobj_input_sgtbuf(&sign_c, sdata->signature.data,
				       sdata->signature.length);
	if (ret)
		goto exit_verify;
	if (sign_c.sgtbuf.sgt_type)
		pdb_sgt_flags |= PDB_SGT_PKVERIF_SIGN_C;

	/* Prepare the 2nd Part of the signature, derive from sign_c */
	ret = caam_dmaobj_derive_sgtbuf(&sign_d, &sign_c, sdata->size_sec,
					sdata->size_sec);
	if (ret)
		goto exit_verify;
	if (sign_d.sgtbuf.sgt_type)
		pdb_sgt_flags |= PDB_SGT_PKVERIF_SIGN_D;

	caam_dmaobj_cache_push(&sign_c);

	/* Allocate a Temporary buffer used by the CAAM */
	retstatus = caam_alloc_align_buf(&tmp, l_bytes);
	if (retstatus != CAAM_NO_ERROR) {
		ret = caam_status_to_tee_result(retstatus);
		goto exit_verify;
	}

	/*
	 * Build the DSA verify descriptor with its PDB.
	 * (Previous comment said "Predefined ECC curve" - this is the
	 * DSA/DL verify protocol, not ECC.)
	 */
	caam_desc_init(desc);
	caam_desc_add_word(desc, DESC_HEADER(0));
	caam_desc_add_word(desc, PDB_DSA_VERIF_N(n_bytes) |
				 PDB_DSA_VERIF_L(l_bytes) | pdb_sgt_flags);
	/* Prime number */
	caam_desc_add_ptr(desc, dsakey.p.paddr);
	/* Prime Modulus */
	caam_desc_add_ptr(desc, dsakey.q.paddr);
	/* Generator */
	caam_desc_add_ptr(desc, dsakey.g.paddr);
	/* Public key */
	caam_desc_add_ptr(desc, dsakey.y.paddr);
	/* Input message */
	caam_desc_add_ptr(desc, msg.sgtbuf.paddr);
	/* Signature 1st part */
	caam_desc_add_ptr(desc, sign_c.sgtbuf.paddr);
	/* Signature 2nd part */
	caam_desc_add_ptr(desc, sign_d.sgtbuf.paddr);
	/* Temporary buffer */
	caam_desc_add_ptr(desc, tmp.paddr);
	/* Message length */
	caam_desc_add_word(desc, sdata->message.length);
	caam_desc_add_word(desc, DSA_VERIFY(DL));
	desclen = caam_desc_get_len(desc);
	caam_desc_update_hdr(desc, DESC_HEADER_IDX(desclen, desclen - 1));
	DSA_DUMPDESC(desc);

	jobctx.desc = desc;
	cache_operation(TEE_CACHEFLUSH, tmp.data, tmp.length);
	retstatus = caam_jr_enqueue(&jobctx, NULL);

	/*
	 * A job completion status of CAAM_JOB_STATUS with a zero hardware
	 * status is how the CAAM reports a bad signature here.
	 */
	if (retstatus == CAAM_JOB_STATUS && !jobctx.status) {
		DSA_TRACE("DSA Verify Status 0x%08" PRIx32, jobctx.status);
		ret = TEE_ERROR_SIGNATURE_INVALID;
	} else if (retstatus != CAAM_NO_ERROR) {
		DSA_TRACE("CAAM Status 0x%08" PRIx32, jobctx.status);
		ret = job_status_to_tee_result(jobctx.status);
	} else {
		ret = caam_status_to_tee_result(retstatus);
	}

exit_verify:
	caam_free_desc(&desc);
	do_keypair_free(&dsakey);
	caam_free_buf(&tmp);
	caam_dmaobj_free(&msg);
	caam_dmaobj_free(&sign_c);
	caam_dmaobj_free(&sign_d);

	return ret;
}
/*
 * Registration of the DSA Driver.
 * Operation table plugged into the generic drvcrypt DSA interface.
 */
static struct drvcrypt_dsa driver_dsa = {
	.alloc_keypair = do_allocate_keypair,	  /* Allocate keypair bignums */
	.alloc_publickey = do_allocate_publickey, /* Allocate public key */
	.gen_keypair = do_gen_keypair,		  /* Generate x/y with CAAM */
	.sign = do_sign,			  /* DSA signature */
	.verify = do_verify,			  /* DSA signature verify */
};
/*
 * Initialize the DSA module: register the driver to the crypto API layer
 * when the CAAM instance embeds a Public Key hardware block.
 */
enum caam_status caam_dsa_init(struct caam_jrcfg *caam_jrcfg)
{
	enum caam_status retstatus = CAAM_FAILURE;
	vaddr_t jr_base = caam_jrcfg->base + caam_jrcfg->offset;

	/*
	 * Success only when the PK accelerator is present AND the crypto
	 * layer accepts the driver registration (short-circuit keeps the
	 * original "register only if PK present" behavior).
	 */
	if (caam_hal_ctrl_pknum(jr_base) &&
	    drvcrypt_register_dsa(&driver_dsa) == TEE_SUCCESS)
		retstatus = CAAM_NO_ERROR;

	return retstatus;
}
|
suhasv1995/md-react-icons
|
icons/ExpandMoreRound.js
|
<reponame>suhasv1995/md-react-icons<filename>icons/ExpandMoreRound.js
// Material Design "expand more" icon, rounded variant, as a React component.
import React from 'react';
import createSvg from './utils/createSvg';

// createSvg wraps the given path in an <svg> component registered under the
// provided display name and viewBox.
export default createSvg(<path d="M15.88 9.29L12 13.17 8.12 9.29c-.39-.39-1.02-.39-1.41 0-.39.39-.39 1.02 0 1.41l4.59 4.59c.39.39 1.02.39 1.41 0l4.59-4.59c.39-.39.39-1.02 0-1.41-.39-.38-1.03-.39-1.42 0z" />, 'ExpandMoreRound', '0 0 24 24');
|
kientrungle2001/fulllook.frontend.v3
|
assets/js/fulllook/default/controller/about.js
|
<reponame>kientrungle2001/fulllook.frontend.v3
// AngularJS controller backing the "About" page.
// Responsibilities: product card ordering, prepaid-card top-up with a
// server-side captcha round-trip, static marketing tab content, and the
// list of bank accounts for manual transfer.
// NOTE(review): user-facing strings are Vietnamese and kept verbatim;
// sessionUserId/sessionUsername/FL_API_URL are globals set elsewhere - confirm.
flApp.controller('aboutController', ['$scope', function ($scope) {
    // Order form model; software=1 selects the default product.
    $scope.order = {
        software: 1
    };
    // Submit a card order; requires fullname, quantity, phone and address.
    $scope.doOrder = function () {
        if (!$scope.order.fullname || !$scope.order.quantity || !$scope.order.phone || !$scope.order.address) {
            return false;
        }
        jQuery.post(FL_API_URL + '/payment/orderCard', $scope.order, function (data) {
            if (data) {
                // $apply() is required because jQuery.post resolves outside
                // the Angular digest cycle.
                $scope.order.success = 1;
                $scope.order.message = 'Bạn đã dặt thẻ thành công, chúng tôi sẽ sớm liên hệ lại với bạn!';
                $scope.$apply();
            }
        });
    }
    // Prepaid-card top-up model and cache-busted captcha image URL.
    $scope.paycard = {};
    $scope.paycardCaptcha = '/3rdparty/captcha/random_image.php?t=' + (new Date()).getMilliseconds();
    // Top up with a prepaid card: requires a logged-in user, a pin code and
    // a valid captcha; on success the legacy endpoint at `url` is notified.
    $scope.payCardFl = function (url) {
        if (parseInt(sessionUserId) == 0 || sessionUserId == '') {
            $scope.paycard.message = 'Bạn phải đăng nhập mới được nạp thẻ';
            $scope.paycard.success = 0;
        } else {
            if (!$scope.paycard.pincard) {
                return false;
            }
            $scope.paycard.userId = sessionUserId;
            $scope.paycard.username = sessionUsername;
            // Step 1: fetch the expected captcha value from the server...
            jQuery.post(url + '/3rdparty/captcha/check_session.php', {}, function (dataResult) {
                if (dataResult) {
                    if ($scope.paycard.captcha == dataResult) {
                        // Step 2: ...then perform the actual top-up call.
                        jQuery.post(FL_API_URL + '/payment/payCard', $scope.paycard, function (dataResult) {
                            if (dataResult) {
                                if (parseInt(dataResult.result) == 1) {
                                    // Mirror the successful payment to the
                                    // legacy site (fire-and-forget).
                                    jQuery.post(url + '/update_paycard.php', dataResult, function (data) {
                                    });
                                    $scope.paycard.message = dataResult.string;
                                    $scope.paycard.success = 1;
                                    $scope.$apply();
                                } else {
                                    // Rejected: show reason, refresh captcha.
                                    $scope.paycard.message = dataResult.string;
                                    $scope.paycard.success = 0;
                                    $scope.paycardCaptcha = '/3rdparty/captcha/random_image.php?t=' + (new Date()).getMilliseconds();
                                    $scope.$apply();
                                }
                            }
                        });
                    } else {
                        // Wrong captcha: refresh the image and show an error.
                        $scope.paycardCaptcha = '/3rdparty/captcha/random_image.php?t=' + (new Date()).getMilliseconds();
                        $scope.paycard.message = 'Mã bảo mật chưa đúng';
                        $scope.paycard.success = 0;
                        $scope.$apply();
                    }
                }
            });
        }
    };
    // Static HTML content for the page tabs (rendered verbatim by the view).
    $scope.tabs = {
        proposal: '- Ôn tập, mở rộng kiến thức và đánh giá năng lực toàn diện cho học sinh tiểu học qua các bài luyện tập và hệ thống đề thi thử bằng Tiếng Anh. <br>\
- Phát triển năng lực toàn diện của học sinh :<br>\
+ Khả năng đọc hiểu tiếng Anh<br>\
+ Năng lực tư duy, khả năng phân tích và phán đoán.<br>\
+ Khả năng diễn đạt<br>\
+ Năng lực vận dụng khoa học và hiểu biết xã hội vào cuộc sống.<br>',
        authors: '1. Tiến sĩ <NAME> – Tiến sĩ Giáo dục về Ngôn ngữ học ứng dụng, Đại học New South Wales, Australia.<br>\
2. Tiến sĩ <NAME>, giảng viên khoa Ngữ Văn Đại học Sư phạm Hà Nội.<br>\
3. Tiến sĩ ngôn ngữ - Phạm Như Hoa, Trường THCS Cầu Giấy Hà Nội.<br>\
4. Cô Lê Thị Thu Ngân – giáo viên ngữ văn - Phó giám đốc Công ty Cổ phần Giáo dục Phát triển Trí tuệ và Sáng tạo Next Nobels.<br>\
5. Thạc sĩ Trần Thị Mai Phương, Giám đốc Công ty Cổ phần Giáo dục Phát triển Trí Tuệ và Sáng tạo Next Nobels.<br>\
6. Thầy Trần Hữu Hiếu, giáo viên dạy toán giỏi, có nhiều học trò đạt giải cao trong kì thi Violympic toán 5 toàn quốc và nhiều học trò đạt giải trong Cuộc thi Toán Châu Á Thái Bình Dương.<br>\
7. <NAME> – Giảng viên của Đại học FPT, Đại học Hoa Sen Thành Phố Hồ Chí Minh…<br>\
8. <NAME>, Giảng viên Khoa Quốc tế - Đại học Quốc Gia Hà Nội<br>\
9. <NAME>, nguyên Giảng viên trường Quản trị Du lịch Quốc tế PIHMS (Pacific International Hotel Management School) New Zealand.<br>\
10. <NAME> - Tốt nghiệp ĐH Rhodes (Grahamstown, South Africa), chuyên ngành Xã hội học và ngôn ngữ; Nhận chứng chỉ CELTA - ĐH Cambridge; Nhiều năm kinh nghiệm giảng dạy Tiếng Anh tại Nam Phi.<br>\
11. <NAME> (Quốc tịch Mỹ); Thạc sĩ giáo dục; Giảng viên tại trung tâm Anh ngữ ACET; Cựu giám khảo kì thi IELTS.<br>\
12. <NAME> Ph<NAME> - Giảng viên chính thức khóa đầu tiên thuộc nhóm triển khai chương trình khoa học GLOBE (NASA) tại Việt Nam.<br>\
13. <NAME>, Thạc sĩ Sinh học năm 2006. Tiến sĩ Thực vật, côn trùng, và vi sinh vật tương tác đh Missouri năm 2011, hiện là Giảng viên học viện Nông nghiệp Việt Nam<br>\
14. <NAME> <NAME> - giáo viên tiếng Anh - tốt nghiệp khoa Sư Phạm Tiếng Anh, Đại học Sư Phạm Hà Nội. <br>\
15. <NAME> - giáo viên tiếng Anh - tốt nghiệp khoa Sư Phạm Tiếng Anh, Đại học Sư Phạm Hà Nội. <br>\
16. <NAME> - giáo viên tiếng Anh - tốt nghiệp khoa Sư Phạm Tiếng Anh, Đại học Sư Phạm Hà Nội. <br>\
17. <NAME> - giáo viên tiếng Anh - tốt nghiệp khoa Sư Phạm Tiếng Anh, Đại học Sư Phạm Hà Nội. <br>\
18. <NAME> - giáo viên Văn - tốt nghiệp khoa Ngữ văn, Đại học Sư Phạm Hà Nội. <br>\
19. Nguyễ<NAME> - giáo viên Văn - tốt nghiệp khoa Ngữ văn, Đại học Sư Phạm Hà Nội. <br>\
20. Nguyễ<NAME> - giáo viên Văn - tốt nghiệp khoa Ngữ văn, Đại học Sư Phạm Hà Nội. <br>\
21. <NAME> <NAME> - giảng viên khoa Địa lí, Đại học Sư Phạm Hà Nội. <br>\
22. <NAME> - giáo viên Địa lí - trường THCS Dương Nội, Hà Nội. <br>\
',
        structure: '<table class="table table-bordered table-sm table-striped">\
<tbody><tr>\
<td><b>STT</b></td>\
<td><b>Các phần chính</b></td>\
<td><b>Nội dung</b></td>\
</tr>\
<tr>\
<td><b>1.</b></td>\
<td><b>Phần Luyện tập các môn</b></td>\
<td>\
- <b>Hàng ngàn câu hỏi trắc nghiệm các môn học</b> : Toán, Khoa học, Lịch sử, Địa lí… bằng tiếng Anh để ôn tập kiến thức, đánh giá năng lực và rèn luyện tư duy cho HS.<br>\
- Hệ thống câu hỏi qua các <b>bài nghe, bài quan sát (các tranh ảnh, video) đa dạng về chủ đề</b> dựa trên nền kiến thức các môn học và sự hiểu biết của HS bậc tiểu học. <br>\
- Hơn 2000 từ vựng chuyên ngành được phân theo các chuyên đề của từng môn học.<br>\
</td>\
</tr>\
<tr>\
<td><b>2.</b></td>\
<td><b>Phần Ôn luyện tiếng Anh</b></td>\
<td>\
- <b> Gồm 100 đề ôn tập ngữ pháp tiếng Anh </b> tích hợp với các kiến thức hiểu biết xã hội\
</td>\
</tr>\
\
<tr>\
<td><b>3.</b></td>\
<td><b>Phần Ôn luyện tổng hợp</b></td>\
<td>\
- <b>Là hệ thống gồm 34 đề </b> ,<b>tổng hợp kiến thức các môn học</b>, giúp học sinh làm quen với các dạng đề thi, ôn luyện kiến thức tổng hợp. \
</td>\
</tr>\
<tr>\
<td><b>4.</b></td>\
<td><b>Phần Thi thử Trần Đại Nghĩa</b></td>\
<td>\
- Bao gồm <b> 30 đề thi Trắc nghiệm và 30 đề Tự luận bám sát cấu trúc đề khảo sát vào trường Trần Đại Nghĩa </b>đã được Sở Giáo dục và Đào tạo Thành phố Hồ Chí Minh công bố tháng 4 năm 2015. \
\
</td>\
</tr>\
<tr>\
<td><b>5.</b></td>\
<td><b>Phần Đề thi chính thức vào trường Trần Đại Nghĩa các năm</b></td>\
<td>\
- Bao gồm các <b> đề thi Trắc nghiệm và đề Tự luận các năm của trường Trần Đại Nghĩa. \
\
</td>\
</tr>\
<tr>\
<td><b>6.</b></td>\
<td><b>Kinh nghiệm ôn thi</b></td>\
<td>\
Gồm nhiều nội dung :<br>\
- Cung cấp <b>Tài liệu tham khảo các môn học</b>:<br>\
+ Mỗi môn học đều có các bài đọc tham khảo bằng tiếng Anh và các câu hỏi hỗ trợ ôn tập kiến thức các thức các môn học bằng tiếng Việt.\
+ Đặc biệt, cung cấp hệ thống từ vựng cơ bản sắp xếp theo các chủ đề.\
- Tập hợp <b>đề thi các năm vào trường Trần Đại Nghĩa</b>.<br>\
- Giới thiệu <b>các Trung tâm uy tín ôn thi</b> vào trường Trần Đại Nghĩa.<br>\
- Trao đổi các <b>kinh nghiệm ôn thi</b> vào trường Trần Đại Nghĩa (Mục hỏi đáp kinh nghiệm ôn thi).\
\
\
</td>\
</tr>\
\
\
</tbody></table>',
        advantage: '<ul class="pd-40 list-unstyled left35">\
<li>- Học sinh dễ dàng học từ vựng qua các loại game.</li>\
\
<li>- Chấm điểm và xếp hạng học sinh.</li>\
\
<li>- Tra cứu từ điển Anh Việt ngay trong phần mềm.</li>\
\
<li>- Tất cả các câu hỏi luyện tập và đề thi thử đều có đáp án.</li>\
\
<li>- Nhiều đáp án có phần lí giải bằng tiếng Việt (được thiết kế dành riêng cho học sinh thi vào lớp 6 Trường THPT Chuyên Trần Đại Nghĩa)</li>\
\
<li>-Sản phẩm luôn luôn được nâng cấp, cập nhật cả về số lượng câu hỏi và dạng bài ôn tập.</li>\
</ul>',
        guide: ''
    };
    // Bank transfer options displayed to the user; the commented-out entries
    // are accounts that were deliberately disabled but kept for reference.
    $scope.banks = [
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/vcb.jpg',
            name: '<NAME>ương mại cổ phần ngoại thương(Vietcombank)',
            account: '0011004237507',
            owner: 'CÔNG TY CỔ PHẦN GIÁO DỤC PHÁT TRIỂN TRÍ TUỆ VÀ SÁNG TẠO NEXT NOBELS',
            branch: 'Sở giao dịch'
        },
        /*
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/vietin.jpg',
            name: 'Ngân hàng TMCP công thương Việt Nam(Vietinbank)',
            account: '110000145741',
            owner: 'CÔNG TY CỔ PHẦN GIÁO DỤC PHÁT TRIỂN TRÍ TUỆ VÀ SÁNG TẠO NEXT NOBELS',
            branch: 'Thăng Long'
        },
        */
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/agri.jpg',
            name: '<NAME> Nông nghiệp và phát triển nông thôn Việt Nam(Agribank)',
            account: '1305201013000',
            owner: 'CÔNG TY CỔ PHẦN GIÁO DỤC PHÁT TRIỂN TRÍ TUỆ VÀ SÁNG TẠO NEXT NOBELS',
            branch: 'Tràng An'
        },
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/mb.jpg',
            name: 'Ngân hàng TMCP Quân đội MB',
            account: '0201100316008',
            owner: 'CÔNG TY CỔ PHẦN GIÁO DỤC PHÁT TRIỂN TRÍ TUỆ VÀ SÁNG TẠO NEXT NOBELS',
            branch: 'Nam Trung Yên'
        },
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/bidv.jpg',
            name: '<NAME>àng TMCP Đầu tư và phát triển Việt Nam(BIDV)',
            account: '16010000162395',
            owner: '<NAME>',
            branch: 'Sở giao dịch 3 thành phố Hà Nội'
        },
        /*
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/bidv.jpg',
            name: '<NAME>àng TMCP Đầu tư và phát triển Việt Nam(BIDV)',
            account: '26010000705319',
            owner: 'CÔNG TY CỔ PHẦN GIÁO DỤC PHÁT TRIỂN TRÍ TUỆ VÀ SÁNG TẠO NEXT NOBELS',
            branch: 'Tây Hà Nội'
        },
        {
            image: 'http://s1.nextnobels.com/default/skin/nobel/themes/story/media/donga.jpg',
            name: '<NAME> TMCP Đông Á',
            account: '014601780001',
            owner: 'CÔNG TY CỔ PHẦN GIÁO DỤC PHÁT TRIỂN TRÍ TUỆ VÀ SÁNG TẠO NEXT NOBELS',
            branch: 'Cầu Giấy'
        },
        */
    ];
}]);
|
Freggy/labs-framework
|
bedrock-game-session-service/bedrock-api/src/main/java/de/bergwerklabs/framework/bedrock/api/event/game/SpectatorEvent.java
|
package de.bergwerklabs.framework.bedrock.api.event.game;
import de.bergwerklabs.framework.bedrock.api.LabsGame;
import de.bergwerklabs.framework.bedrock.api.LabsPlayer;
/**
 * Created by <NAME> on 01.05.2018.
 *
 * <p>Fired when a player enters spectator mode. Having a dedicated event enables
 * mini-game specific spectator handling.
 *
 * @author <NAME>
 */
public class SpectatorEvent<T extends LabsPlayer> extends AbstractGameEvent<T> {

    /** Player that has been set to spectator mode; immutable after construction. */
    private final T spectator;

    /**
     * @param game {@link LabsGame} instance.
     * @param spectator player that has been set to spectator mode.
     */
    public SpectatorEvent(LabsGame<T> game, T spectator) {
        super(game);
        this.spectator = spectator;
    }

    /** @return the player that has been set to spectator mode. */
    public T getSpectator() {
        return spectator;
    }
}
|
JacobARose/image-utils
|
imutils/ml/utils/metric_utils.py
|
"""
image-utils/imutils/ml/utils/metric_utils.py
[TODO] - refactor metric configuration to be more general & use Hydra config-style configuration.
Author: <NAME>
Created: Thursday June 11th, 2021
"""
import torchmetrics as metrics
# from typing import List
__all__ = ["get_scalar_metrics", "get_per_class_metrics"]
def get_scalar_metrics(num_classes: int,
                       average: str='macro',
                       prefix: str='',
                       delimiter: str="_"
                       ) -> metrics.MetricCollection:
    """Build a MetricCollection of scalar classification metrics.

    Contains top-1/top-3 accuracy, top-1/top-3 F1, precision and recall,
    each computed with the given ``average`` mode.

    Args:
        num_classes: number of target classes.
        average: torchmetrics averaging mode (e.g. 'macro').
        prefix: optional string prepended to every metric name.
        delimiter: separator inserted between ``prefix`` and the name.

    Returns:
        torchmetrics.MetricCollection keyed by (optionally prefixed) names.
    """
    scalar = {
        f'{average}_acc': metrics.Accuracy(top_k=1, num_classes=num_classes, average=average),
        f'{average}_acc_top3': metrics.Accuracy(top_k=3, num_classes=num_classes, average=average),
        f'{average}_F1': metrics.F1(top_k=1, num_classes=num_classes, average=average),
        f'{average}_F1_top3': metrics.F1(top_k=3, num_classes=num_classes, average=average),
        f'{average}_precision': metrics.Precision(top_k=1, num_classes=num_classes, average=average),
        f'{average}_recall': metrics.Recall(top_k=1, num_classes=num_classes, average=average),
    }

    # Rename every key to "<prefix><delimiter><name>" when a prefix is given.
    if prefix:
        scalar = {f'{prefix}{delimiter}{name}': metric
                  for name, metric in scalar.items()}

    return metrics.MetricCollection(scalar)
def get_per_class_metrics(num_classes: int,
                          normalize: str='true',
                          prefix: str=''
                          ) -> metrics.MetricCollection:
    """Build non-scalar (per-class) metrics.

    Currently contains only a per-class F1 metric (``average=None`` yields
    one value per class); the ConfusionMatrix entry is intentionally
    disabled. These metrics return non-scalar results, requiring more
    careful handling by the caller.

    Args:
        num_classes: number of target classes.
        normalize: confusion-matrix normalization mode, kept for API
            compatibility while ConfusionMatrix is disabled. Options:
            None or 'none' (no normalization), 'true' (over targets),
            'pred' (over predictions), 'all' (whole matrix).
        prefix: optional string; keys become "<prefix>/per_class/<name>".

    Returns:
        torchmetrics.MetricCollection of per-class metrics.
    """
    per_class = {'F1': metrics.F1(num_classes=num_classes, average=None)}
    # Intentionally disabled:
    # per_class['ConfusionMatrix'] = metrics.ConfusionMatrix(num_classes=num_classes, normalize=normalize)

    if prefix:
        per_class = {f'{prefix}/per_class/{name}': metric
                     for name, metric in per_class.items()}

    return metrics.MetricCollection(per_class)
|
lalitkushwah143/GlassDesign2-master
|
app/src/main/java/com/example/android/glass/glassdesign2/ScanActivity.java
|
<reponame>lalitkushwah143/GlassDesign2-master
package com.example.android.glass.glassdesign2;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.core.app.ActivityCompat;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.widget.Toast;
import com.example.android.glass.glassdesign2.data.DataTemp;
import com.example.android.glass.glassdesign2.data.DataUsers;
import com.example.android.glass.glassdesign2.util.DropDownAlert;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import com.google.firebase.firestore.EventListener;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.FirebaseFirestoreException;
import com.google.firebase.firestore.MetadataChanges;
import com.google.firebase.firestore.QueryDocumentSnapshot;
import com.google.firebase.firestore.QuerySnapshot;
import com.google.zxing.Result;
import me.dm7.barcodescanner.zxing.ZXingScannerView;
public class ScanActivity extends BaseActivity implements ZXingScannerView.ResultHandler {
    // Camera preview + QR decoding view (me.dm7 ZXing wrapper).
    private ZXingScannerView mScannerView;
    // Heads-up style overlay prompting the user to scan.
    private DropDownAlert dropDownAlert;
    // Request code for the batched runtime-permission request.
    int PERMISSION_ALL = 1;
    // All runtime permissions requested up front in onCreate().
    String[] PERMISSIONS = {
            Manifest.permission.INTERNET,
            Manifest.permission.ACCESS_NETWORK_STATE,
            android.Manifest.permission.WRITE_EXTERNAL_STORAGE,
            Manifest.permission.READ_EXTERNAL_STORAGE,
            android.Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.MODIFY_AUDIO_SETTINGS,
            Manifest.permission.BLUETOOTH
    };
    // Firebase handles and auth state.
    // NOTE(review): none of these appear to be assigned in the visible
    // methods (only in commented-out code) - confirm they are still needed.
    private FirebaseAuth firebaseAuth;
    private DatabaseReference reference;
    private FirebaseFirestore firestore;
    private String email, pass;
    private Boolean aBoolean;
@RequiresApi(api = Build.VERSION_CODES.P)
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
// Programmatically initialize the scanner view
if (!hasPermissions(this, PERMISSIONS)) {
ActivityCompat.requestPermissions(this, PERMISSIONS, PERMISSION_ALL);
}
mScannerView = new ZXingScannerView(this);
// Set the scanner view as the content view
setContentView(mScannerView);
dropDownAlert = new DropDownAlert(this);
dropDownAlert.setText("Scan QR Code");
dropDownAlert.setTextWeight(0.5f);
// dropDownAlert.addImages("surfer.png", "bike.png");
dropDownAlert.show();
Log.e("In Scan", "now");
}
@Override
public void onResume() {
super.onResume();
// Register ourselves as a handler for scan results.
mScannerView.setResultHandler(this);
// Start camera on resume
mScannerView.startCamera();
}
@Override
public void onPause() {
super.onPause();
// Stop camera on pause
mScannerView.stopCamera();
}
@Override
public void handleResult(Result rawResult) {
Log.e("result", rawResult.getText());
Log.e("result", rawResult.getBarcodeFormat().toString());
//If you would like to resume scanning, call this method below:
// mScannerView.resumeCameraPreview(this);
String result = rawResult.getText();
String[] arrOfStr = result.split("/", 2);
if(arrOfStr.length == 2 && rawResult.getBarcodeFormat().toString().equals("QR_CODE")) {
Log.e("size", arrOfStr.length + "");
Log.e("Uid", arrOfStr[0]);
Log.e("machine_id", arrOfStr[1]);
Intent intent = new Intent(ScanActivity.this, LoginActivity.class);
intent.putExtra("uid", arrOfStr[0]);
intent.putExtra("machine_id", arrOfStr[1]);
startActivity(intent);
finish();
// firebaseAuth = FirebaseAuth.getInstance();
// firestore = FirebaseFirestore.getInstance();
/*
aBoolean= firebaseAuth.getCurrentUser() != null;
reference = FirebaseDatabase.getInstance().getReference().child("tempUserData").child(arrOfStr[0]);
reference.addValueEventListener(new ValueEventListener() {
@Override
public void onDataChange(@NonNull DataSnapshot snapshot) {
if (TextUtils.isEmpty(email) && TextUtils.isEmpty(pass)) {
DataTemp dataTemp = snapshot.getValue(DataTemp.class);
if (firebaseAuth.getCurrentUser() == null) {
if (dataTemp != null) {
email = dataTemp.getEmail().toString();
pass = <PASSWORD>();
firebaseAuth.signInWithEmailAndPassword(email, pass)
.addOnSuccessListener(new OnSuccessListener<AuthResult>() {
@Override
public void onSuccess(AuthResult authResult) {
Toast.makeText(ScanActivity.this, "Signed In Successfully", Toast.LENGTH_SHORT).show();
Log.e("ScanActivity", "Signed In Successfully");
reference.setValue(null);
if (firebaseAuth.getCurrentUser() != null) {
aBoolean = true;
firestore.collection("users")
.whereEqualTo("email", email)
.addSnapshotListener(MetadataChanges.INCLUDE, new EventListener<QuerySnapshot>() {
@Override
public void onEvent(@Nullable QuerySnapshot value, @Nullable FirebaseFirestoreException error) {
for (QueryDocumentSnapshot snapshot : value) {
dataUsers.setKey(snapshot.getId());
dataUsers.setEmail(snapshot.getData().get("email").toString());
dataUsers.setPass(snapshot.getData().get("password").toString());
dataUsers.setfName(snapshot.getData().get("firstName").toString());
dataUsers.setlName(snapshot.getData().get("lastName").toString());
dataUsers.setPhone(snapshot.getData().get("phone").toString());
dataUsers.setRole(snapshot.getData().get("role").toString());
Log.e("MainActivity", dataUsers.getEmail().toString());
}
SharedPreferences sharedPreferences = getSharedPreferences("machine_prefs", Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putBoolean("login_status", true);
editor.putString("machine_id", arrOfStr[1]);
editor.putString("role", dataUsers.getRole());
editor.commit();
editor.apply();
SplashActivity.login_status = true;
SplashActivity.machine_id = arrOfStr[1];
SplashActivity.role = dataUsers.getRole();
Intent intent = new Intent(ScanActivity.this, MainActivity.class);
intent.putExtra("uid", arrOfStr[0]);
intent.putExtra("machine_id", arrOfStr[1]);
intent.putExtra("role", dataUsers.getRole());
setResult(RESULT_OK, intent);
startActivity(intent);
Log.e("ScanActivity", "initiated");
finish();
}
});
}
}
})
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception e) {
Toast.makeText(ScanActivity.this, "Something Went Wrong", Toast.LENGTH_SHORT).show();
Log.e("ScanActivity", "Something Went Wrong");
}
});
} else {
Toast.makeText(ScanActivity.this, "QR Code Expired", Toast.LENGTH_SHORT).show();
Log.e("ScanActivity", "QR Code Expired");
startActivity(new Intent(ScanActivity.this, SplashActivity.class));
}
}
}
}
@Override
public void onCancelled(@NonNull DatabaseError error) {
}
});
*/
}else {
mScannerView.resumeCameraPreview(this);
}
/*
Intent intent = new Intent(ScanActivity.this, MainActivity.class);
switch (result) {
case "Operator":
intent.putExtra("menu_key", R.menu.menu_operator);
Toast.makeText(this, "Operator Module", Toast.LENGTH_SHORT).show();
startActivity(intent);
break;
case "Supervisor":
intent.putExtra("menu_key", R.menu.menu_supervisor);
Toast.makeText(this, "Supervisor Module", Toast.LENGTH_SHORT).show();
startActivity(intent);
break;
case "Trainee":
intent.putExtra("menu_key", R.menu.menu_trainee);
Toast.makeText(this, "Trainee Module", Toast.LENGTH_SHORT).show();
startActivity(intent);
break;
case "Validator":
intent.putExtra("menu_key", R.menu.menu_validator);
Toast.makeText(this, "Validator Module", Toast.LENGTH_SHORT).show();
startActivity(intent);
break;
case "Maintenance":
intent.putExtra("menu_key", R.menu.menu_maintenance);
Toast.makeText(this, "Maintenance Module", Toast.LENGTH_SHORT).show();
startActivity(intent);
break;
default:
Toast.makeText(this, "No user Found", Toast.LENGTH_SHORT).show();
Intent intent1 = getIntent();
finish();
startActivity(intent1);
break;
}
finish();
*/
}
public static boolean hasPermissions(Context context, String... permissions) {
if (context != null && permissions != null) {
for (String permission : permissions) {
if (ActivityCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
}
return true;
}
}
|
laszlocsontos/relinkr
|
relinkr-api/src/test/java/io/relinkr/core/security/authz/access/AuthorizeOwnerVerifierTest.java
|
<reponame>laszlocsontos/relinkr
/*
Copyright [2018-2019] <NAME> (sole trader)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.relinkr.core.security.authz.access;
import static io.relinkr.test.Mocks.USER_ID;
import static io.relinkr.test.Mocks.USER_ID_ZERO;
import static javax.persistence.LockModeType.NONE;
import static org.junit.Assert.assertEquals;
import static org.mockito.BDDMockito.given;
import static org.springframework.security.access.AccessDecisionVoter.ACCESS_ABSTAIN;
import static org.springframework.security.access.AccessDecisionVoter.ACCESS_DENIED;
import static org.springframework.security.access.AccessDecisionVoter.ACCESS_GRANTED;
import io.relinkr.core.orm.EntityClassAwareId;
import java.security.Principal;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class AuthorizeOwnerVerifierTest {

    @Mock
    private Principal principal;

    @Mock
    private EntityClassAwareId entityClassAwareId;

    @Mock
    private EntityManager entityManager;

    private AuthorizeOwnerVerifier ownerVerifier;

    @Before
    public void setUp() {
        ownerVerifier = new AuthorizeOwnerVerifierImpl(entityManager);
    }

    @Test(expected = IllegalArgumentException.class)
    public void givenNullPrincipal_whenCanAccess_thenIllegalArgumentException() {
        ownerVerifier.canAccess(null, entityClassAwareId);
    }

    @Test(expected = IllegalArgumentException.class)
    public void givenNullEntityClassAwareId_whenCanAccess_thenIllegalArgumentException() {
        ownerVerifier.canAccess(principal, null);
    }

    // Entity classes the verifier does not know about must yield an abstain vote.
    @Test
    public void givenUnsupportedEntityClass_whenCanAccess_thenAbstain() {
        given(entityClassAwareId.getEntityClass()).willReturn(Object.class);
        int result = ownerVerifier.canAccess(principal, entityClassAwareId);
        assertEquals(ACCESS_ABSTAIN, result);
    }

    @Test
    public void givenNoSuchEntityFound_whenCanAccess_thenAbstain() {
        given(entityClassAwareId.getEntityClass()).willReturn(TestOwnable.class);
        given(entityManager.find(TestOwnable.class, entityClassAwareId, NONE)).willReturn(null);
        int result = ownerVerifier.canAccess(principal, entityClassAwareId);
        assertEquals(ACCESS_ABSTAIN, result);
    }

    @Test
    public void givenEntityOwnedByGivenPrincipal_whenCanAccess_thenGranted() {
        given(principal.getName()).willReturn(USER_ID.toString());
        given(entityClassAwareId.getEntityClass()).willReturn(TestOwnable.class);
        given(entityManager.find(TestOwnable.class, entityClassAwareId, NONE))
                .willReturn(TestOwnable.of(USER_ID));
        int result = ownerVerifier.canAccess(principal, entityClassAwareId);
        assertEquals(ACCESS_GRANTED, result);
    }

    // Persistence-layer failures must not deny outright; the voter abstains instead.
    @Test
    public void givenEntityOwnedByGivenPrincipal_andPersistenceError_whenCanAccess_thenAbstain() {
        given(entityClassAwareId.getEntityClass()).willReturn(TestOwnable.class);
        given(entityManager.find(TestOwnable.class, entityClassAwareId, NONE))
                .willThrow(PersistenceException.class);
        int result = ownerVerifier.canAccess(principal, entityClassAwareId);
        assertEquals(ACCESS_ABSTAIN, result);
    }

    // Renamed from "...thenGranted": the assertion below expects ACCESS_DENIED,
    // so the old name misdescribed the behavior under test.
    @Test
    public void givenEntityOwnedByAnotherPrincipal_whenCanAccess_thenDenied() {
        given(principal.getName()).willReturn(USER_ID_ZERO.toString());
        given(entityClassAwareId.getEntityClass()).willReturn(TestOwnable.class);
        given(entityManager.find(TestOwnable.class, entityClassAwareId, NONE))
                .willReturn(TestOwnable.of(USER_ID));
        int result = ownerVerifier.canAccess(principal, entityClassAwareId);
        assertEquals(ACCESS_DENIED, result);
    }

    @Test(expected = IllegalArgumentException.class)
    public void givenIllegalPrincipalName_whenCanAccess_thenIllegalArgumentException() {
        given(principal.getName()).willReturn("bad");
        given(entityClassAwareId.getEntityClass()).willReturn(TestOwnable.class);
        given(entityManager.find(TestOwnable.class, entityClassAwareId, NONE))
                .willReturn(TestOwnable.of(USER_ID));
        ownerVerifier.canAccess(principal, entityClassAwareId);
    }
}
|
tyduptyler13/MyEngine
|
src/BasicWindowEventHandler.cpp
|
//
// Created by tyler on 3/14/21.
//
#include "../include/BasicWindowEventHandler.hpp"
using namespace MyEngine;
// Register a mouse handler; duplicate registrations are no-ops (set semantics).
// Returns this to allow chaining.
Window* BasicWindowEventHandler::addMouseHandler(const std::shared_ptr<MouseEventHandler>& handler) {
	mouseEventHandlers.insert(handler);
	return this;
}
// Register a gamepad-button handler; duplicates are ignored. Returns this for chaining.
Window* BasicWindowEventHandler::addGamepadButtonHandler(const std::shared_ptr<GamepadButtonEventHandler>& handler) {
	gamepadButtonEventHandlers.insert(handler);
	return this;
}
// Register a gamepad-axis handler; duplicates are ignored. Returns this for chaining.
Window* BasicWindowEventHandler::addGamepadAxisHandler(const std::shared_ptr<GamepadAxisEventHandler>& handler) {
	gamepadAxisEventHandlers.insert(handler);
	return this;
}
// Register a keyboard handler; duplicates are ignored. Returns this for chaining.
Window* BasicWindowEventHandler::addButtonHandler(const std::shared_ptr<KeyboardEventHandler>& handler) {
	keyboardEventHandlers.insert(handler);
	return this;
}
bool BasicWindowEventHandler::removeMouseHandler(const std::shared_ptr<MouseEventHandler>& eventHandler) {
return mouseEventHandlers.erase(eventHandler) == 1;
}
bool BasicWindowEventHandler::removeGamepadButtonHandler(const std::shared_ptr<GamepadButtonEventHandler>& eventHandler) {
return gamepadButtonEventHandlers.erase(eventHandler) == 1;
}
bool BasicWindowEventHandler::removeGamepadAxisHandler(const std::shared_ptr<GamepadAxisEventHandler>& eventHandler) {
return gamepadAxisEventHandlers.erase(eventHandler) == 1;
}
bool BasicWindowEventHandler::removeButtonHandler(const std::shared_ptr<KeyboardEventHandler>& eventHandler) {
return keyboardEventHandlers.erase(eventHandler) == 1;
}
|
BlackYoup/gamecq
|
Service/main.cpp
|
#include <windows.h>
#include "Service.h"
int main( int argc, char ** argv ) {
// create the service-object
CService serv;
// RegisterService() checks the parameterlist for predefined switches
// (such as -d or -i etc.; see NTService.h for possible switches) and
// starts the service's functionality.
// You can use the return value from "RegisterService()"
// as your exit-code.
return serv.RegisterService(argc, argv);
}
|
samruddhi2909/Citi_Bridge
|
lib/gitlab/chat_commands/base_command.rb
|
<gh_stars>1-10
module Gitlab
  module ChatCommands
    # Abstract base for chat (slash) commands. Subclasses implement the
    # class-level hooks (.match, .help_message, .available?) and the
    # instance-level #execute / #collection.
    class BaseCommand
      # Cap on how many records a command should surface at once.
      QUERY_LIMIT = 5

      def self.match(_text)
        raise NotImplementedError
      end

      def self.help_message
        raise NotImplementedError
      end

      def self.available?(_project)
        raise NotImplementedError
      end

      # Commands are allowed for everyone unless a subclass says otherwise.
      def self.allowed?(_user, _ability)
        true
      end

      def self.can?(object, action, subject)
        Ability.allowed?(object, action, subject)
      end

      def execute(_)
        raise NotImplementedError
      end

      def collection
        raise NotImplementedError
      end

      attr_accessor :project, :current_user, :params

      def initialize(project, user, params = {})
        @project = project
        @current_user = user
        # Dup so later mutation of our params doesn't leak into the caller's hash.
        @params = params.dup
      end

      private

      def find_by_iid(iid)
        collection.find_by(iid: iid)
      end

      def presenter
        Gitlab::ChatCommands::Presenter.new
      end
    end
  end
end
|
pDiller/JiraAlerts
|
persistence/src/main/java/io/reflectoring/jiraalerts/iotaction/IoTAction.java
|
<reponame>pDiller/JiraAlerts<filename>persistence/src/main/java/io/reflectoring/jiraalerts/iotaction/IoTAction.java
package io.reflectoring.jiraalerts.iotaction;
import io.reflectoring.jiraalerts.device.Device;
import io.reflectoring.jiraalerts.routine.RoutineQuery;

import javax.persistence.*;

import java.io.Serializable;
import java.sql.Blob;
import java.util.Objects;
/**
* Entity for IoT-Actions.
*/
@Entity
@Table(name = "IOT_ACTION", uniqueConstraints = {@UniqueConstraint(columnNames = {"DEVICE_ID", "PRIORITY"})})
@IdClass(IoTAction.IoTActionId.class)
public class IoTAction implements Serializable {

    // Part of the composite primary key (routineQuery + device).
    @Id
    @JoinColumn(name = "ROUTINE_QUERY_ID", updatable = false, nullable = false)
    @ManyToOne(fetch = FetchType.LAZY)
    private RoutineQuery routineQuery;

    @Id
    @JoinColumn(name = "DEVICE_ID", updatable = false, nullable = false)
    @ManyToOne(fetch = FetchType.LAZY)
    private Device device;

    // Serialized action payload.
    @Lob
    @Column(name = "ACTION", nullable = false)
    private Blob action;

    // Ordering of actions per device; unique per (DEVICE_ID, PRIORITY).
    @Column(name = "PRIORITY", nullable = false)
    private int priority;

    public RoutineQuery getRoutineQuery() {
        return routineQuery;
    }

    public void setRoutineQuery(RoutineQuery routineQuery) {
        this.routineQuery = routineQuery;
    }

    public Device getDevice() {
        return device;
    }

    public void setDevice(Device device) {
        this.device = device;
    }

    public Blob getAction() {
        return action;
    }

    public void setAction(Blob action) {
        this.action = action;
    }

    public int getPriority() {
        return priority;
    }

    public void setPriority(int priority) {
        this.priority = priority;
    }

    /**
     * Composite primary-key class. JPA requires an {@code @IdClass} to define
     * {@code equals}/{@code hashCode}; they were missing, which breaks identity
     * comparisons in the persistence context.
     */
    static class IoTActionId implements Serializable {

        private RoutineQuery routineQuery;
        private Device device;

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof IoTActionId)) {
                return false;
            }
            IoTActionId other = (IoTActionId) o;
            return Objects.equals(routineQuery, other.routineQuery)
                    && Objects.equals(device, other.device);
        }

        @Override
        public int hashCode() {
            return Objects.hash(routineQuery, device);
        }
    }
}
|
SophistSolutions/Stroika
|
Library/Sources/Stroika/Frameworks/SystemPerformance/Capturer.cpp
|
/*
* Copyright(c) Sophist Solutions, Inc. 1990-2021. All rights reserved
*/
#include "../StroikaPreComp.h"
#include "../../Foundation/Containers/SortedMapping.h"
#include "../../Foundation/Execution/Sleep.h"
#include "Capturer.h"
using namespace Stroika::Foundation;
using namespace Stroika::Foundation::Containers;
using namespace Stroika::Frameworks;
using namespace Stroika::Frameworks::SystemPerformance;
/*
********************************************************************************
************************* SystemPerformance::Capturer **************************
********************************************************************************
*/
// Default constructor: wires each public property to its synchronized backing field.
// The getters snapshot (load) the field; the setters store into it. The macro pair
// qStroika_Foundation_Common_Property_ExtraCaptureStuff / _OuterObjPtr recovers the
// owning Capturer from the property's address.
Capturer::Capturer ()
    : pMostRecentMeasurements{
          [qStroika_Foundation_Common_Property_ExtraCaptureStuff] ([[maybe_unused]] const auto* property) -> MeasurementSet {
              // read-only property: snapshot of the most recent merged measurements
              const Capturer* thisObj = qStroika_Foundation_Common_Property_OuterObjPtr (property, &Capturer::pMostRecentMeasurements);
              return thisObj->fCurrentMeasurementSet_.load ();
          }}
    , pMeasurementsCallbacks{[qStroika_Foundation_Common_Property_ExtraCaptureStuff] ([[maybe_unused]] const auto* property) -> Collection<NewMeasurementsCallbackType> {
          const Capturer* thisObj = qStroika_Foundation_Common_Property_OuterObjPtr (property, &Capturer::pMeasurementsCallbacks);
          return thisObj->fCallbacks_.load ();
      },
      [qStroika_Foundation_Common_Property_ExtraCaptureStuff] ([[maybe_unused]] auto* property, const auto& callbacks) {
          Capturer* thisObj = qStroika_Foundation_Common_Property_OuterObjPtr (property, &Capturer::pMeasurementsCallbacks);
          thisObj->fCallbacks_ = callbacks;
      }}
    , pCaptureSets{[qStroika_Foundation_Common_Property_ExtraCaptureStuff] ([[maybe_unused]] const auto* property) -> Collection<CaptureSet> {
          const Capturer* thisObj = qStroika_Foundation_Common_Property_OuterObjPtr (property, &Capturer::pCaptureSets);
          return thisObj->fCaptureSets_.load ();
      },
      [qStroika_Foundation_Common_Property_ExtraCaptureStuff] ([[maybe_unused]] auto* property, const auto& captureSets) {
          // NOTE(review): unlike SetCaptureSets(), this setter does not bump the change
          // counter or start/stop the runner — presumably intentional; confirm.
          Capturer* thisObj = qStroika_Foundation_Common_Property_OuterObjPtr (property, &Capturer::pCaptureSets);
          thisObj->fCaptureSets_ = captureSets;
      }}
{
}
// Convenience constructor: default-construct, then install the one capture set
// (which also performs an immediate synchronous capture and starts the runner).
Capturer::Capturer (const CaptureSet& captureSet)
    : Capturer{}
{
    AddCaptureSet (captureSet);
}
// Returns a snapshot of the currently registered new-measurements callbacks.
Collection<Capturer::NewMeasurementsCallbackType> Capturer::GetMeasurementsCallbacks () const
{
    return fCallbacks_; // implicit snapshot conversion from the synchronized holder
}
void Capturer::SetMeasurementsCallbacks (const Collection<NewMeasurementsCallbackType>& callbacks)
{
fCallbacks_ = callbacks;
}
void Capturer::AddMeasurementsCallback (const NewMeasurementsCallbackType& cb)
{
fCallbacks_.rwget ()->Add (cb);
}
void Capturer::RemoveMeasurementsCallback (const NewMeasurementsCallbackType& cb)
{
fCallbacks_.rwget ()->Remove (cb);
}
// Returns a snapshot of the currently configured capture sets.
Collection<CaptureSet> Capturer::GetCaptureSets () const
{
    return fCaptureSets_; // implicit snapshot conversion from the synchronized holder
}
void Capturer::SetCaptureSets (const Collection<CaptureSet>& captureSets)
{
auto rwLock = fCaptureSets_.rwget ();
rwLock.store (captureSets);
fCaptureSetChangeCount_++;
ManageRunner_ (not captureSets.empty ());
}
void Capturer::AddCaptureSet (const CaptureSet& cs)
{
RunnerOnce_ (cs);
auto rwLock = fCaptureSets_.rwget ();
rwLock->Add (cs);
fCaptureSetChangeCount_++;
ManageRunner_ (true); // start while still holding lock
}
// Bring the background runner to the requested state: exactly one task in the
// pool when 'on', an empty zero-sized pool otherwise. No-op if already there.
void Capturer::ManageRunner_ (bool on)
{
    const bool runnerActive = fThreadPool_.GetTasksCount () != 0;
    if (on == runnerActive) {
        return; // already in the requested state
    }
    if (on) {
        fThreadPool_.AddTask ([this] () { Runner_ (); });
        fThreadPool_.SetPoolSize (1);
    }
    else {
        fThreadPool_.AbortTasks ();
        fThreadPool_.SetPoolSize (0);
    }
}
// Background scheduler: one thread services ALL capture sets. Keep a schedule
// sorted by next-run time, sleep until the earliest entry is due, capture it,
// and re-queue it at its next period.
void Capturer::Runner_ ()
{
    //
    // really only need one thread - and just wait right amount of time to wakeup to service next captureset.
    //
    // Compute an list of the 'next runs' (sorted by next first). Then WaitUnil () on that.
    // and then RunOnce_(thatCaptureSet); this doesn't exactly guarantee they run at the right time but close enuf and with just one thread
    //
    // First walk list of capture-sets and produce such an 'queue' as a local variable. At top of loop, recompute if change count
    // indicates in needs recomputing (rare). Else just keep adjusting it.
    uint64_t changeCountForCaptureSet{0};
    // NOTE - we use a SortedMapping instead of an actual Queue, because we always know WHEN each item should run next, and a sorted mapping
    // tells us quickly and easily the 'next' item to run. If we used a Q, we would add back the just ran item at the end but it might
    // not belong at the end of the Queue cuz it runs on a different schedule
    SortedMapping<DurationSecondsType, CaptureSet> runQueue;
    auto recomputeSortOrder = [&] () {
        DurationSecondsType now = Time::GetTickCount ();
        auto lock = fCaptureSets_.cget (); // hold lock until I've examined changeCount
        // it would be nice to be able to preserve the timing info in the runQueue for items not changed, but that would require
        // being able to tell which CaptureSets were new and old, which is not always possible given the current API (SetCaptureSet).
        // and updating the capture sets should be rare, so it should cause little problem to recompute it, and reset the counters.
        runQueue.clear ();
        // (removed an unused local SortedMapping 'tmp' that was declared here and never read)
        for (const auto& i : lock.load ()) {
            runQueue.Add (now + i.pRunPeriod ().As<DurationSecondsType> (), i);
        }
        changeCountForCaptureSet = fCaptureSetChangeCount_;
    };
    // Note this runs in a threadpool which can be canceled as needed, so this need not check for a termination condition - it will be ended by a thread abort
    while (true) {
        if (changeCountForCaptureSet < fCaptureSetChangeCount_) {
            recomputeSortOrder (); // updates runQueue as a side-effect
        }
        Assert (not runQueue.empty ()); // because otherwise the thread would have been aborted, and we wouldn't get this far (race??? - maybe need to do ifcheck)
        // otherwise pop the first item from the Q, and wait til then. Then process it, and push it back onto the Q with the appropriate
        // 'next' time.
        auto iterator = runQueue.begin ();
        Assert (iterator != runQueue.end ());
        KeyValuePair<DurationSecondsType, CaptureSet> runNext = *iterator;
        Execution::SleepUntil (runNext.fKey);
        RunnerOnce_ (runNext.fValue);
        runQueue.erase (iterator);
        runQueue.Add (runNext.fKey + runNext.fValue.pRunPeriod ().As<DurationSecondsType> (), runNext.fValue); // interpret time offset as wrt leading edge
    }
}
// Capture each instrument of the given set once, merging all results into a
// single measurement set, then publish that set (cache + callbacks).
void Capturer::RunnerOnce_ (const CaptureSet& cs)
{
    MeasurementSet measurements;
    for (Instrument i : cs.pInstruments ()) {
        try {
            measurements.MergeAdditions (i.Capture ());
        }
        catch (const Execution::Thread::AbortException&) {
            // thread aborts must propagate so the runner can be shut down
            Execution::ReThrow ();
        }
        catch (...) {
            // best-effort: one failing instrument must not prevent the others from capturing
            DbgTrace ("Eating exception in Capturer runner");
        }
    }
    UpdateMeasurementSet_ (measurements);
}
void Capturer::UpdateMeasurementSet_ (const MeasurementSet& ms)
{
fCurrentMeasurementSet_.rwget ()->MergeAdditions (ms);
for (const auto& cb : fCallbacks_.load ()) {
cb (ms);
}
}
|
andyyau520/uniclubdev
|
build/common/api/form.js
|
<filename>build/common/api/form.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
// Transpiled build output. Builds a small fluent API over apiEngine for
// server-side, per-field form validation:
//   api.form(name).field(field, value).validate()
exports.default = function (apiEngine) {
  return {
    // Scope subsequent calls to one named form.
    form: function form(formName) {
      return {
        // Capture the field name and the candidate value to validate.
        field: function field(fieldName, value) {
          return {
            // POST the value to the validation endpoint; returns whatever
            // apiEngine.post returns (presumably a promise — confirm).
            validate: function validate() {
              return apiEngine.post("/api/forms/" + formName + "/fields/" + fieldName + "/validation", {
                data: { value: value }
              });
            }
          };
        }
      };
    }
  };
};
//# sourceMappingURL=data:application/json;base64,<KEY>zLCJzb3VyY2VzIjpbImFwaS9mb3JtLmpzIl0sIm5hbWVzIjpbImFwaUVuZ2luZSIsImZvcm0iLCJmb3JtTmFtZSIsImZpZWxkIiwiZmllbGROYW1lIiwidmFsdWUiLCJ2YWxpZGF0ZSIsInBvc3QiLCJkYXRhIl0sIm1hcHBpbmdzIjoiOzs7Ozs7a0JBQWUsVUFBQ0EsU0FBRDtBQUFBLFNBQWdCO0FBQzdCQyxVQUFNLGNBQUNDLFFBQUQ7QUFBQSxhQUFlO0FBQ25CQyxlQUFPLGVBQUNDLFNBQUQsRUFBWUMsS0FBWjtBQUFBLGlCQUF1QjtBQUM1QkMsc0JBQVU7QUFBQSxxQkFBTU4sVUFBVU8sSUFBVixpQkFDQUwsUUFEQSxnQkFDbUJFLFNBRG5CLGtCQUMyQztBQUN2REksc0JBQU0sRUFBRUgsWUFBRjtBQURpRCxlQUQzQyxDQUFOO0FBQUE7QUFEa0IsV0FBdkI7QUFBQTtBQURZLE9BQWY7QUFBQTtBQUR1QixHQUFoQjtBQUFBLEMiLCJmaWxlIjoiYXBpL2Zvcm0uanMiLCJzb3VyY2VSb290IjoiLi9zcmMifQ==
|
Mihaaai/RealEstateMarketplace
|
project/RealEstateMarketPlace/forms/__init__.py
|
from .LoginForm import LoginForm
from .RegisterForm import RegisterForm
from .AddListingForm import AddListingForm
from .UpdateListingForm import UpdateListingForm
from .UpdateProfileForm import UpdateProfileForm
from .ForgotPasswordForm import ForgotPasswordForm
from .ResetPasswordForm import ResetPasswordForm
|
melardev/CppQtConsoleSnippets
|
filesystem/create_red_image.h
|
<filename>filesystem/create_red_image.h
#pragma once
#include <QImage>
#include <QDir>
#include <QDebug>
namespace CreateRedImage
{
	// Creates a 100x100 solid red ARGB image and writes it as "red.png" into the
	// current working directory, logging where the file ended up — or, previously
	// silently, that saving failed (now reported).
	inline void main()
	{
		// 100px width, 100px height
		QImage image(100, 100, QImage::Format_ARGB32);
		const QColor color = QColor(255, 0, 0);
		image.fill(color);
		QDir directory = QDir::current();
		const QString filepath = directory.absoluteFilePath("red.png");
		if (image.save(filepath))
		{
			qDebug() << "File saved successfully";
			qDebug() << "You can find the image in " << directory.absolutePath();
		}
		else
		{
			// Report failures (e.g. missing write permission) instead of swallowing them.
			qDebug() << "Failed to save image to " << filepath;
		}
	}
}
|
yaseen2211/ChatGem
|
config/routes.rb
|
<reponame>yaseen2211/ChatGem<gh_stars>0
ChatEngine::Engine.routes.draw do
  # mount ActionCable.server => '/cable'
  # All chat routes require an authenticated :user scope
  # (presumably Devise's `authenticate` route helper — confirm).
  authenticate :user do
    # GET /inbox -> chats#inbox: the current user's conversation list.
    get 'inbox',to: 'chats#inbox'
    # Chats cannot be edited or updated after creation; messages can only be created.
    resources :chats, except:[:edit,:new,:update] do
      resources :messages, only: [:create]
    end
  end
end
|
YoranSys/gravitee-access-management
|
gravitee-am-repository/gravitee-am-repository-jdbc/src/main/java/io/gravitee/am/repository/jdbc/management/api/JdbcApplicationRepository.java
|
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.am.repository.jdbc.management.api;
import io.gravitee.am.common.utils.RandomString;
import io.gravitee.am.model.Application;
import io.gravitee.am.model.application.ApplicationOAuthSettings;
import io.gravitee.am.model.application.ApplicationScopeSettings;
import io.gravitee.am.model.application.ApplicationSettings;
import io.gravitee.am.model.common.Page;
import io.gravitee.am.repository.jdbc.management.AbstractJdbcRepository;
import io.gravitee.am.repository.jdbc.management.api.model.JdbcApplication;
import io.gravitee.am.repository.jdbc.management.api.spring.application.SpringApplicationFactorRepository;
import io.gravitee.am.repository.jdbc.management.api.spring.application.SpringApplicationIdentityRepository;
import io.gravitee.am.repository.jdbc.management.api.spring.application.SpringApplicationRepository;
import io.gravitee.am.repository.jdbc.management.api.spring.application.SpringApplicationScopeRepository;
import io.gravitee.am.repository.management.api.ApplicationRepository;
import io.reactivex.Completable;
import io.reactivex.Flowable;
import io.reactivex.Maybe;
import io.reactivex.Single;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.relational.core.query.Query;
import org.springframework.data.relational.core.query.Update;
import org.springframework.data.relational.core.sql.SqlIdentifier;
import org.springframework.r2dbc.core.DatabaseClient;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.reactive.TransactionalOperator;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.time.LocalDateTime;
import java.util.*;
import static org.springframework.data.relational.core.query.Criteria.where;
import static org.springframework.data.relational.core.query.Query.query;
import static reactor.adapter.rxjava.RxJava2Adapter.*;
/**
* @author <NAME> (eric.leleu at graviteesource.com)
* @author GraviteeSource Team
*/
@Repository
public class JdbcApplicationRepository extends AbstractJdbcRepository implements ApplicationRepository, InitializingBean {
public static final int MAX_CONCURRENCY = 1;
public static final String COL_ID = "id";
public static final String COL_TYPE = "type";
public static final String COL_ENABLED = "enabled";
public static final String COL_TEMPLATE = "template";
public static final String COL_NAME = "name";
public static final String COL_DESCRIPTION = "description";
public static final String COL_DOMAIN = "domain";
public static final String COL_CERTIFICATE = "certificate";
public static final String COL_CREATED_AT = "created_at";
public static final String COL_UPDATED_AT = "updated_at";
public static final String COL_METADATA = "metadata";
public static final String COL_SETTINGS = "settings";
private static final List<String> columns = List.of(COL_ID,
COL_TYPE,
COL_ENABLED,
COL_TEMPLATE,
COL_NAME,
COL_DESCRIPTION,
COL_DOMAIN,
COL_CERTIFICATE,
COL_CREATED_AT,
COL_UPDATED_AT,
COL_METADATA,
COL_SETTINGS);
@Autowired
private SpringApplicationRepository applicationRepository;
@Autowired
private SpringApplicationFactorRepository factorRepository;
@Autowired
private SpringApplicationScopeRepository scopeRepository;
@Autowired
private SpringApplicationIdentityRepository identityRepository;
private String INSERT_STATEMENT;
private String UPDATE_STATEMENT;
// Convert the persistence-layer row representation into the domain model.
protected Application toEntity(JdbcApplication jdbcEntity) {
    return mapper.map(jdbcEntity, Application.class);
}
// Convert the domain model into its persistence-layer row representation.
protected JdbcApplication toJdbcEntity(Application domainEntity) {
    return mapper.map(domainEntity, JdbcApplication.class);
}
@Override
public void afterPropertiesSet() throws Exception {
    // Pre-build the INSERT/UPDATE SQL once Spring has injected all dependencies;
    // both statements cover every column, with updates keyed by COL_ID.
    this.INSERT_STATEMENT = createInsertStatement("applications", columns);
    this.UPDATE_STATEMENT = createUpdateStatement("applications", columns, List.of(COL_ID));
}
/**
 * Enriches the given application with its association tables: linked identity
 * providers, MFA factors, and per-scope settings (the latter only when OAuth
 * settings exist). Grant tables are intentionally not read back here.
 */
private Single<Application> completeApplication(Application entity) {
    return Single.just(entity).flatMap(app ->
            // attach the identity providers linked to this application
            identityRepository.findAllByApplicationId(app.getId()).map(JdbcApplication.Identity::getIdentity).toList().map(idps -> {
                app.setIdentities(new HashSet<>(idps));
                return app;
            })
    ).flatMap(app ->
            // attach the MFA factors
            factorRepository.findAllByApplicationId(app.getId()).map(JdbcApplication.Factor::getFactor).toList().map(factors -> {
                app.setFactors(new HashSet<>(factors));
                return app;
            })
    ).flatMap(app ->
            // attach per-scope settings onto the OAuth settings when present
            scopeRepository.findAllByApplicationId(app.getId()).map(jdbcScopeSettings -> mapper.map(jdbcScopeSettings, ApplicationScopeSettings.class)).toList().map(scopeSettings -> {
                if (app.getSettings() != null && app.getSettings().getOauth() != null) {
                    app.getSettings().getOauth().setScopeSettings(scopeSettings);
                }
                return app;
            })
    );// do not read grant tables, information already present into the settings object
}
@Override
public Flowable<Application> findAll() {
    LOGGER.debug("findAll()");
    // Stream every application row, then enrich each with its association tables.
    return applicationRepository.findAll()
            .map(this::toEntity)
            .flatMap(application -> completeApplication(application).toFlowable());
}
@Override
public Single<Page<Application>> findAll(int page, int size) {
    LOGGER.debug("findAll({}, {})", page, size);
    // Page through applications ordered by id, enrich each entity with its
    // association tables (bounded by MAX_CONCURRENCY), then pair the page
    // content with the total row count.
    return fluxToFlowable(template.select(JdbcApplication.class)
            .matching(Query.empty().with(PageRequest.of(page, size, Sort.by(COL_ID))))
            .all())
            .map(this::toEntity)
            .flatMap(app -> completeApplication(app).toFlowable(), MAX_CONCURRENCY)
            .toList()
            .flatMap(data -> applicationRepository.count().map(total -> new Page<Application>(data, page, total)))
            .doOnError((error) -> LOGGER.error("Unable to retrieve all applications (page={}/size={})", page, size, error));
}
@Override
public Flowable<Application> findByDomain(String domain) {
    LOGGER.debug("findByDomain({})", domain);
    // Stream the domain's applications and enrich each with its association tables.
    return applicationRepository.findByDomain(domain)
            .map(this::toEntity)
            .flatMap(application -> completeApplication(application).toFlowable());
}
@Override
public Single<Page<Application>> findByDomain(String domain, int page, int size) {
    LOGGER.debug("findByDomain({}, {}, {})", domain, page, size);
    // Page the domain's applications ordered by id, enrich each entity with its
    // association tables (bounded by MAX_CONCURRENCY), then pair the page
    // content with the per-domain row count.
    return fluxToFlowable(template.select(JdbcApplication.class)
            .matching(query(where(COL_DOMAIN).is(domain)).with(PageRequest.of(page, size, Sort.by(COL_ID))))
            .all())
            .map(this::toEntity)
            .flatMap(app -> completeApplication(app).toFlowable(), MAX_CONCURRENCY)
            .toList()
            .flatMap(data -> applicationRepository.countByDomain(domain).map(total -> new Page<Application>(data, page, total)))
            .doOnError((error) -> LOGGER.error("Unable to retrieve all applications with domain {} (page={}/size={})", domain, page, size, error));
}
/**
 * Searches applications of a domain by name/client-id pattern.
 * A {@code *} in the query switches to SQL wildcard matching ({@code %});
 * matching is case-insensitive (both sides upper-cased by the dialect query).
 *
 * @param domain security domain identifier
 * @param query  search text, may contain {@code *} wildcards
 * @param page   zero-based page index
 * @param size   page size
 * @return matching applications plus the total match count
 */
@Override
public Single<Page<Application>> search(String domain, String query, int page, int size) {
    LOGGER.debug("search({}, {}, {}, {})", domain, query, page, size);
    boolean wildcardMatch = query.contains("*");
    String wildcardQuery = query.replaceAll("\\*+", "%");
    // Hoisted: the bound value was previously computed twice (select and count
    // binds) — compute it once so both statements are guaranteed to agree.
    // NOTE(review): toUpperCase() is locale-sensitive; presumably fine because the
    // dialect query upper-cases the column the same way — confirm for tr/az locales.
    String searchValue = wildcardMatch ? wildcardQuery.toUpperCase() : query.toUpperCase();
    String search = databaseDialectHelper.buildSearchApplicationsQuery(wildcardMatch, page, size);
    String count = databaseDialectHelper.buildCountApplicationsQuery(wildcardMatch);
    return fluxToFlowable(template.getDatabaseClient().sql(search)
            .bind(COL_DOMAIN, domain)
            .bind("value", searchValue)
            .map(row -> rowMapper.read(JdbcApplication.class, row))
            .all())
            .map(this::toEntity)
            .flatMap(app -> completeApplication(app).toFlowable())
            .toList()
            // Second statement: total count with the same bindings.
            .flatMap(data -> monoToSingle(template.getDatabaseClient().sql(count)
                    .bind(COL_DOMAIN, domain)
                    .bind("value", searchValue)
                    .map(row -> row.get(0, Long.class)).first())
                    .map(total -> new Page<Application>(data, page, total)))
            .doOnError((error) -> LOGGER.error("Unable to retrieve all applications with domain {} (page={}/size={})", domain, page, size, error));
}
/**
 * Streams all applications configured with the given certificate,
 * with their child entities resolved.
 */
@Override
public Flowable<Application> findByCertificate(String certificate) {
    LOGGER.debug("findByCertificate({})", certificate);
    final Flowable<Application> entities = applicationRepository.findByCertificate(certificate).map(this::toEntity);
    return entities.flatMap(application -> completeApplication(application).toFlowable());
}
/**
 * Streams all applications linked to the given identity provider via the
 * {@code application_identities} join table.
 *
 * @param identityProvider identity provider id
 */
@Override
public Flowable<Application> findByIdentityProvider(String identityProvider) {
    LOGGER.debug("findByIdentityProvider({})", identityProvider);
    // identity is a keyword with mssql
    // The column name must therefore be quoted through the dialect helper
    // instead of being written literally in the statement.
    return fluxToFlowable(template.getDatabaseClient()
            .sql("SELECT a.* FROM applications a INNER JOIN application_identities i ON a.id = i.application_id where i." + databaseDialectHelper.toSql(SqlIdentifier.quoted("identity")) + " = :identity")
            .bind("identity", identityProvider)
            .map(row -> rowMapper.read(JdbcApplication.class, row)).all())
            .map(this::toEntity)
            .flatMap(app -> completeApplication(app).toFlowable());
}
/**
 * Streams all applications that reference the given MFA factor,
 * with their child entities resolved.
 */
@Override
public Flowable<Application> findByFactor(String factor) {
    LOGGER.debug("findByFactor({})", factor);
    final Flowable<Application> entities = applicationRepository.findAllByFactor(factor).map(this::toEntity);
    return entities.flatMap(application -> completeApplication(application).toFlowable());
}
/**
 * Streams all applications of a domain that declare the given extension
 * grant type, with their child entities resolved.
 */
@Override
public Flowable<Application> findByDomainAndExtensionGrant(String domain, String extensionGrant) {
    LOGGER.debug("findByDomainAndExtensionGrant({}, {})", domain, extensionGrant);
    final Flowable<Application> entities =
            applicationRepository.findAllByDomainAndGrant(domain, extensionGrant).map(this::toEntity);
    return entities.flatMap(application -> completeApplication(application).toFlowable());
}
/**
 * Streams the applications whose ids are in the given list; an empty or
 * {@code null} list yields an empty stream without touching the database.
 */
@Override
public Flowable<Application> findByIdIn(List<String> ids) {
    LOGGER.debug("findByIdIn({})", ids);
    final boolean nothingRequested = (ids == null) || ids.isEmpty();
    if (nothingRequested) {
        return Flowable.empty();
    }
    final Flowable<Application> entities = applicationRepository.findByIdIn(ids).map(this::toEntity);
    return entities.flatMap(application -> completeApplication(application).toFlowable());
}
/** Counts all applications. Straight delegation, no child loading needed. */
@Override
public Single<Long> count() {
    final Single<Long> total = applicationRepository.count();
    return total;
}
/** Counts the applications of one security domain. Straight delegation. */
@Override
public Single<Long> countByDomain(String domain) {
    final Single<Long> total = applicationRepository.countByDomain(domain);
    return total;
}
/**
 * Finds the application of a domain by its OAuth client id, using a
 * dialect-specific statement (the client id lives inside the JSON settings
 * column, so extraction differs per database).
 *
 * @param domain   security domain identifier
 * @param clientId OAuth client identifier
 * @return the first match, or empty if none
 */
@Override
public Maybe<Application> findByDomainAndClientId(String domain, String clientId) {
    LOGGER.debug("findByDomainAndClientId({}, {})", domain, clientId);
    return fluxToFlowable(template.getDatabaseClient().sql(databaseDialectHelper.buildFindApplicationByDomainAndClient())
            .bind(COL_DOMAIN, domain)
            .bind("clientId", clientId)
            .map(row -> rowMapper.read(JdbcApplication.class, row))
            .all())
            .map(this::toEntity)
            .flatMap(app -> completeApplication(app).toFlowable())
            // Only the first matching row is returned.
            .firstElement();
}
/**
 * Finds one application by id, with child entities resolved.
 *
 * @param id application identifier
 * @return the application, or empty if it does not exist
 */
@Override
public Maybe<Application> findById(String id) {
    // Fix: log pattern was "findById({}" — missing the closing ")" of the
    // placeholder, producing malformed output unlike every sibling method.
    LOGGER.debug("findById({})", id);
    return applicationRepository.findById(id)
            .map(this::toEntity)
            .flatMap(app -> completeApplication(app).toMaybe());
}
/**
 * Inserts an application and its child rows (identities, factors, grants,
 * scope settings) inside a single transaction, then re-reads the complete
 * entity so the caller gets exactly what was persisted.
 *
 * @param item application to create; a random id is generated when absent
 */
@Override
public Single<Application> create(Application item) {
    // Generate an id only if the caller did not supply one.
    item.setId(item.getId() == null ? RandomString.generate() : item.getId());
    LOGGER.debug("Create Application with id {}", item.getId());
    TransactionalOperator trx = TransactionalOperator.create(tm);
    DatabaseClient.GenericExecuteSpec sql = template.getDatabaseClient().sql(INSERT_STATEMENT);
    sql = addQuotedField(sql, COL_ID, item.getId(), String.class);
    sql = addQuotedField(sql, COL_TYPE, item.getType() == null ? null : item.getType().name(), String.class);
    sql = addQuotedField(sql, COL_ENABLED, item.isEnabled(), Boolean.class);
    sql = addQuotedField(sql, COL_TEMPLATE, item.isTemplate(), Boolean.class);
    sql = addQuotedField(sql, COL_NAME, item.getName(), String.class);
    sql = addQuotedField(sql, COL_DESCRIPTION, item.getDescription(), String.class);
    sql = addQuotedField(sql, COL_DOMAIN, item.getDomain(), String.class);
    sql = addQuotedField(sql, COL_CERTIFICATE, item.getCertificate(), String.class);
    sql = addQuotedField(sql, COL_CREATED_AT, dateConverter.convertTo(item.getCreatedAt(), null), LocalDateTime.class);
    sql = addQuotedField(sql, COL_UPDATED_AT, dateConverter.convertTo(item.getUpdatedAt(), null), LocalDateTime.class);
    // metadata and settings are stored as JSON columns via the dialect helper.
    sql = databaseDialectHelper.addJsonField(sql, COL_METADATA, item.getMetadata());
    sql = databaseDialectHelper.addJsonField(sql, COL_SETTINGS, item.getSettings());
    Mono<Integer> insertAction = sql.fetch().rowsUpdated();
    // Chain child-table inserts after the main row insert.
    insertAction = persistChildEntities(insertAction, item);
    // Everything above runs in one transaction; afterwards re-load the entity.
    return monoToSingle(insertAction.as(trx::transactional))
            .flatMap((i) -> this.findById(item.getId()).toSingle());
}
/**
 * Updates an application by rewriting the main row and replacing all child
 * rows (delete-then-insert) in a single transaction, then re-reads the
 * complete entity.
 *
 * @param item application state to persist; its id selects the row
 */
@Override
public Single<Application> update(Application item) {
    LOGGER.debug("Update Application with id {}", item.getId());
    TransactionalOperator trx = TransactionalOperator.create(tm);
    DatabaseClient.GenericExecuteSpec sql = template.getDatabaseClient().sql(UPDATE_STATEMENT);
    sql = addQuotedField(sql, COL_ID, item.getId(), String.class);
    sql = addQuotedField(sql, COL_TYPE, item.getType() == null ? null : item.getType().name(), String.class);
    sql = addQuotedField(sql, COL_ENABLED, item.isEnabled(), Boolean.class);
    sql = addQuotedField(sql, COL_TEMPLATE, item.isTemplate(), Boolean.class);
    sql = addQuotedField(sql, COL_NAME, item.getName(), String.class);
    sql = addQuotedField(sql, COL_DESCRIPTION, item.getDescription(), String.class);
    sql = addQuotedField(sql, COL_DOMAIN, item.getDomain(), String.class);
    sql = addQuotedField(sql, COL_CERTIFICATE, item.getCertificate(), String.class);
    sql = addQuotedField(sql, COL_CREATED_AT, dateConverter.convertTo(item.getCreatedAt(), null), LocalDateTime.class);
    sql = addQuotedField(sql, COL_UPDATED_AT, dateConverter.convertTo(item.getUpdatedAt(), null), LocalDateTime.class);
    sql = databaseDialectHelper.addJsonField(sql, COL_METADATA, item.getMetadata());
    sql = databaseDialectHelper.addJsonField(sql, COL_SETTINGS, item.getSettings());
    Mono<Integer> updateAction = sql.fetch().rowsUpdated();
    // Child rows are fully replaced: delete existing ones first, then re-insert
    // from the in-memory state. Order matters — delete must precede insert.
    updateAction = deleteChildEntities(item.getId()).then(updateAction);
    updateAction = persistChildEntities(updateAction, item);
    return monoToSingle(updateAction.as(trx::transactional))
            .flatMap((i) -> this.findById(item.getId()).toSingle());
}
/**
 * Deletes an application and all of its child rows.
 *
 * @param id application identifier
 */
@Override
public Completable delete(String id) {
    LOGGER.debug("delete({})", id);
    TransactionalOperator trx = TransactionalOperator.create(tm);
    Mono<Integer> delete = template.delete(JdbcApplication.class).matching(query(where(COL_ID).is(id))).all();
    // NOTE(review): the main row is deleted twice — via the template inside the
    // transaction, then again via applicationRepository.deleteById after it.
    // The second call is presumably a defensive no-op; confirm intent.
    return monoToCompletable(delete.then(deleteChildEntities(id)).as(trx::transactional))
            .andThen(applicationRepository.deleteById(id));
}
/**
 * Builds a reactive chain that removes every child row (identities, factors,
 * grants, scope settings) of the given application. Nothing executes until
 * the returned {@link Mono} is subscribed.
 *
 * @param appId application identifier whose child rows are deleted
 * @return a Mono completing with the last delete's row count
 */
private Mono<Integer> deleteChildEntities(String appId) {
    Mono<Integer> identities = template.delete(JdbcApplication.Identity.class).matching(query(where("application_id").is(appId))).all();
    Mono<Integer> factors = template.delete(JdbcApplication.Factor.class).matching(query(where("application_id").is(appId))).all();
    Mono<Integer> grants = template.delete(JdbcApplication.Grant.class).matching(query(where("application_id").is(appId))).all();
    Mono<Integer> scopeSettings = template.delete(JdbcApplication.ScopeSettings.class).matching(query(where("application_id").is(appId))).all();
    // Deletes run sequentially in this order; then() discards all but the
    // final count.
    return factors.then(identities).then(grants).then(scopeSettings);
}
/**
 * Appends INSERT statements for the application's child rows (identities,
 * factors, OAuth grant types, scope settings) after the given action.
 * Each group runs sequentially (concatMap) and its row counts are summed.
 *
 * @param actionFlow the upstream action (main-row insert/update) to chain after
 * @param app        source of the child collections
 * @return the extended reactive chain
 */
private Mono<Integer> persistChildEntities(Mono<Integer> actionFlow, Application app) {
    final Set<String> identities = app.getIdentities();
    if (identities != null && !identities.isEmpty()) {
        actionFlow = actionFlow.then(Flux.fromIterable(identities).concatMap(idp -> {
            // "identity" is a reserved word on some databases (mssql) — quote it.
            final String identity = databaseDialectHelper.toSql(SqlIdentifier.quoted("identity"));
            String INSERT_STMT = "INSERT INTO application_identities(application_id, " + identity + ") VALUES (:app, :idpid)";
            final DatabaseClient.GenericExecuteSpec sql = template.getDatabaseClient()
                    .sql(INSERT_STMT)
                    .bind("app", app.getId())
                    .bind("idpid", idp);
            return sql.fetch().rowsUpdated();
        }).reduce(Integer::sum));
    }
    final Set<String> factors = app.getFactors();
    if (factors != null && !factors.isEmpty()) {
        actionFlow = actionFlow.then(Flux.fromIterable(factors).concatMap(value -> {
            String INSERT_STMT = "INSERT INTO application_factors(application_id, factor) VALUES (:app, :factor)";
            final DatabaseClient.GenericExecuteSpec sql = template.getDatabaseClient()
                    .sql(INSERT_STMT)
                    .bind("app", app.getId())
                    .bind("factor", value);
            return sql.fetch().rowsUpdated();
        }).reduce(Integer::sum));
    }
    // Grant types live inside the OAuth settings object; default to empty list.
    final List<String> grants = Optional.ofNullable(app.getSettings()).map(ApplicationSettings::getOauth).map(ApplicationOAuthSettings::getGrantTypes).orElse(Collections.emptyList());
    // The null check is redundant after orElse(emptyList) but kept as-is.
    if (grants != null && !grants.isEmpty()) {
        actionFlow = actionFlow.then(Flux.fromIterable(grants).concatMap(value -> {
            String INSERT_STMT = "INSERT INTO application_grants(application_id, grant_type) VALUES (:app, :grant)";
            final DatabaseClient.GenericExecuteSpec sql = template.getDatabaseClient()
                    .sql(INSERT_STMT)
                    .bind("app", app.getId())
                    .bind("grant", value);
            return sql.fetch().rowsUpdated();
        }).reduce(Integer::sum));
    }
    final List<ApplicationScopeSettings> scopeSettings = Optional.ofNullable(app.getSettings()).map(ApplicationSettings::getOauth).map(ApplicationOAuthSettings::getScopeSettings).orElse(Collections.emptyList());
    if (scopeSettings != null && !scopeSettings.isEmpty()) {
        actionFlow = actionFlow.then(Flux.fromIterable(scopeSettings).concatMap(value -> {
            String INSERT_STMT = "INSERT INTO application_scope_settings(application_id, scope, is_default, scope_approval) VALUES (:app, :scope, :default, :approval)";
            DatabaseClient.GenericExecuteSpec sql = template.getDatabaseClient()
                    .sql(INSERT_STMT)
                    .bind("app", app.getId())
                    .bind("default", value.isDefaultScope());
            // Nullable columns must be bound explicitly with bindNull.
            sql = value.getScope() == null ? sql.bindNull("scope", String.class) : sql.bind("scope", value.getScope());
            sql = value.getScopeApproval() == null ? sql.bindNull("approval", Integer.class) : sql.bind("approval", value.getScopeApproval());
            return sql.fetch().rowsUpdated();
        }).reduce(Integer::sum));
    }
    return actionFlow;
}
}
|
metamatex/metamate
|
hackernews-svc/gen/v0/mql/GetPostsEndpoint_.go
|
// generated by metactl sdk gen
package mql

const (
	// GetPostsEndpointName is the string name of the GetPostsEndpoint type.
	GetPostsEndpointName = "GetPostsEndpoint"
)

// GetPostsEndpoint describes the GetPosts endpoint configuration.
// Filter is optional; presumably it restricts which GetPostsRequests the
// endpoint serves — generated code, verify against the metactl schema.
type GetPostsEndpoint struct {
	Filter *GetPostsRequestFilter `json:"filter,omitempty" yaml:"filter,omitempty"`
}
|
TeamSPoon/appdapter
|
maven/ext.bundle.swoop/src_swoop/org/mindswap/swoop/utils/owlapi/diff/NegativeDiff.java
|
<gh_stars>0
package org.mindswap.swoop.utils.owlapi.diff;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import org.semanticweb.owl.model.OWLAnnotationInstance;
import org.semanticweb.owl.model.OWLAnnotationProperty;
import org.semanticweb.owl.model.OWLClass;
import org.semanticweb.owl.model.OWLClassAxiom;
import org.semanticweb.owl.model.OWLClassAxiomVisitor;
import org.semanticweb.owl.model.OWLDataFactory;
import org.semanticweb.owl.model.OWLDataProperty;
import org.semanticweb.owl.model.OWLDataPropertyRangeAxiom;
import org.semanticweb.owl.model.OWLDataRange;
import org.semanticweb.owl.model.OWLDataType;
import org.semanticweb.owl.model.OWLDataValue;
import org.semanticweb.owl.model.OWLDescription;
import org.semanticweb.owl.model.OWLDifferentIndividualsAxiom;
import org.semanticweb.owl.model.OWLDisjointClassesAxiom;
import org.semanticweb.owl.model.OWLEntity;
import org.semanticweb.owl.model.OWLEntityVisitor;
import org.semanticweb.owl.model.OWLEnumeration;
import org.semanticweb.owl.model.OWLEquivalentClassesAxiom;
import org.semanticweb.owl.model.OWLEquivalentPropertiesAxiom;
import org.semanticweb.owl.model.OWLException;
import org.semanticweb.owl.model.OWLFunctionalPropertyAxiom;
import org.semanticweb.owl.model.OWLIndividual;
import org.semanticweb.owl.model.OWLIndividualAxiom;
import org.semanticweb.owl.model.OWLIndividualAxiomVisitor;
import org.semanticweb.owl.model.OWLInverseFunctionalPropertyAxiom;
import org.semanticweb.owl.model.OWLInversePropertyAxiom;
import org.semanticweb.owl.model.OWLObject;
import org.semanticweb.owl.model.OWLObjectProperty;
import org.semanticweb.owl.model.OWLObjectPropertyRangeAxiom;
import org.semanticweb.owl.model.OWLOntology;
import org.semanticweb.owl.model.OWLProperty;
import org.semanticweb.owl.model.OWLPropertyAxiom;
import org.semanticweb.owl.model.OWLPropertyAxiomVisitor;
import org.semanticweb.owl.model.OWLPropertyDomainAxiom;
import org.semanticweb.owl.model.OWLSameIndividualsAxiom;
import org.semanticweb.owl.model.OWLSubClassAxiom;
import org.semanticweb.owl.model.OWLSubPropertyAxiom;
import org.semanticweb.owl.model.OWLSymmetricPropertyAxiom;
import org.semanticweb.owl.model.OWLTransitivePropertyAxiom;
import org.semanticweb.owl.model.change.RemoveAnnotationInstance;
import org.semanticweb.owl.model.change.RemoveClassAxiom;
import org.semanticweb.owl.model.change.RemoveDataPropertyInstance;
import org.semanticweb.owl.model.change.RemoveDataPropertyRange;
import org.semanticweb.owl.model.change.RemoveDataType;
import org.semanticweb.owl.model.change.RemoveDomain;
import org.semanticweb.owl.model.change.RemoveEntity;
import org.semanticweb.owl.model.change.RemoveEnumeration;
import org.semanticweb.owl.model.change.RemoveIndividualAxiom;
import org.semanticweb.owl.model.change.RemoveIndividualClass;
import org.semanticweb.owl.model.change.RemoveObjectPropertyInstance;
import org.semanticweb.owl.model.change.RemoveObjectPropertyRange;
import org.semanticweb.owl.model.change.RemovePropertyAxiom;
import org.semanticweb.owl.model.change.RemoveSuperClass;
import org.semanticweb.owl.model.change.RemoveSuperProperty;
import org.semanticweb.owl.model.change.SetDeprecated;
import org.semanticweb.owl.model.change.SetFunctional;
import org.semanticweb.owl.model.change.SetInverseFunctional;
import org.semanticweb.owl.model.change.SetOneToOne;
import org.semanticweb.owl.model.change.SetSymmetric;
import org.semanticweb.owl.model.change.SetTransitive;
/**
 * Computes the "negative" half of an ontology diff: for every entity, axiom,
 * annotation, and datatype present in {@code source} but absent from
 * {@code destination}, a corresponding Remove* change is produced, ready to
 * be applied to a target ontology. Implemented as a set of OWL API visitors.
 */
public class NegativeDiff implements OWLEntityVisitor, OWLClassAxiomVisitor, OWLPropertyAxiomVisitor, OWLIndividualAxiomVisitor {
/**
* Get the differences as a set of OWLChanges, ready to apply to the
* target.
* @param source The source ontology
* @param destination The destination ontology
* @param target The OWLOntology the changes should be applied to.
* @return the list of removal changes
* @throws OWLException
*/
public static List getChanges(OWLOntology source, OWLOntology destination, OWLOntology target) throws OWLException {
    NegativeDiff ndiff = new NegativeDiff(source, destination);
    return ndiff.getChanges(target);
}
// Accumulated OntologyChange objects (reset on each getChanges call).
private List changes;
// Copies source-ontology objects into the target ontology's factory.
private OWLCopy copier;
private OWLOntology destination;
// Data factory of the target ontology.
private OWLDataFactory factory;
private OWLOntology source;
private OWLOntology target;
public NegativeDiff(OWLOntology source, OWLOntology destination) {
    this.source = source;
    this.destination = destination;
    changes = new Vector();
}
/**
* Remove any excess annotations an object might have.
* @param src
* @param dst
* @param tgt
* @throws OWLException
*/
protected void annotationRemover(OWLObject src, OWLObject dst, OWLObject tgt) throws OWLException {
    Set srcAnnotations = src.getAnnotations(source);
    Set dstAnnotations = dst.getAnnotations(destination);
    for (Iterator iter = srcAnnotations.iterator(); iter.hasNext();) {
        OWLAnnotationInstance annotation = (OWLAnnotationInstance) iter.next();
        // Annotation exists in source but not destination -> schedule removal.
        if (!dstAnnotations.contains(annotation)) {
            OWLAnnotationProperty tgtProp = (OWLAnnotationProperty) copier.copy(annotation.getProperty());
            changes.add(new RemoveAnnotationInstance(target, tgt, tgtProp, annotation.getContent(), null));
        }
    }
}
/**
* Adds changes between OWLProperties (not Data or Object property specific changes).
* @param entity the source-ontology property
* @param dstEntity the corresponding destination-ontology property
* @param tgtEntity the corresponding target-ontology property
* @throws OWLException
*/
private void genericPropertyChanges(OWLProperty entity, OWLProperty dstEntity, OWLProperty tgtEntity) throws OWLException {
    annotationRemover(entity, dstEntity, tgtEntity);
    // Clear flags that are set in source but not in destination.
    if (entity.isDeprecated(source) && !dstEntity.isDeprecated(destination)) {
        changes.add(new SetDeprecated(target, tgtEntity, false, null));
    }
    if (entity.isFunctional(source) && !dstEntity.isFunctional(destination)) {
        changes.add(new SetFunctional(target, tgtEntity, false, null));
    }
    for (Iterator iter = entity.getDomains(source).iterator(); iter.hasNext();) {
        OWLDescription desc = (OWLDescription) iter.next();
        if (!dstEntity.getDomains(destination).contains(desc)) {
            OWLDescription tgtDesc = copier.copy(desc);
            changes.add(new RemoveDomain(target, tgtEntity, tgtDesc, null));
        }
    }
    for (Iterator iter = entity.getSuperProperties(source).iterator(); iter.hasNext();) {
        OWLProperty prop = (OWLProperty) iter.next();
        if (!dstEntity.getSuperProperties(destination).contains(prop)) {
            OWLProperty tgtProp = (OWLProperty) copier.copy(prop);
            changes.add(new RemoveSuperProperty(target, tgtEntity, tgtProp, null));
        }
    }
}
/**
* Get the differences as a set of OWLChanges, ready to apply to the
* target.
* @param target The OWLOntology the changes should be applied to.
* @return the list of removal changes
* @throws OWLException
*/
public List getChanges(OWLOntology target) throws OWLException {
    changes = new Vector();
    this.target = target;
    factory = target.getOWLDataFactory();
    copier = new OWLCopy(source, target);
    // Visit every entity kind of the source ontology.
    Set entities = new HashSet();
    entities.addAll(source.getClasses());
    entities.addAll(source.getIndividuals());
    entities.addAll(source.getAnnotationProperties());
    entities.addAll(source.getDataProperties());
    entities.addAll(source.getObjectProperties());
    for (Iterator iter = entities.iterator(); iter.hasNext();) {
        ((OWLEntity) iter.next()).accept(this);
    }
    for (Iterator iter = source.getClassAxioms().iterator(); iter.hasNext();) {
        ((OWLClassAxiom) iter.next()).accept(this);
    }
    for (Iterator iter = source.getPropertyAxioms().iterator(); iter.hasNext();) {
        ((OWLPropertyAxiom) iter.next()).accept(this);
    }
    for (Iterator iter = source.getIndividualAxioms().iterator(); iter.hasNext();) {
        ((OWLIndividualAxiom) iter.next()).accept(this);
    }
    // Data types don't have a specific visitor, don't want all of OWLObjectVisitor
    for (Iterator iter = source.getDatatypes().iterator(); iter.hasNext();) {
        OWLDataType srcDT = (OWLDataType) iter.next();
        if (!destination.getDatatypes().contains(srcDT)) {
            OWLDataType tgtDT = factory.getOWLConcreteDataType(srcDT.getURI());
            changes.add(new RemoveDataType(target, tgtDT, null));
        }
    }
    return changes;
}
// Annotation property: remove the entity if absent, then diff its annotations.
public void visit(OWLAnnotationProperty entity) throws OWLException {
    OWLAnnotationProperty tgtEntity = factory.getOWLAnnotationProperty(entity.getURI());
    OWLAnnotationProperty dstEntity = destination.getAnnotationProperty(entity.getURI());
    if (dstEntity == null) {
        changes.add(new RemoveEntity(target, tgtEntity, null));
        // Stand-in destination entity so the comparisons below see empty sets.
        dstEntity = destination.getOWLDataFactory().getOWLAnnotationProperty(entity.getURI());
    }
    annotationRemover(entity, dstEntity, tgtEntity);
}
// Class: remove entity/enumerations/superclasses missing from destination.
public void visit(OWLClass entity) throws OWLException {
    OWLClass tgtEntity = factory.getOWLClass(entity.getURI());
    OWLClass dstEntity = destination.getClass(entity.getURI());
    if (dstEntity == null) {
        changes.add(new RemoveEntity(target, tgtEntity, null));
        dstEntity = destination.getOWLDataFactory().getOWLClass(entity.getURI());
    }
    annotationRemover(entity, dstEntity, tgtEntity);
    for (Iterator iter = entity.getEnumerations(source).iterator(); iter.hasNext();) {
        OWLEnumeration srcEnum = (OWLEnumeration) iter.next();
        if (!dstEntity.getEnumerations(destination).contains(srcEnum)) {
            changes.add(new RemoveEnumeration(target, tgtEntity, (OWLEnumeration) copier.copy(srcEnum), null));
        }
    }
    for (Iterator iter = entity.getSuperClasses(source).iterator(); iter.hasNext();) {
        OWLDescription desc = (OWLDescription) iter.next();
        if (!dstEntity.getSuperClasses(destination).contains(desc)) {
            changes.add(new RemoveSuperClass(target, tgtEntity, copier.copy(desc), null));
        }
    }
}
// Data property: generic property diff plus data-range removals.
public void visit(OWLDataProperty entity) throws OWLException {
    OWLDataProperty tgtEntity = factory.getOWLDataProperty(entity.getURI());
    OWLDataProperty dstEntity = destination.getDataProperty(entity.getURI());
    if (dstEntity == null) {
        changes.add(new RemoveEntity(target, tgtEntity, null));
        dstEntity = destination.getOWLDataFactory().getOWLDataProperty(entity.getURI());
    }
    genericPropertyChanges(entity, dstEntity, tgtEntity);
    for (Iterator iter = entity.getRanges(source).iterator(); iter.hasNext();) {
        OWLDataRange range = (OWLDataRange) iter.next();
        if (!dstEntity.getRanges(destination).contains(range)) {
            OWLDataRange tgtRange = copier.copyDataRange(range);
            changes.add(new RemoveDataPropertyRange(target, tgtEntity, tgtRange, null));
        }
    }
}
// Axiom visitors below share one pattern: if the destination ontology does not
// contain the axiom, copy its parts into the target and schedule a removal.
public void visit(OWLDataPropertyRangeAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLDataProperty tgtProperty = (OWLDataProperty) copier.copy(axiom.getProperty());
        OWLDataRange tgtRange = copier.copyDataRange(axiom.getRange());
        OWLPropertyAxiom tgtAxiom = factory.getOWLDataPropertyRangeAxiom(tgtProperty, tgtRange);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLDifferentIndividualsAxiom axiom) throws OWLException {
    if (!destination.getIndividualAxioms().contains(axiom)) {
        Set different = new HashSet();
        for (Iterator iter = axiom.getIndividuals().iterator(); iter.hasNext();) {
            different.add(copier.copy((OWLIndividual) iter.next()));
        }
        OWLIndividualAxiom tgtAxiom = factory.getOWLDifferentIndividualsAxiom(different);
        changes.add(new RemoveIndividualAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLDisjointClassesAxiom axiom) throws OWLException {
    if (!destination.getClassAxioms().contains(axiom)) {
        Set disjoints = new HashSet();
        for (Iterator iter = axiom.getDisjointClasses().iterator(); iter.hasNext();) {
            disjoints.add(copier.copy((OWLDescription) iter.next()));
        }
        OWLClassAxiom tgtAxiom = factory.getOWLDisjointClassesAxiom(disjoints);
        changes.add(new RemoveClassAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLEquivalentClassesAxiom axiom) throws OWLException {
    if (!destination.getClassAxioms().contains(axiom)) {
        Set equivalents = new HashSet();
        for (Iterator iter = axiom.getEquivalentClasses().iterator(); iter.hasNext();) {
            equivalents.add(copier.copy((OWLDescription) iter.next()));
        }
        OWLClassAxiom tgtAxiom = factory.getOWLEquivalentClassesAxiom(equivalents);
        changes.add(new RemoveClassAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLEquivalentPropertiesAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        Set equivalents = new HashSet();
        for (Iterator iter = axiom.getProperties().iterator(); iter.hasNext();) {
            equivalents.add(copier.copy((OWLProperty) iter.next()));
        }
        OWLPropertyAxiom tgtAxiom = factory.getOWLEquivalentPropertiesAxiom(equivalents);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLFunctionalPropertyAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLProperty tgtProp = (OWLProperty) copier.copy(axiom.getProperty());
        OWLPropertyAxiom tgtAxiom = factory.getOWLFunctionalPropertyAxiom(tgtProp);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
// Individual: compares types, data-property values and object-property values.
public void visit(OWLIndividual entity) throws OWLException {
    OWLIndividual tgtEntity;
    OWLIndividual dstEntity;
    // Anonymous individuals are addressed by anon-id, named ones by URI.
    if (entity.isAnonymous()) {
        tgtEntity = factory.getAnonOWLIndividual(entity.getAnonId());
        dstEntity = destination.getIndividual(entity.getAnonId());
    } else {
        tgtEntity = factory.getOWLIndividual(entity.getURI());
        dstEntity = destination.getIndividual(entity.getURI());
    }
    if (dstEntity == null) {
        changes.add(new RemoveEntity(target, tgtEntity, null));
        if (entity.isAnonymous()) {
            dstEntity = destination.getOWLDataFactory().getAnonOWLIndividual(entity.getAnonId());
        } else {
            dstEntity = destination.getOWLDataFactory().getOWLIndividual(entity.getURI());
        }
    }
    annotationRemover(entity, dstEntity, tgtEntity);
    Set srcTypes = entity.getTypes(source);
    Set dstTypes = dstEntity.getTypes(destination);
    for (Iterator typeIter = srcTypes.iterator(); typeIter.hasNext();) {
        OWLDescription srcType = (OWLDescription) typeIter.next();
        if (!dstTypes.contains(srcType)) {
            OWLDescription tgtType = copier.copy(srcType);
            changes.add(new RemoveIndividualClass(target, tgtEntity, tgtType, null));
        }
    }
    // Data property values: remove any value present in source but not destination.
    Map srcProps = entity.getDataPropertyValues(source);
    Map dstProps = dstEntity.getDataPropertyValues(destination);
    for (Iterator propIter = srcProps.keySet().iterator(); propIter.hasNext();) {
        OWLDataProperty prop = (OWLDataProperty) propIter.next();
        OWLDataProperty dstProp = destination.getOWLDataFactory().getOWLDataProperty(prop.getURI());
        OWLDataProperty tgtProp = factory.getOWLDataProperty(prop.getURI());
        Set srcValues = (Set) srcProps.get(prop);
        Set dstValues = (Set) dstProps.get(dstProp);
        if (dstValues == null) {
            dstValues = Collections.EMPTY_SET;
        }
        for (Iterator valueIter = srcValues.iterator(); valueIter.hasNext();) {
            OWLDataValue dv = (OWLDataValue) valueIter.next();
            if (!dstValues.contains(dv)) {
                OWLDataValue tgtDV = factory.getOWLConcreteData(dv.getURI(), dv.getLang(), dv.getValue());
                changes.add(new RemoveDataPropertyInstance(target, tgtEntity, tgtProp, tgtDV, null));
            }
        }
    }
    // Object property values: same treatment, with anonymous-individual handling.
    srcProps = entity.getObjectPropertyValues(source);
    dstProps = dstEntity.getObjectPropertyValues(destination);
    for (Iterator propIter = srcProps.keySet().iterator(); propIter.hasNext();) {
        OWLObjectProperty prop = (OWLObjectProperty) propIter.next();
        OWLObjectProperty dstProp = destination.getOWLDataFactory().getOWLObjectProperty(prop.getURI());
        OWLObjectProperty tgtProp = factory.getOWLObjectProperty(prop.getURI());
        Set srcValues = (Set) srcProps.get(prop);
        Set dstValues = (Set) dstProps.get(dstProp);
        if (dstValues == null) {
            dstValues = Collections.EMPTY_SET;
        }
        for (Iterator valueIter = srcValues.iterator(); valueIter.hasNext();) {
            OWLIndividual object = (OWLIndividual) valueIter.next();
            OWLIndividual dstObject;
            OWLIndividual tgtInd;
            if (object.isAnonymous()) {
                dstObject = destination.getOWLDataFactory().getAnonOWLIndividual(object.getAnonId());
                tgtInd = factory.getAnonOWLIndividual(object.getAnonId());
            } else {
                dstObject = destination.getOWLDataFactory().getOWLIndividual(object.getURI());
                tgtInd = factory.getOWLIndividual(object.getURI());
            }
            if (!dstValues.contains(dstObject)) {
                changes.add(new RemoveObjectPropertyInstance(target, tgtEntity, tgtProp, tgtInd, null));
            }
        }
    }
}
public void visit(OWLInverseFunctionalPropertyAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLObjectProperty tgtProp = (OWLObjectProperty) copier.copy(axiom.getProperty());
        OWLPropertyAxiom tgtAxiom = factory.getOWLInverseFunctionalPropertyAxiom(tgtProp);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLInversePropertyAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLObjectProperty tgtProp = (OWLObjectProperty) copier.copy(axiom.getProperty());
        OWLObjectProperty tgtInverse = (OWLObjectProperty) copier.copy(axiom.getInverseProperty());
        OWLPropertyAxiom tgtAxiom = factory.getOWLInversePropertyAxiom(tgtProp, tgtInverse);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
// Object property: generic diff plus object-property-specific flags and ranges.
public void visit(OWLObjectProperty entity) throws OWLException {
    OWLObjectProperty tgtEntity = factory.getOWLObjectProperty(entity.getURI());
    OWLObjectProperty dstEntity = destination.getObjectProperty(entity.getURI());
    if (dstEntity == null) {
        changes.add(new RemoveEntity(target, tgtEntity, null));
        dstEntity = destination.getOWLDataFactory().getOWLObjectProperty(entity.getURI());
    }
    genericPropertyChanges(entity, dstEntity, tgtEntity);
    if (entity.isInverseFunctional(source) && !dstEntity.isInverseFunctional(destination)) {
        changes.add(new SetInverseFunctional(target, tgtEntity, false, null));
    }
    if (entity.isSymmetric(source) && !dstEntity.isSymmetric(destination)) {
        changes.add(new SetSymmetric(target, tgtEntity, false, null));
    }
    if (entity.isTransitive(source) && !dstEntity.isTransitive(destination)) {
        changes.add(new SetTransitive(target, tgtEntity, false, null));
    }
    if (entity.isOneToOne(source) && !dstEntity.isOneToOne(destination)) {
        changes.add(new SetOneToOne(target, tgtEntity, false, null));
    }
    for (Iterator iter = entity.getRanges(source).iterator(); iter.hasNext();) {
        OWLDescription desc = (OWLDescription) iter.next();
        if (!dstEntity.getRanges(destination).contains(desc)) {
            OWLDescription tgtDesc = copier.copy(desc);
            changes.add(new RemoveObjectPropertyRange(target, tgtEntity, tgtDesc, null));
        }
    }
}
public void visit(OWLObjectPropertyRangeAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLObjectProperty tgtProperty = (OWLObjectProperty) copier.copy(axiom.getProperty());
        OWLDescription tgtRange = copier.copy(axiom.getRange());
        OWLPropertyAxiom tgtAxiom = factory.getOWLObjectPropertyRangeAxiom(tgtProperty, tgtRange);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLPropertyDomainAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLProperty tgtProperty = (OWLProperty) copier.copy(axiom.getProperty());
        OWLDescription tgtDomain = copier.copy(axiom.getDomain());
        OWLPropertyAxiom tgtAxiom = factory.getOWLPropertyDomainAxiom(tgtProperty, tgtDomain);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLSameIndividualsAxiom axiom) throws OWLException {
    if (!destination.getIndividualAxioms().contains(axiom)) {
        Set equivalents = new HashSet();
        for (Iterator iter = axiom.getIndividuals().iterator(); iter.hasNext();) {
            equivalents.add(copier.copy((OWLIndividual) iter.next()));
        }
        OWLIndividualAxiom tgtAxiom = factory.getOWLSameIndividualsAxiom(equivalents);
        changes.add(new RemoveIndividualAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLSubClassAxiom axiom) throws OWLException {
    if (!destination.getClassAxioms().contains(axiom)) {
        OWLDescription subClass = copier.copy(axiom.getSubClass());
        OWLDescription superClass = copier.copy(axiom.getSuperClass());
        OWLClassAxiom tgtAxiom = factory.getOWLSubClassAxiom(subClass, superClass);
        changes.add(new RemoveClassAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLSubPropertyAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLProperty subProp = (OWLProperty) copier.copy(axiom.getSubProperty());
        OWLProperty superProp = (OWLProperty) copier.copy(axiom.getSuperProperty());
        OWLPropertyAxiom tgtAxiom = factory.getOWLSubPropertyAxiom(subProp, superProp);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLSymmetricPropertyAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLObjectProperty tgtProp = (OWLObjectProperty) copier.copy(axiom.getProperty());
        OWLPropertyAxiom tgtAxiom = factory.getOWLSymmetricPropertyAxiom(tgtProp);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
public void visit(OWLTransitivePropertyAxiom axiom) throws OWLException {
    if (!destination.getPropertyAxioms().contains(axiom)) {
        OWLObjectProperty tgtProp = (OWLObjectProperty) copier.copy(axiom.getProperty());
        OWLPropertyAxiom tgtAxiom = factory.getOWLTransitivePropertyAxiom(tgtProp);
        changes.add(new RemovePropertyAxiom(target, tgtAxiom, null));
    }
}
}
|
darknight1050/BeatSaber-Quest-Codegen
|
include/Zenject/ConventionFilterTypesBinder.hpp
|
<gh_stars>0
// Autogenerated from CppHeaderCreator
// Created by Sc2ad
// =========================================================================
#pragma once
// Begin includes
#include "extern/beatsaber-hook/shared/utils/typedefs.h"
#include <initializer_list>
// Including type: Zenject.ConventionAssemblySelectionBinder
#include "Zenject/ConventionAssemblySelectionBinder.hpp"
#include "extern/beatsaber-hook/shared/utils/il2cpp-utils-methods.hpp"
#include "extern/beatsaber-hook/shared/utils/il2cpp-utils-properties.hpp"
#include "extern/beatsaber-hook/shared/utils/il2cpp-utils-fields.hpp"
#include "extern/beatsaber-hook/shared/utils/utils.h"
// Completed includes
// Begin forward declares
// Forward declaring namespace: Zenject
namespace Zenject {
// Forward declaring type: ConventionBindInfo
class ConventionBindInfo;
}
// Forward declaring namespace: System
namespace System {
// Forward declaring type: Type
class Type;
// Forward declaring type: Attribute
class Attribute;
// Forward declaring type: Func`2<T, TResult>
template<typename T, typename TResult>
class Func_2;
}
// Forward declaring namespace: System::Collections::Generic
namespace System::Collections::Generic {
// Forward declaring type: IEnumerable`1<T>
template<typename T>
class IEnumerable_1;
}
// Forward declaring namespace: System::Text::RegularExpressions
namespace System::Text::RegularExpressions {
// Forward declaring type: RegexOptions
struct RegexOptions;
// Forward declaring type: Regex
class Regex;
}
// Completed forward declares
// Type namespace: Zenject
namespace Zenject {
// Size: 0x18
#pragma pack(push, 1)
// Autogenerated type: Zenject.ConventionFilterTypesBinder
// [NoReflectionBakingAttribute] Offset: DDC6A0
class ConventionFilterTypesBinder : public Zenject::ConventionAssemblySelectionBinder {
public:
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass2_0
class $$c__DisplayClass2_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass4_0
class $$c__DisplayClass4_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass6_0
class $$c__DisplayClass6_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass8_0
class $$c__DisplayClass8_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass9_0_1<T>
template<typename T>
class $$c__DisplayClass9_0_1;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass13_0
class $$c__DisplayClass13_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass13_1
class $$c__DisplayClass13_1;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass14_0
class $$c__DisplayClass14_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass15_0
class $$c__DisplayClass15_0;
// Nested type: Zenject::ConventionFilterTypesBinder::$$c__DisplayClass18_0
class $$c__DisplayClass18_0;
// Creating value type constructor for type: ConventionFilterTypesBinder
ConventionFilterTypesBinder() noexcept {}
// public Zenject.ConventionFilterTypesBinder DerivingFromOrEqual()
// Offset: 0xFFFFFFFF
// Generic proxy for the managed DerivingFromOrEqual<T>(): looks up the il2cpp
// method by name and T's class, instantiates the generic method, and invokes
// it on this binder. THROW_UNLESS/RunMethodThrow raise on lookup or call
// failure. Statics cache the reflection results per T instantiation.
template<class T>
Zenject::ConventionFilterTypesBinder* DerivingFromOrEqual() {
static auto ___internal__logger = ::Logger::get().WithContext("Zenject::ConventionFilterTypesBinder::DerivingFromOrEqual");
static auto* ___internal__method = THROW_UNLESS((::il2cpp_utils::FindMethod(this, "DerivingFromOrEqual", std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}, ::std::vector<const Il2CppType*>{})));
static auto* ___generic__method = THROW_UNLESS(::il2cpp_utils::MakeGenericMethod(___internal__method, std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}));
return ::il2cpp_utils::RunMethodThrow<Zenject::ConventionFilterTypesBinder*, false>(this, ___generic__method);
}
// public Zenject.ConventionFilterTypesBinder DerivingFromOrEqual(System.Type parentType)
// Offset: 0x13745C8
Zenject::ConventionFilterTypesBinder* DerivingFromOrEqual(System::Type* parentType);
// public Zenject.ConventionFilterTypesBinder DerivingFrom()
// Offset: 0xFFFFFFFF
// Generic proxy for the managed DerivingFrom<T>(): resolves and instantiates
// the il2cpp generic method for T, then invokes it on this binder. Throws via
// THROW_UNLESS/RunMethodThrow on failure; reflection results are cached per T.
template<class T>
Zenject::ConventionFilterTypesBinder* DerivingFrom() {
static auto ___internal__logger = ::Logger::get().WithContext("Zenject::ConventionFilterTypesBinder::DerivingFrom");
static auto* ___internal__method = THROW_UNLESS((::il2cpp_utils::FindMethod(this, "DerivingFrom", std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}, ::std::vector<const Il2CppType*>{})));
static auto* ___generic__method = THROW_UNLESS(::il2cpp_utils::MakeGenericMethod(___internal__method, std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}));
return ::il2cpp_utils::RunMethodThrow<Zenject::ConventionFilterTypesBinder*, false>(this, ___generic__method);
}
// public Zenject.ConventionFilterTypesBinder DerivingFrom(System.Type parentType)
// Offset: 0x1374690
Zenject::ConventionFilterTypesBinder* DerivingFrom(System::Type* parentType);
// public Zenject.ConventionFilterTypesBinder WithAttribute()
// Offset: 0xFFFFFFFF
// Generic proxy for the managed WithAttribute<T>(). The static_assert
// enforces the C# `where T : Attribute` constraint at compile time; the rest
// resolves, instantiates, and invokes the il2cpp generic method for T.
template<class T>
Zenject::ConventionFilterTypesBinder* WithAttribute() {
static_assert(std::is_convertible_v<T, System::Attribute*>);
static auto ___internal__logger = ::Logger::get().WithContext("Zenject::ConventionFilterTypesBinder::WithAttribute");
static auto* ___internal__method = THROW_UNLESS((::il2cpp_utils::FindMethod(this, "WithAttribute", std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}, ::std::vector<const Il2CppType*>{})));
static auto* ___generic__method = THROW_UNLESS(::il2cpp_utils::MakeGenericMethod(___internal__method, std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}));
return ::il2cpp_utils::RunMethodThrow<Zenject::ConventionFilterTypesBinder*, false>(this, ___generic__method);
}
// public Zenject.ConventionFilterTypesBinder WithAttribute(System.Type attribute)
// Offset: 0x1374758
Zenject::ConventionFilterTypesBinder* WithAttribute(System::Type* attribute);
// public Zenject.ConventionFilterTypesBinder WithoutAttribute()
// Offset: 0xFFFFFFFF
// Generic proxy for the managed WithoutAttribute<T>(). Mirrors WithAttribute:
// static_assert enforces `where T : Attribute`; the il2cpp generic method is
// resolved, instantiated for T, and invoked on this binder.
template<class T>
Zenject::ConventionFilterTypesBinder* WithoutAttribute() {
static_assert(std::is_convertible_v<T, System::Attribute*>);
static auto ___internal__logger = ::Logger::get().WithContext("Zenject::ConventionFilterTypesBinder::WithoutAttribute");
static auto* ___internal__method = THROW_UNLESS((::il2cpp_utils::FindMethod(this, "WithoutAttribute", std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}, ::std::vector<const Il2CppType*>{})));
static auto* ___generic__method = THROW_UNLESS(::il2cpp_utils::MakeGenericMethod(___internal__method, std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}));
return ::il2cpp_utils::RunMethodThrow<Zenject::ConventionFilterTypesBinder*, false>(this, ___generic__method);
}
// public Zenject.ConventionFilterTypesBinder WithoutAttribute(System.Type attribute)
// Offset: 0x137485C
Zenject::ConventionFilterTypesBinder* WithoutAttribute(System::Type* attribute);
// public Zenject.ConventionFilterTypesBinder WithAttributeWhere(System.Func`2<T,System.Boolean> predicate)
// Offset: 0xFFFFFFFF
// Generic proxy for the managed WithAttributeWhere<T>(Func<T, bool>): unlike
// the no-arg proxies above, the method lookup also matches on the predicate's
// il2cpp type, and the predicate is forwarded to the managed call.
template<class T>
Zenject::ConventionFilterTypesBinder* WithAttributeWhere(System::Func_2<T, bool>* predicate) {
static_assert(std::is_convertible_v<T, System::Attribute*>);
static auto ___internal__logger = ::Logger::get().WithContext("Zenject::ConventionFilterTypesBinder::WithAttributeWhere");
static auto* ___internal__method = THROW_UNLESS((::il2cpp_utils::FindMethod(this, "WithAttributeWhere", std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}, ::std::vector<const Il2CppType*>{::il2cpp_utils::ExtractType(predicate)})));
static auto* ___generic__method = THROW_UNLESS(::il2cpp_utils::MakeGenericMethod(___internal__method, std::vector<Il2CppClass*>{::il2cpp_utils::il2cpp_type_check::il2cpp_no_arg_class<T>::get()}));
return ::il2cpp_utils::RunMethodThrow<Zenject::ConventionFilterTypesBinder*, false>(this, ___generic__method, predicate);
}
// public Zenject.ConventionFilterTypesBinder Where(System.Func`2<System.Type,System.Boolean> predicate)
// Offset: 0x1374960
Zenject::ConventionFilterTypesBinder* Where(System::Func_2<System::Type*, bool>* predicate);
// public Zenject.ConventionFilterTypesBinder InNamespace(System.String ns)
// Offset: 0x1374990
Zenject::ConventionFilterTypesBinder* InNamespace(::Il2CppString* ns);
// public Zenject.ConventionFilterTypesBinder InNamespaces(params System.String[] namespaces)
// Offset: 0x1374A40
Zenject::ConventionFilterTypesBinder* InNamespaces(::Array<::Il2CppString*>* namespaces);
// Creating initializer_list -> params proxy for: Zenject.ConventionFilterTypesBinder InNamespaces(params System.String[] namespaces)
Zenject::ConventionFilterTypesBinder* InNamespaces(std::initializer_list<::Il2CppString*> namespaces);
// Creating TArgs -> initializer_list proxy for: Zenject.ConventionFilterTypesBinder InNamespaces(params System.String[] namespaces)
// Variadic convenience overload for the C# `params string[]` method: packs
// the arguments into an initializer_list and defers to the
// initializer_list overload declared above.
template<class ...TParams>
Zenject::ConventionFilterTypesBinder* InNamespaces(TParams&&... namespaces) {
return InNamespaces({namespaces...});
}
// public Zenject.ConventionFilterTypesBinder InNamespaces(System.Collections.Generic.IEnumerable`1<System.String> namespaces)
// Offset: 0x1374A44
Zenject::ConventionFilterTypesBinder* InNamespaces(System::Collections::Generic::IEnumerable_1<::Il2CppString*>* namespaces);
// public Zenject.ConventionFilterTypesBinder WithSuffix(System.String suffix)
// Offset: 0x1374B0C
Zenject::ConventionFilterTypesBinder* WithSuffix(::Il2CppString* suffix);
// public Zenject.ConventionFilterTypesBinder WithPrefix(System.String prefix)
// Offset: 0x1374BD4
Zenject::ConventionFilterTypesBinder* WithPrefix(::Il2CppString* prefix);
// public Zenject.ConventionFilterTypesBinder MatchingRegex(System.String pattern)
// Offset: 0x1374C9C
Zenject::ConventionFilterTypesBinder* MatchingRegex(::Il2CppString* pattern);
// public Zenject.ConventionFilterTypesBinder MatchingRegex(System.String pattern, System.Text.RegularExpressions.RegexOptions options)
// Offset: 0x1374CA4
Zenject::ConventionFilterTypesBinder* MatchingRegex(::Il2CppString* pattern, System::Text::RegularExpressions::RegexOptions options);
// public Zenject.ConventionFilterTypesBinder MatchingRegex(System.Text.RegularExpressions.Regex regex)
// Offset: 0x1374D20
Zenject::ConventionFilterTypesBinder* MatchingRegex(System::Text::RegularExpressions::Regex* regex);
// static private System.Boolean IsInNamespace(System.Type type, System.String requiredNs)
// Offset: 0x1374DE8
static bool IsInNamespace(System::Type* type, ::Il2CppString* requiredNs);
// public System.Void .ctor(Zenject.ConventionBindInfo bindInfo)
// Offset: 0x137459C
// Implemented from: Zenject.ConventionAssemblySelectionBinder
// Base method: System.Void ConventionAssemblySelectionBinder::.ctor(Zenject.ConventionBindInfo bindInfo)
// Allocates a new managed ConventionFilterTypesBinder via il2cpp and runs the
// C# constructor .ctor(ConventionBindInfo). `creationType` selects the GC
// ownership mode (Temporary by default); throws on allocation/ctor failure.
template<::il2cpp_utils::CreationType creationType = ::il2cpp_utils::CreationType::Temporary>
static ConventionFilterTypesBinder* New_ctor(Zenject::ConventionBindInfo* bindInfo) {
static auto ___internal__logger = ::Logger::get().WithContext("Zenject::ConventionFilterTypesBinder::.ctor");
return THROW_UNLESS((::il2cpp_utils::New<ConventionFilterTypesBinder*, creationType>(bindInfo)));
}
}; // Zenject.ConventionFilterTypesBinder
#pragma pack(pop)
}
DEFINE_IL2CPP_ARG_TYPE(Zenject::ConventionFilterTypesBinder*, "Zenject", "ConventionFilterTypesBinder");
|
cmancone/mygrations
|
tests/formats/mysql/file_reader/test_insert_parser.py
|
import unittest
from mygrations.formats.mysql.file_reader.insert_parser import insert_parser
class test_insert_parser(unittest.TestCase):
    """Unit tests for insert_parser's handling of INSERT statements.

    Fix: the deprecated camelCase alias ``assertEquals`` (removed in
    Python 3.12) is replaced by ``assertEqual`` throughout.
    """

    def test_simple(self):
        """A single-row INSERT with a trailing semicolon parses completely."""
        parser = insert_parser()
        returned = parser.parse("INSERT INTO test_table (`col1`,`col2`) VALUES ('val','val2');")

        # we should have matched
        self.assertTrue(parser.matched)
        # and we should have matched everything
        self.assertEqual('', returned)

        # we should have lots of data now
        self.assertEqual('test_table', parser.table)
        self.assertEqual(['col1', 'col2'], parser.columns)
        self.assertEqual([['val', 'val2']], parser.raw_rows)
        self.assertTrue(parser.has_semicolon)
        self.assertEqual([], parser.errors)

    def test_multiple_values(self):
        """Comma-separated value tuples each become a row; no semicolon is noted."""
        parser = insert_parser()
        returned = parser.parse("INSERT INTO test_table (`col1`,`col2`) VALUES ('val','val2'),('val3','val4')")

        # we should have matched
        self.assertTrue(parser.matched)
        # and we should have matched everything
        self.assertEqual('', returned)

        # we should have lots of data now
        self.assertEqual('test_table', parser.table)
        self.assertEqual(['col1', 'col2'], parser.columns)
        self.assertEqual([['val', 'val2'], ['val3', 'val4']], parser.raw_rows)
        self.assertFalse(parser.has_semicolon)
        self.assertEqual([], parser.errors)

    def test_missing_comma(self):
        """A missing comma between tuples still parses but records a warning."""
        parser = insert_parser()
        returned = parser.parse("INSERT INTO test_table (`col1`,`col2`) VALUES ('val','val2')('val3','val4')")

        # we should have matched
        self.assertTrue(parser.matched)
        # and we should have matched everything
        self.assertEqual('', returned)

        # we should have lots of data now
        self.assertEqual('test_table', parser.table)
        self.assertEqual(['col1', 'col2'], parser.columns)
        self.assertEqual([['val', 'val2'], ['val3', 'val4']], parser.raw_rows)
        self.assertFalse(parser.has_semicolon)
        self.assertEqual(1, len(parser.warnings))
|
Sohieeb/competitive-programming
|
Kattis/Big Truck.cpp
|
<filename>Kattis/Big Truck.cpp
#include <bits/stdc++.h>
using namespace std;
using namespace __gnu_cxx;
typedef double db;
typedef long long ll;
typedef pair<db, db> pdd;
typedef pair<ll, ll> pll;
typedef pair<int, int> pii;
typedef unsigned long long ull;
#define F first
#define S second
#define pnl printf("\n")
#define sz(x) (int)x.size()
#define sf(x) scanf("%d",&x)
#define pf(x) printf("%d\n",x)
#define all(x) x.begin(),x.end()
#define rall(x) x.rbegin(),x.rend()
#define rep(i, n) for(int i = 0; i < n; ++i)
const db eps = 1e-9;
const db pi = acos(-1);
const int INF = 0x3f3f3f3f;
const ll LL_INF = 0x3f3f3f3f3f3f3f3f;
const int mod = 1000 * 1000 * 1000 + 7;
int n, m;
int a[105];
vector<pii> adj[105];
// Heap ordering for Dijkstra's priority queue. std::priority_queue keeps the
// *greatest* element under the comparator on top, so "greater distance" must
// compare as lower priority: the entry with the smallest distance (a.S.F)
// wins, and on equal distance the larger accumulated total (a.S.S) wins.
bool cmp(pair<int, pii> a, pair<int, pii> b) {
    if (a.S.F == b.S.F) {
        return a.S.S < b.S.S;
    }
    return a.S.F > b.S.F;
}
// Modified Dijkstra from node 1: minimizes travel distance to every node and,
// among equal-distance paths, maximizes the accumulated sum of a[] collected
// along the path. Returns dist[n] = {shortest distance, best accumulated
// total}; distance stays at 0x3f3f3f3f when n is unreachable. Relies on
// globals n, a[], adj[] and the comparator cmp().
pii dijkstra() {
// Min-heap by distance, ties broken toward larger accumulated totals (cmp).
priority_queue<pair<int, pii>, vector<pair<int, pii>>, decltype(&cmp)> pq(&cmp);
// dist[v] = {best distance found so far, accumulated a[] total on that path}.
pii dist[105];
for (int i = 0; i <= n; ++i)
dist[i] = {0x3f3f3f3f, a[i]};
// Source node 1: distance 0, total starts with its own a[1].
dist[1] = {0, a[1]};
pq.push({1, dist[1]});
while (!pq.empty()) {
pair<int, pii> curNode = pq.top();
pq.pop();
// Skip stale queue entries (a better distance was recorded after pushing).
if (curNode.S.F > dist[curNode.F].F)
continue;
for (auto node : adj[curNode.F]) {
int v = node.F, d = node.S;
if (dist[v].F > curNode.S.F + d) {
// Strictly shorter path: adopt its distance and accumulated total.
dist[v] = {curNode.S.F + d, curNode.S.S + a[v]};
pq.push({v, dist[v]});
} else if (dist[v].F == curNode.S.F + d && dist[v].S < curNode.S.S + a[v]) {
// Same distance but a larger accumulated total: prefer it.
dist[v] = {curNode.S.F + d, curNode.S.S + a[v]};
pq.push({v, dist[v]});
}
}
}
return dist[n];
}
int main() {
scanf("%d", &n);
for (int i = 1; i <= n; ++i)
scanf("%d", &a[i]);
scanf("%d", &m);
for (int i = 0; i < m; ++i) {
int a, b, d;
scanf("%d%d%d", &a, &b, &d);
adj[a].push_back({b, d});
adj[b].push_back({a, d});
}
pii ans = dijkstra();
if (ans.F > 100000)
cout << "impossible\n";
else
cout << ans.F << " " << ans.S << endl;
return 0;
}
|
open-gtd/server
|
tags/presentation/converters/modified.go
|
package converters
import (
"github.com/open-gtd/server/contract/tags"
"github.com/open-gtd/server/tags/domain"
)
// ConvertAllToModified maps a slice of domain tags to their presentation
// ModifiedTag form. On the first conversion failure it returns an empty
// (non-nil) slice together with the error.
func ConvertAllToModified(t []domain.Tag) ([]tags.ModifiedTag, error) {
	converted := make([]tags.ModifiedTag, 0, len(t))
	for _, domainTag := range t {
		modified, err := ConvertToModifiedTag(domainTag)
		if err != nil {
			return []tags.ModifiedTag{}, err
		}
		converted = append(converted, modified)
	}
	return converted, nil
}
// ConvertToModifiedTag builds the presentation ModifiedTag for a single
// domain tag. It returns a zero ModifiedTag and the error when the tag's
// type cannot be converted to its presentation descriptor.
func ConvertToModifiedTag(t domain.Tag) (tags.ModifiedTag, error) {
	descriptor, err := ConvertTypeToPresentation(t.GetType())
	if err != nil {
		return tags.ModifiedTag{}, err
	}

	modified := tags.ModifiedTag{
		OriginalName: string(t.GetOriginalName()),
		Name:         string(t.GetName()),
		Type:         descriptor,
	}
	return modified, nil
}
|
freak4pc/reactivex.github.io
|
js/goog/testing/proto2/proto2_test.js
|
// Copyright 2012 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.provide('goog.testing.proto2Test');
goog.setTestOnly('goog.testing.proto2Test');
goog.require('goog.testing.jsunit');
goog.require('goog.testing.proto2');
goog.require('proto2.TestAllTypes');
// Exercises goog.testing.proto2.assertEquals: equal messages pass (with and
// without a custom message), while type mismatches and extra present fields
// produce descriptive failures.
function testAssertEquals() {
  var assertProto2Equals = goog.testing.proto2.assertEquals;
  assertProto2Equals(new proto2.TestAllTypes, new proto2.TestAllTypes);
  assertProto2Equals(new proto2.TestAllTypes, new proto2.TestAllTypes, 'oops');

  // Comparing messages of different types fails with a type mismatch.
  var error = assertThrows(goog.partial(assertProto2Equals,
      new proto2.TestAllTypes, new proto2.TestAllTypes.NestedMessage));
  assertEquals(
      'Message type mismatch: TestAllTypes != TestAllTypes.NestedMessage',
      error.message);

  // A message with an extra present field fails, naming that field.
  var populated = new proto2.TestAllTypes;
  populated.setOptionalInt32(1);
  error = assertThrows(goog.partial(assertProto2Equals,
      new proto2.TestAllTypes, populated));
  assertEquals('optional_int32 should not be present', error.message);

  // The custom message is prepended to the failure description.
  error = assertThrows(goog.partial(assertProto2Equals,
      new proto2.TestAllTypes, populated, 'oops'));
  assertEquals('oops\noptional_int32 should not be present', error.message);
}
// Two freshly constructed messages have no differences.
function testFindDifferences_EmptyMessages() {
  var diff = goog.testing.proto2.findDifferences_(
      new proto2.TestAllTypes, new proto2.TestAllTypes, '');
  assertEquals('', diff);
}
// A field present on only one side is reported, with the path prefix
// prepended when one is supplied.
function testFindDifferences_FieldNotPresent() {
  var populated = new proto2.TestAllTypes;
  populated.setOptionalInt32(0);
  var empty = new proto2.TestAllTypes;

  assertEquals('optional_int32 should not be present',
      goog.testing.proto2.findDifferences_(empty, populated, ''));
  assertEquals('optional_int32 should be present',
      goog.testing.proto2.findDifferences_(populated, empty, ''));
  assertEquals('path/optional_int32 should be present',
      goog.testing.proto2.findDifferences_(populated, empty, 'path'));
}
// Differing scalar values are reported as "expected, but was actual".
function testFindDifferences_IntFieldDiffers() {
  var expected = new proto2.TestAllTypes;
  var actual = new proto2.TestAllTypes;
  expected.setOptionalInt32(1);
  actual.setOptionalInt32(2);
  assertEquals('optional_int32 should be 1, but was 2',
      goog.testing.proto2.findDifferences_(expected, actual, ''));
}
// Differences inside nested messages include the nested path.
function testFindDifferences_NestedIntFieldDiffers() {
  var expected = new proto2.TestAllTypes;
  var expectedNested = new proto2.TestAllTypes.NestedMessage();
  expectedNested.setB(1);
  expected.setOptionalNestedMessage(expectedNested);

  var actual = new proto2.TestAllTypes;
  var actualNested = new proto2.TestAllTypes.NestedMessage();
  actualNested.setB(2);
  actual.setOptionalNestedMessage(actualNested);

  assertEquals('optional_nested_message/b should be 1, but was 2',
      goog.testing.proto2.findDifferences_(expected, actual, ''));
}
// Repeated fields of different lengths are reported by item count.
function testFindDifferences_RepeatedFieldLengthDiffers() {
  var shorter = new proto2.TestAllTypes;
  shorter.addRepeatedInt32(1);
  var longer = new proto2.TestAllTypes;
  longer.addRepeatedInt32(1);
  longer.addRepeatedInt32(2);
  assertEquals('repeated_int32 should have 1 items, but has 2',
      goog.testing.proto2.findDifferences_(shorter, longer, ''));
}
// A differing element of a repeated field is reported with its index.
function testFindDifferences_RepeatedFieldItemDiffers() {
  var expected = new proto2.TestAllTypes;
  expected.addRepeatedInt32(1);
  var actual = new proto2.TestAllTypes;
  actual.addRepeatedInt32(2);
  assertEquals('repeated_int32[0] should be 1, but was 2',
      goog.testing.proto2.findDifferences_(expected, actual, ''));
}
// Differences inside repeated nested messages combine index and nested path.
function testFindDifferences_RepeatedNestedMessageDiffers() {
  var expected = new proto2.TestAllTypes;
  var expectedNested = new proto2.TestAllTypes.NestedMessage();
  expectedNested.setB(1);
  expected.addRepeatedNestedMessage(expectedNested);

  var actual = new proto2.TestAllTypes;
  var actualNested = new proto2.TestAllTypes.NestedMessage();
  actualNested.setB(2);
  actual.addRepeatedNestedMessage(actualNested);

  assertEquals('repeated_nested_message[0]/b should be 1, but was 2',
      goog.testing.proto2.findDifferences_(expected, actual, ''));
}
// fromObject builds a message from a plain JSON-style object, and rejects
// unknown field names and wrongly typed values.
function testFromObject() {
  var expected = new proto2.TestAllTypes;
  var nested = new proto2.TestAllTypes.NestedMessage();
  nested.setB(1);
  expected.addRepeatedNestedMessage(nested);
  expected.setOptionalInt32(2);

  // Successfully deserializes simple as well as message fields.
  assertObjectEquals(
      expected,
      goog.testing.proto2.fromObject(proto2.TestAllTypes, {
        'optional_int32': 2,
        'repeated_nested_message': [{'b': 1}]
      }));

  // Fails if the field name is not recognized.
  assertThrows(function() {
    goog.testing.proto2.fromObject(proto2.TestAllTypes, {'unknown': 1});
  });

  // Fails if the value type is wrong in the JSON object.
  assertThrows(function() {
    goog.testing.proto2.fromObject(proto2.TestAllTypes,
        {'optional_int32': '1'});
  });
}
|
laifagen4897/WeBASE-Node-Manager
|
src/test/java/node/mgr/test/alert/mail/AlertContentTest.java
|
<reponame>laifagen4897/WeBASE-Node-Manager
/**
* Copyright 2014-2021 the original author or authors.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package node.mgr.test.alert.mail;
import com.webank.webase.node.mgr.base.tools.AlertRuleTools;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
public class AlertContentTest {

    /** Prints the alert-type string mapped from enum value 1 (manual check). */
    @Test
    public void testAlertTitle() {
        System.out.println(AlertRuleTools.getAlertTypeStrFromEnum(1));
    }

    /**
     * AlertRuleTools.processMailContent:
     * String param cast to List<String>
     * replaceText(String) now support List<String>
     */
    @Test
    public void testAlertParamReplaceList() {
        String alertContent = "{nodeId}节点异常,请到“节点管理”页面查看具体信息。{english}";
        String params = "[\"{nodeId}\",\"{english}\"]";

        List<String> replacements = new ArrayList<>();
        replacements.add("0x111");
        replacements.add("hello ennnglish");

        String rendered = AlertRuleTools.processMailContent(alertContent, params, replacements);
        System.out.println(alertContent);
        System.out.println(rendered);
    }
}
|
mucahitozel55/IEEE_MADC_2019
|
app/src/main/java/com/arapeak/adkya/model/getSecondMaterial/ResultGetSecondMaterial.java
|
<gh_stars>1-10
package com.arapeak.adkya.model.getSecondMaterial;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
 * Gson-annotated model holding a success flag plus a list of material
 * entries — presumably the "get second material" API response body (TODO
 * confirm against the calling service code).
 */
public class ResultGetSecondMaterial {
// Success flag (maps to JSON key "status"); may be null if absent.
@SerializedName("status")
@Expose
private Boolean status;
// Result entries (JSON key "data"); null until set by deserialization.
@SerializedName("data")
@Expose
private List<SecondMaterialData> data = null;
/** @return the success flag, or null when the payload omitted it */
public Boolean getStatus() {
return status;
}
/** @param status the success flag to store */
public void setStatus(Boolean status) {
this.status = status;
}
/** @return the material entries, or null when none were set */
public List<SecondMaterialData> getData() {
return data;
}
/** @param data the material entries to store */
public void setData(List<SecondMaterialData> data) {
this.data = data;
}
}
|
CoyoteRulea/auriazules.com
|
web/themes/particle/source/default/_patterns/01-atoms/svg/__tests__/svg.test.js
|
<reponame>CoyoteRulea/auriazules.com<filename>web/themes/particle/source/default/_patterns/01-atoms/svg/__tests__/svg.test.js
import { name } from '../index';
// Smoke test: importing the pattern's index registers it under the name 'svg'.
test('svg component is registered', () => {
expect(name).toBe('svg');
});
|
LIZHICHAOUNICORN/Toolkits
|
leetcode/DFS/serialize_and_deserialize_binary_tree.cc
|
#include <stack>
#include <vector>
#include <limits.h>
#include "third_party/gflags/include/gflags.h"
#include "third_party/glog/include/logging.h"
// Problem:
// https://leetcode-cn.com/problems/serialize-and-deserialize-binary-tree/
using namespace std;
// A binary-tree node, LeetCode style.
struct TreeNode {
    int val;
    TreeNode* left;
    TreeNode* right;
    TreeNode(int x) : val(x), left(nullptr), right(nullptr) {}
};

// Serializes and deserializes a binary tree.
//
// Fix: the previous implementation encoded the tree as a heap-style array
// (child of i at 2*i+1 / 2*i+2), which grows as O(2^depth) — a skewed tree
// of modest depth exhausted memory. This version uses a preorder walk with
// explicit "Nan" markers for absent children: O(n) time and space for any
// tree shape.
//
// Format: '#'-terminated tokens, each a decimal value or "Nan".
// serialize(nullptr) yields "" and deserialize("") yields nullptr, matching
// the original contract.
class Codec {
private:
    // Appends the preorder encoding of `node` (and its subtree) to `out`.
    void writeNode(TreeNode* node, std::string& out) {
        if (node == nullptr) {
            out += "Nan#";
            return;
        }
        out += std::to_string(node->val);
        out += '#';
        writeNode(node->left, out);
        writeNode(node->right, out);
    }

    // Consumes one subtree's encoding starting at `pos` and rebuilds it;
    // `pos` is left just past the consumed tokens.
    TreeNode* readNode(const std::string& data, size_t& pos) {
        size_t start = pos;
        while (data[pos] != '#') {
            ++pos;
        }
        std::string token = data.substr(start, pos - start);
        ++pos;  // skip the '#' terminator
        if (token == "Nan") {
            return nullptr;
        }
        TreeNode* node = new TreeNode(std::stoi(token));
        node->left = readNode(data, pos);
        node->right = readNode(data, pos);
        return node;
    }

public:
    // Encodes a tree to a single string ("" for an empty tree).
    std::string serialize(TreeNode* root) {
        std::string ret;
        if (root == nullptr) return ret;
        writeNode(root, ret);
        return ret;
    }

    // Decodes your encoded data to tree (nullptr for "").
    TreeNode* deserialize(std::string data) {
        if (data.empty()) return nullptr;
        size_t pos = 0;
        return readNode(data, pos);
    }
};
// Your Codec object will be instantiated and called as such:
// Codec ser, deser;
// TreeNode* ans = deser.deserialize(ser.serialize(root));
// Manual smoke entry point (not part of the LeetCode submission): initializes
// glog/gflags, then serializes an empty tree — which yields the empty string —
// and logs it.
int main(int argc, char* argv[]) {
google::InitGoogleLogging(argv[0]);
gflags::ParseCommandLineFlags(&argc, &argv, false);
Codec codec;
auto ptr = codec.serialize(nullptr);
LOG(INFO) << ptr;
return 0;
}
|
cogentcloud/ef-cms
|
web-client/src/presenter/sequences/openPdfPreviewModalSequence.js
|
import { getPDFForPreviewAction } from '../actions/getPDFForPreviewAction';
import { openPdfPreviewModalAction } from '../actions/openPdfPreviewModalAction';
// Presenter sequence: fetch the PDF to preview, then open the preview modal.
export const openPdfPreviewModalSequence = [
getPDFForPreviewAction,
openPdfPreviewModalAction,
];
|
Andrei-Marcu/My-AoC2019-submissions
|
10/10.cpp
|
<filename>10/10.cpp
#include <iostream>
#include <fstream>
#include <cstring>
#include <cstdlib>
#include <utility>
#include <vector>
#include <algorithm>
#include <set>
#include <cmath>
#include <map>
using namespace std;
typedef pair<int, int> Angle;
ifstream fin("input.txt");
// Grid position of an asteroid, plus part-2 bookkeeping: `angle` holds the
// reduced direction vector from the laser station to this asteroid, and
// `pos` its distance rank within that direction group (the sweep round that
// vaporizes it).
struct Coord {
    int x, y, pos;
    Angle angle;
    Coord() {}
    Coord(int x, int y) : x(x), y(y) {}

    // Component-wise difference. Fixed: the original returned
    // `*new Coord(...)`, leaking one heap allocation per call; returning by
    // value is observably identical without the leak.
    Coord operator-(Coord& rhs) {
        return Coord(x - rhs.x, y - rhs.y);
    }

    // Squared Euclidean distance from the origin (integer, no floating point).
    int distSqr() {
        return x * x + y * y;
    }

    // Reduces (x, y) in place to the smallest integer vector with the same
    // direction: axis-aligned vectors become unit steps, others are divided
    // by gcd(|x|, |y|). (0, 0) is left unchanged.
    void gcd() {
        if (!x) {
            if (!y)
                return;
            y /= abs(y);
            return;
        }
        if (!y) {
            x /= abs(x);
            return;
        }
        int a = abs(x), b = abs(y), r = a % b;
        while (r) {
            a = b;
            b = r;
            r = a % b;
        }
        x /= b;
        y /= b;
    }

    // Despite the operator name this does not divide: it stores in `angle`
    // the reduced direction vector pointing from `rhs` toward this asteroid.
    // (Now binds the difference to a value instead of a reference, matching
    // the fixed by-value operator- above.)
    Coord& operator/=(Coord& rhs) {
        Coord rez = *this - rhs;
        rez.gcd();
        angle.first = rez.x;
        angle.second = rez.y;
        return *this;
    }

    // Sort key for the clockwise-from-up laser sweep: up (0,-1) maps to -4.0
    // (first), the station's own (0,0) direction to 4.0 (last, and its `pos`
    // is forced to INT_MAX as a side effect), everything else to atan2(-x, y).
    double getAbsAngle() {
        auto& x = angle.first;
        auto& y = angle.second;
        if (x == 0 && y == 0) {
            pos = INT_MAX;
            return 4.0;
        }
        if (x == 0 && y == -1)
            return -4.0;
        return atan2(-x, y);
    }

    // Vaporization order: primarily by sweep round (`pos`), then by angle
    // within the round. Both getAbsAngle() calls run before the comparison so
    // the station's `pos` side effect is applied first.
    bool operator<(Coord& rhs) {
        double thisAngle = getAbsAngle();
        double rhsAngle = rhs.getAbsAngle();
        if (this->pos == rhs.pos) {
            return thisAngle < rhsAngle;
        }
        return this->pos < rhs.pos;
    }
} best1;  // the chosen monitoring-station asteroid (assigned in main)
// Orders asteroid pointers by squared distance from the station `best1`
// (nearest first); used to rank asteroids within one direction group.
bool lessDist(Coord* c1, Coord* c2) {
    int d1 = (*c1 - best1).distSqr();
    int d2 = (*c2 - best1).distSqr();
    return d1 < d2;
}
// Maps a reduced direction vector to a sortable angle key. Special cases:
// (0, 0) -> 4.0 (sorts last) and straight up (0, -1) -> -4.0 (sorts first);
// every other direction uses atan2(-x, y).
double getAbsAngl(int x, int y) {
    if (x == 0) {
        if (y == 0)
            return 4.0;
        if (y == -1)
            return -4.0;
    }
    return atan2(-x, y);
}
// Part 1: pick the asteroid that sees the most distinct directions (each
// reduced direction vector hides everything behind its nearest asteroid).
// Part 2: group asteroids by direction from that station, rank each group by
// distance, and sort all asteroids into vaporization order.
int main() {
char s[200];
vector<Coord> asteroids;
// Parse the grid from fin: '#' marks an asteroid at column x, row y.
for (size_t y = 0; fin >> s; y++)
{
for (size_t x = 0; s[x]; x++)
{
if (s[x] == '#') {
asteroids.emplace_back(x, y);
}
}
}
// For every candidate station, count the distinct reduced directions to all
// asteroids (operator/= stores the direction into comp.angle as a side effect).
size_t mx = 0;
for (auto& asteroid : asteroids)
{
set<Angle> angles;
for (auto& comp : asteroids)
{
comp /= asteroid;
angles.insert(comp.angle);
}
if (mx < angles.size()) {
mx = angles.size();
best1 = asteroid;
}
}
// mx also counts the station's own (0,0) direction, hence the -1.
cout /*<< best1.x << ' ' << best1.y << '\n'*/ << mx - 1;
// Recompute all directions relative to the chosen station and bucket the
// asteroids by direction.
map<Angle, vector<Coord*>> astMap;
for (auto& asteroid : asteroids)
{
asteroid /= best1;
astMap[asteroid.angle].push_back(&asteroid);
}
// Within each direction bucket, rank by distance: pos = the sweep round in
// which the laser vaporizes that asteroid.
for (auto& elem : astMap)
{
auto& vec = elem.second;
sort(vec.begin(), vec.end(), lessDist);
for (size_t i = 0; i < vec.size(); i++)
{
vec[i]->pos = i;
}
}
// Global vaporization order: by round, then clockwise angle (Coord::operator<).
sort(asteroids.begin(), asteroids.end());
// The 200th asteroid vaporized — assumes the puzzle input contains at least
// 200 asteroids besides the station (TODO confirm for arbitrary inputs).
cout << ' ' << (asteroids[199].x * 100 + asteroids[199].y);
}
|
lechium/iOS1351Headers
|
usr/libexec/backupd/MBSiriPlugin.h
|
<reponame>lechium/iOS1351Headers
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <objc/NSObject.h>
#import "MBPlugin-Protocol.h"
// Class-dumped backupd plugin conforming to MBPlugin; from the names it
// presumably hooks backup/restore handling for the Siri data domain —
// NOTE(review): semantics inferred from selector names only, confirm against
// the binary. IMP addresses refer to the original backupd executable.
@interface MBSiriPlugin : NSObject <MBPlugin>
{
}

- (_Bool)_deviceIsNonAopDevice;	// IMP=0x00000001001d4cd8
- (id)startingRestoreWithEngine:(id)arg1;	// IMP=0x00000001001d4ac4
- (id)startingBackupWithEngine:(id)arg1;	// IMP=0x00000001001d4608
- (id)_standardizePath:(id)arg1 fromDomain:(id)arg2;	// IMP=0x00000001001d4404
@end
|
alpaka-group/mallocMC
|
alpaka/test/unit/kernel/src/KernelWithoutTemplatedAccParam.cpp
|
/* Copyright 2019 <NAME>, <NAME>, <NAME>
*
* This file is part of alpaka.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#include <alpaka/kernel/Traits.hpp>
#include <alpaka/test/KernelExecutionFixture.hpp>
#include <catch2/catch.hpp>
//#############################################################################
//! It is not possible to use a alpaka kernel function object without a templated operator() when the CUDA accelerator
//! is hard-coded.
//!
//! However, compiling such kernels with a CPU device works fine.
//!
//! When the CUDA accelerator is used, the following error is triggered:
//! /alpaka/include/alpaka/workdiv/Traits.hpp(...): error: calling a __device__ function("getWorkDiv") from a __host__
//! __device__ function("getWorkDiv") is not allowed The kernel function objects function call operator is attributed
//! with ALPAKA_FN_ACC which is identical to __host__ __device__. The 'alpaka::getWorkDiv<...>(acc)' function that is
//! called has the ALPAKA_FN_HOST_ACC attribute (also equal to __host__ __device__). The underlying trait calls the
//! CUDA specialized method which has the __device__ attribute. Because this call chain does not contain any templates
//! and therefore no calls depending on input types, everything can be resolved at the first time the template is
//! parsed which results in the given error.
//!
//! Currently, the only possible way to solve this is to make the function call operator a template nonetheless by
//! providing an unused template parameter.
using Dim = alpaka::DimInt<2u>;
using Idx = std::uint32_t;
#if defined(ALPAKA_ACC_CPU_SERIAL_ENABLED)
using AccCpu = alpaka::AccCpuSerial<Dim, Idx>;
#endif
#if defined(ALPAKA_ACC_GPU_HIP_ENABLED) && BOOST_LANG_HIP
using AccGpu = alpaka::AccGpuHipRt<Dim, Idx>;
#elif defined(ALPAKA_ACC_GPU_CUDA_ENABLED) && BOOST_LANG_CUDA
using AccGpu = alpaka::AccGpuCudaRt<Dim, Idx>;
#endif
#if defined(ALPAKA_ACC_CPU_SERIAL_ENABLED)
//#############################################################################
struct KernelNoTemplateCpu
{
    //! Checks that exactly one thread was launched in total; a plain
    //! (non-templated) operator() compiles fine for a CPU accelerator.
    ALPAKA_FN_ACC auto operator()(AccCpu const& acc, bool* success) const -> void
    {
        auto const threadCount = (alpaka::getWorkDiv<alpaka::Grid, alpaka::Threads>(acc)).prod();
        ALPAKA_CHECK(*success, static_cast<alpaka::Idx<AccCpu>>(1) == threadCount);
    }
};
//-----------------------------------------------------------------------------
TEST_CASE("kernelNoTemplateCpu", "[kernel]")
{
    // Run the non-templated kernel on a single-thread work division.
    KernelNoTemplateCpu kernel;
    alpaka::test::KernelExecutionFixture<AccCpu> fixture{alpaka::Vec<Dim, Idx>::ones()};
    REQUIRE(fixture(kernel));
}
#endif
/*#if defined(ALPAKA_ACC_GPU_CUDA_ENABLED) && BOOST_LANG_CUDA
//#############################################################################
//! DO NOT ENABLE! COMPILATION WILL FAIL!
struct KernelNoTemplateGpu
{
//-----------------------------------------------------------------------------
ALPAKA_FN_ACC
auto operator()(
AccGpu const & acc,
bool* success) const
-> void
{
ALPAKA_CHECK(
*success,
static_cast<alpaka::Idx<AccGpu>>(1) == (alpaka::getWorkDiv<alpaka::Grid, alpaka::Threads>(acc)).prod());
}
};
//-----------------------------------------------------------------------------
TEST_CASE("kernelNoTemplateGpu", "[kernel]")
{
alpaka::test::KernelExecutionFixture<AccGpu> fixture(
alpaka::Vec<Dim, Idx>::ones());
KernelNoTemplateGpu kernel;
REQUIRE(fixture(kernel));
}
#endif*/
#if defined(ALPAKA_ACC_CPU_SERIAL_ENABLED)
//#############################################################################
struct KernelWithoutTemplateParamCpu
{
    //! The unused template parameter delays instantiation of the call operator;
    //! see the file header for why this is required when a GPU accelerator is
    //! hard-coded. For the CPU accelerator it simply mirrors that workaround.
    template<typename TNotUsed = void>
    ALPAKA_FN_ACC auto operator()(AccCpu const& acc, bool* success) const -> void
    {
        auto const threadCount = (alpaka::getWorkDiv<alpaka::Grid, alpaka::Threads>(acc)).prod();
        ALPAKA_CHECK(*success, static_cast<alpaka::Idx<AccCpu>>(1) == threadCount);
    }
};
//-----------------------------------------------------------------------------
TEST_CASE("kernelWithoutTemplateParamCpu", "[kernel]")
{
    // Run the dummy-templated kernel on a single-thread work division.
    KernelWithoutTemplateParamCpu kernel;
    alpaka::test::KernelExecutionFixture<AccCpu> fixture{alpaka::Vec<Dim, Idx>::ones()};
    REQUIRE(fixture(kernel));
}
#endif
#if(defined(ALPAKA_ACC_GPU_CUDA_ENABLED) && BOOST_LANG_CUDA) || (defined(ALPAKA_ACC_GPU_HIP_ENABLED) && BOOST_LANG_HIP)
//#############################################################################
struct KernelWithoutTemplateParamGpu
{
    //! The unused template parameter makes the call operator a template, which
    //! defers instantiation and avoids the __host__/__device__ resolution error
    //! described in the file header when the GPU accelerator is hard-coded.
    template<typename TNotUsed = void>
    ALPAKA_FN_ACC auto operator()(AccGpu const& acc, bool* success) const -> void
    {
        auto const threadCount = (alpaka::getWorkDiv<alpaka::Grid, alpaka::Threads>(acc)).prod();
        ALPAKA_CHECK(*success, static_cast<alpaka::Idx<AccGpu>>(1) == threadCount);
    }
};
//-----------------------------------------------------------------------------
TEST_CASE("kernelWithoutTemplateParamGpu", "[kernel]")
{
    // Run the dummy-templated kernel on a single-thread work division.
    KernelWithoutTemplateParamGpu kernel;
    alpaka::test::KernelExecutionFixture<AccGpu> fixture{alpaka::Vec<Dim, Idx>::ones()};
    REQUIRE(fixture(kernel));
}
#endif
|
denis-yuen/dockstore-cli
|
dockstore-cli-integration-testing/src/test/java/io/dockstore/client/cli/WorkflowIT.java
|
<filename>dockstore-cli-integration-testing/src/test/java/io/dockstore/client/cli/WorkflowIT.java
/*
* Copyright 2018 OICR
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.dockstore.client.cli;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.ws.rs.core.GenericType;
import com.google.common.collect.Lists;
import io.dockstore.client.cli.nested.WorkflowClient;
import io.dockstore.common.CommonTestUtilities;
import io.dockstore.common.ConfidentialTest;
import io.dockstore.common.SourceControl;
import io.dockstore.common.WorkflowTest;
import io.dropwizard.testing.ResourceHelpers;
import io.swagger.client.ApiClient;
import io.swagger.client.api.HostedApi;
import io.swagger.client.api.WorkflowsApi;
import io.swagger.client.model.SourceFile;
import io.swagger.client.model.Workflow;
import io.swagger.client.model.WorkflowVersion;
import org.apache.commons.io.FileUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.ExpectedSystemExit;
import org.junit.contrib.java.lang.system.SystemErrRule;
import org.junit.contrib.java.lang.system.SystemOutRule;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import static io.swagger.client.model.ToolDescriptor.TypeEnum.CWL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Extra confidential integration tests, focus on testing workflow interactions
* {@link io.dockstore.client.cli.BaseIT}
*
* @author dyuen
*/
@Category({ ConfidentialTest.class, WorkflowTest.class })
public class WorkflowIT extends BaseIT {

    @Rule
    public final SystemOutRule systemOutRule = new SystemOutRule().enableLog().muteForSuccessfulTests();
    @Rule
    public final SystemErrRule systemErrRule = new SystemErrRule().enableLog().muteForSuccessfulTests();
    @Rule
    public final ExpectedSystemExit systemExit = ExpectedSystemExit.none();
    @Rule
    public final ExpectedException thrown = ExpectedException.none();

    private final String clientConfig = ResourceHelpers.resourceFilePath("clientConfig");
    private final String jsonFilePath = ResourceHelpers.resourceFilePath("wc-job.json");

    @Before
    @Override
    public void resetDBBetweenTests() throws Exception {
        CommonTestUtilities.cleanStatePrivate2(SUPPORT, false);
    }

    /**
     * Launching an unpublished workflow must succeed with the owner's credentials
     * and exit with {@link Client#ENTRY_NOT_FOUND} with somebody else's credentials.
     */
    @Test
    public void testWorkflowLaunchOrNotLaunchBasedOnCredentials() throws IOException {
        String toolpath = SourceControl.GITHUB.toString() + "/DockstoreTestUser2/md5sum-checker/test";
        testingPostgres.runUpdateStatement("update enduser set isadmin = 't' where username = 'DockstoreTestUser2';");
        final ApiClient webClient = getWebClient(USER_2_USERNAME, testingPostgres);
        WorkflowsApi workflowApi = new WorkflowsApi(webClient);
        Workflow workflow = workflowApi.manualRegister(SourceControl.GITHUB.getFriendlyName(), "DockstoreTestUser2/md5sum-checker",
            "/checker-workflow-wrapping-workflow.cwl", "test", "cwl", null);
        assertEquals("There should be one user of the workflow after manually registering it.", 1, workflow.getUsers().size());
        Workflow refresh = workflowApi.refresh(workflow.getId(), true);
        assertFalse(refresh.isIsPublished());

        // should be able to launch properly with correct credentials even though the workflow is not published
        FileUtils.writeStringToFile(new File("md5sum.input"), "foo", StandardCharsets.UTF_8);
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "launch", "--entry", toolpath,
                "--json", ResourceHelpers.resourceFilePath("md5sum_cwl.json"), "--script" });

        // should not be able to launch properly with incorrect credentials
        systemExit.expectSystemExitWithStatus(Client.ENTRY_NOT_FOUND);
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "workflow", "launch", "--entry", toolpath,
                "--json", ResourceHelpers.resourceFilePath("md5sum_cwl.json"), "--script" });
    }

    /**
     * This tests that you are able to download zip files for versions of a workflow,
     * both via the API (authenticated and, once published, anonymously) and via the CLI
     * (unzipped into the working directory and as a plain zip).
     */
    @Test
    public void downloadZipFile() throws IOException {
        String toolpath = SourceControl.GITHUB.toString() + "/DockstoreTestUser2/md5sum-checker/test";
        final ApiClient webClient = getWebClient(USER_2_USERNAME, testingPostgres);
        WorkflowsApi workflowApi = new WorkflowsApi(webClient);

        // Register and refresh workflow
        Workflow workflow = workflowApi
            .manualRegister(SourceControl.GITHUB.getFriendlyName(), "DockstoreTestUser2/md5sum-checker", "/md5sum/md5sum-workflow.cwl",
                "test", "cwl", null);
        Workflow refresh = workflowApi.refresh(workflow.getId(), true);
        Long workflowId = refresh.getId();
        WorkflowVersion workflowVersion = refresh.getWorkflowVersions().get(0);
        Long versionId = workflowVersion.getId();

        // Download unpublished workflow version as the owner
        workflowApi.getWorkflowZip(workflowId, versionId);
        byte[] arbitraryURL = SwaggerUtility.getArbitraryURL("/workflows/" + workflowId + "/zip/" + versionId, new GenericType<byte[]>() {
        }, webClient);
        File tempZip = File.createTempFile("temp", "zip");
        Path write = Files.write(tempZip.toPath(), arbitraryURL);
        ZipFile zipFile = new ZipFile(write.toFile());
        assertTrue("zip file seems incorrect",
            zipFile.stream().map(ZipEntry::getName).collect(Collectors.toList()).contains("md5sum/md5sum-workflow.cwl"));

        // should not be able to get zip anonymously before publication
        boolean thrownException = false;
        try {
            SwaggerUtility.getArbitraryURL("/workflows/" + workflowId + "/zip/" + versionId, new GenericType<byte[]>() {
            }, CommonTestUtilities.getWebClient(false, null, testingPostgres));
        } catch (Exception e) {
            thrownException = true;
        }
        assertTrue(thrownException);
        tempZip.deleteOnExit();

        // Download published workflow version anonymously
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "publish", "--entry", toolpath,
                "--script" });
        arbitraryURL = SwaggerUtility.getArbitraryURL("/workflows/" + workflowId + "/zip/" + versionId, new GenericType<byte[]>() {
        }, CommonTestUtilities.getWebClient(false, null, testingPostgres));
        File tempZip2 = File.createTempFile("temp", "zip");
        write = Files.write(tempZip2.toPath(), arbitraryURL);
        zipFile = new ZipFile(write.toFile());
        assertTrue("zip file seems incorrect",
            zipFile.stream().map(ZipEntry::getName).collect(Collectors.toList()).contains("md5sum/md5sum-workflow.cwl"));
        tempZip2.deleteOnExit();

        // download and unzip via CLI; every zip entry must materialize in the working directory
        Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "download", "--entry",
            toolpath + ":" + workflowVersion.getName(), "--script" });
        zipFile.stream().forEach((ZipEntry entry) -> {
            if (!(entry).isDirectory()) {
                File innerFile = new File(System.getProperty("user.dir"), entry.getName());
                assert (innerFile.exists());
                assert (innerFile.delete());
            }
        });

        // download zip via CLI
        Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "download", "--entry",
            toolpath + ":" + workflowVersion.getName(), "--zip", "--script" });
        File downloadedZip = new File(new WorkflowClient(null, null, null, false).zipFilename(workflow));
        assert (downloadedZip.exists());
        assert (downloadedZip.delete());
    }

    /**
     * A checker workflow of an unpublished entry can be downloaded by its owner but not
     * by other users; once the entry is published anyone may download it.
     */
    @Test
    public void testCheckerWorkflowDownloadBasedOnCredentials() throws IOException {
        String toolpath = SourceControl.GITHUB.toString() + "/DockstoreTestUser2/md5sum-checker/test";
        testingPostgres.runUpdateStatement("update enduser set isadmin = 't' where username = 'DockstoreTestUser2';");
        final ApiClient webClient = getWebClient(USER_2_USERNAME, testingPostgres);
        WorkflowsApi workflowApi = new WorkflowsApi(webClient);
        Workflow workflow = workflowApi
            .manualRegister(SourceControl.GITHUB.getFriendlyName(), "DockstoreTestUser2/md5sum-checker", "/md5sum/md5sum-workflow.cwl",
                "test", "cwl", null);
        Workflow refresh = workflowApi.refresh(workflow.getId(), true);
        assertFalse(refresh.isIsPublished());
        // leading slash added for consistency: every other descriptor path in this class
        // (including the identical call in testCheckerWorkflowLaunchBasedOnCredentials) is absolute
        workflowApi.registerCheckerWorkflow("/checker-workflow-wrapping-workflow.cwl", workflow.getId(), "cwl", "checker-input-cwl.json");
        workflowApi.refresh(workflow.getId(), true);
        final String fileWithIncorrectCredentials = ResourceHelpers.resourceFilePath("config_file.txt");
        final String fileWithCorrectCredentials = ResourceHelpers.resourceFilePath("config_file2.txt");

        // should be able to download properly with correct credentials even though the workflow is not published
        FileUtils.writeStringToFile(new File("md5sum.input"), "foo", StandardCharsets.UTF_8);
        Client.main(
            new String[] { "--config", fileWithCorrectCredentials, "checker", "download", "--entry", toolpath, "--version", "master",
                "--script" });

        // Publish the workflow
        Client.main(new String[] { "--config", fileWithCorrectCredentials, "workflow", "publish", "--entry", toolpath, "--script" });

        // should be able to download properly with incorrect credentials because the entry is published
        Client.main(
            new String[] { "--config", fileWithIncorrectCredentials, "checker", "download", "--entry", toolpath, "--version", "master",
                "--script" });

        // Unpublish the workflow
        Client.main(
            new String[] { "--config", fileWithCorrectCredentials, "workflow", "publish", "--entry", toolpath, "--unpub", "--script" });

        // should not be able to download properly with incorrect credentials because the entry is not published
        systemExit.expectSystemExitWithStatus(Client.ENTRY_NOT_FOUND);
        Client.main(
            new String[] { "--config", fileWithIncorrectCredentials, "checker", "download", "--entry", toolpath, "--version", "master",
                "--script" });
    }

    /**
     * A checker workflow of an unpublished entry can be launched by its owner but not by
     * other users; once the entry is published anyone may launch it.
     */
    @Test
    public void testCheckerWorkflowLaunchBasedOnCredentials() throws IOException {
        String toolpath = SourceControl.GITHUB.toString() + "/DockstoreTestUser2/md5sum-checker/test";
        testingPostgres.runUpdateStatement("update enduser set isadmin = 't' where username = 'DockstoreTestUser2';");
        final ApiClient webClient = getWebClient(USER_2_USERNAME, testingPostgres);
        WorkflowsApi workflowApi = new WorkflowsApi(webClient);
        Workflow workflow = workflowApi
            .manualRegister(SourceControl.GITHUB.getFriendlyName(), "DockstoreTestUser2/md5sum-checker", "/md5sum/md5sum-workflow.cwl",
                "test", "cwl", null);
        Workflow refresh = workflowApi.refresh(workflow.getId(), true);
        Assert.assertFalse(refresh.isIsPublished());
        workflowApi.registerCheckerWorkflow("/checker-workflow-wrapping-workflow.cwl", workflow.getId(), "cwl", "checker-input-cwl.json");
        workflowApi.refresh(workflow.getId(), true);

        // should be able to launch properly with correct credentials even though the workflow is not published
        FileUtils.writeStringToFile(new File("md5sum.input"), "foo", StandardCharsets.UTF_8);
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "checker", "launch", "--entry", toolpath,
                "--json", ResourceHelpers.resourceFilePath("md5sum_cwl.json"), "--script" });

        // should be able to launch properly with incorrect credentials but the entry is published
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "publish", "--entry", toolpath,
                "--script" });
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "checker", "launch", "--entry", toolpath,
                "--json", ResourceHelpers.resourceFilePath("md5sum_cwl.json"), "--script" });

        // should not be able to launch properly with incorrect credentials
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "publish", "--entry", toolpath,
                "--unpub", "--script" });
        systemExit.expectSystemExitWithStatus(Client.ENTRY_NOT_FOUND);
        Client.main(
            new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "checker", "launch", "--entry", toolpath,
                "--json", ResourceHelpers.resourceFilePath("md5sum_cwl.json"), "--script" });
    }

    /**
     * Hosted (Dockstore-edited) workflows keep their modification timestamps across edits,
     * extract file-format metadata from the descriptors, and can be launched both by the
     * owner while unpublished and by anyone once published. The latest (working) version
     * is what gets launched.
     */
    @Test
    public void testHostedWorkflowMetadataAndLaunch() throws IOException {
        final ApiClient webClient = getWebClient(USER_2_USERNAME, testingPostgres);
        HostedApi hostedApi = new HostedApi(webClient);
        Workflow hostedWorkflow = hostedApi.createHostedWorkflow("name", null, CWL.toString(), null, null);
        assertNotNull(hostedWorkflow.getLastModifiedDate());
        assertNotNull(hostedWorkflow.getLastUpdated());

        // make a couple garbage edits
        SourceFile source = new SourceFile();
        source.setPath("/Dockstore.cwl");
        source.setAbsolutePath("/Dockstore.cwl");
        source.setContent("cwlVersion: v1.0\nclass: Workflow");
        source.setType(SourceFile.TypeEnum.DOCKSTORE_CWL);
        SourceFile source1 = new SourceFile();
        source1.setPath("sorttool.cwl");
        source1.setContent("foo");
        source1.setAbsolutePath("/sorttool.cwl");
        source1.setType(SourceFile.TypeEnum.DOCKSTORE_CWL);
        SourceFile source2 = new SourceFile();
        source2.setPath("revtool.cwl");
        source2.setContent("foo");
        source2.setAbsolutePath("/revtool.cwl");
        source2.setType(SourceFile.TypeEnum.DOCKSTORE_CWL);
        hostedApi.editHostedWorkflow(hostedWorkflow.getId(), Lists.newArrayList(source, source1, source2));
        source.setContent("cwlVersion: v1.0\nclass: Workflow");
        source1.setContent("food");
        source2.setContent("food");
        final Workflow updatedHostedWorkflow = hostedApi
            .editHostedWorkflow(hostedWorkflow.getId(), Lists.newArrayList(source, source1, source2));
        assertNotNull(updatedHostedWorkflow.getLastModifiedDate());
        assertNotNull(updatedHostedWorkflow.getLastUpdated());

        // note that this workflow contains metadata defined on the inputs to the workflow in the old (pre-map) CWL way that is still valid v1.0 CWL
        source.setContent(FileUtils
            .readFileToString(new File(ResourceHelpers.resourceFilePath("hosted_metadata/Dockstore.cwl")), StandardCharsets.UTF_8));
        source1.setContent(
            FileUtils.readFileToString(new File(ResourceHelpers.resourceFilePath("hosted_metadata/sorttool.cwl")), StandardCharsets.UTF_8));
        source2.setContent(
            FileUtils.readFileToString(new File(ResourceHelpers.resourceFilePath("hosted_metadata/revtool.cwl")), StandardCharsets.UTF_8));
        Workflow workflow = hostedApi.editHostedWorkflow(hostedWorkflow.getId(), Lists.newArrayList(source, source1, source2));
        assertFalse(workflow.getInputFileFormats().isEmpty());
        assertFalse(workflow.getOutputFileFormats().isEmpty());

        // launch the workflow, note that the latest version of the workflow should launch (i.e. the working one)
        Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file2.txt"), "workflow", "launch", "--entry",
            workflow.getFullWorkflowPath(), "--json", ResourceHelpers.resourceFilePath("revsort-job.json"), "--script" });
        WorkflowsApi workflowsApi = new WorkflowsApi(webClient);
        workflowsApi.publish(workflow.getId(), SwaggerUtility.createPublishRequest(true));

        // should also launch successfully with the wrong credentials when published
        Client.main(new String[] { "--config", ResourceHelpers.resourceFilePath("config_file.txt"), "workflow", "launch", "--entry",
            workflow.getFullWorkflowPath(), "--json", ResourceHelpers.resourceFilePath("revsort-job.json"), "--script" });
    }

    /**
     * Test for cwl1.1
     * Of the languages support features, this tests:
     * Workflow Registration
     * Metadata Display
     * Validation
     * Launch remote workflow
     */
    @Test
    public void cwlVersion11() {
        final ApiClient userApiClient = CommonTestUtilities.getWebClient(true, USER_2_USERNAME, testingPostgres);
        WorkflowsApi userWorkflowsApi = new WorkflowsApi(userApiClient);
        userWorkflowsApi.manualRegister("github", "dockstore-testing/Workflows-For-CI", "/cwl/v1.1/metadata.cwl", "metadata", "cwl",
            "/cwl/v1.1/cat-job.json");
        final Workflow workflowByPathGithub = userWorkflowsApi
            .getWorkflowByPath("github.com/dockstore-testing/Workflows-For-CI/metadata", null, WorkflowClient.BIOWORKFLOW);
        final Workflow workflow = userWorkflowsApi.refresh(workflowByPathGithub.getId(), true);
        Assert.assertEquals("Print the contents of a file to stdout using 'cat' running in a docker container.", workflow.getDescription());
        Assert.assertEquals("<NAME>", workflow.getAuthor());
        Assert.assertEquals("<EMAIL>", workflow.getEmail());
        Assert.assertTrue(workflow.getWorkflowVersions().stream().anyMatch(versions -> "master".equals(versions.getName())));
        Optional<WorkflowVersion> optionalWorkflowVersion = workflow.getWorkflowVersions().stream()
            .filter(version -> "master".equalsIgnoreCase(version.getName())).findFirst();
        assertTrue(optionalWorkflowVersion.isPresent());
        WorkflowVersion workflowVersion = optionalWorkflowVersion.get();

        // verify sourcefiles
        final io.dockstore.openapi.client.ApiClient userOpenApiClient = CommonTestUtilities.getOpenApiWebClient(true, USER_2_USERNAME, testingPostgres);
        io.dockstore.openapi.client.api.WorkflowsApi openApiWorkflowApi = new io.dockstore.openapi.client.api.WorkflowsApi(userOpenApiClient);
        List<io.dockstore.openapi.client.model.SourceFile> sourceFileList = openApiWorkflowApi.getWorkflowVersionsSourcefiles(workflow.getId(), workflowVersion.getId(), null);
        Assert.assertEquals(2, sourceFileList.size());
        Assert.assertTrue(sourceFileList.stream().anyMatch(sourceFile -> sourceFile.getPath().equals("/cwl/v1.1/cat-job.json")));
        Assert.assertTrue(sourceFileList.stream().anyMatch(sourceFile -> sourceFile.getPath().equals("/cwl/v1.1/metadata.cwl")));

        // Check validation works. It is invalid because this is a tool and not a workflow.
        Assert.assertFalse(workflowVersion.isValid());

        userWorkflowsApi
            .manualRegister("github", "dockstore-testing/Workflows-For-CI", "/cwl/v1.1/count-lines1-wf.cwl", "count-lines1-wf", "cwl",
                "/cwl/v1.1/wc-job.json");
        final Workflow workflowByPathGithub2 = userWorkflowsApi
            .getWorkflowByPath("github.com/dockstore-testing/Workflows-For-CI/count-lines1-wf", null, WorkflowClient.BIOWORKFLOW);
        final Workflow workflow2 = userWorkflowsApi.refresh(workflowByPathGithub2.getId(), true);
        // bug fix: this previously re-asserted on `workflow` (copy-paste from above);
        // the second registration's versions live on `workflow2`
        Assert.assertTrue(workflow2.getWorkflowVersions().stream().anyMatch(versions -> "master".equals(versions.getName())));
        Optional<WorkflowVersion> optionalWorkflowVersion2 = workflow2.getWorkflowVersions().stream()
            .filter(version -> "master".equalsIgnoreCase(version.getName())).findFirst();
        assertTrue(optionalWorkflowVersion2.isPresent());
        WorkflowVersion workflowVersion2 = optionalWorkflowVersion2.get();

        // Check validation works. It should be valid
        Assert.assertTrue(workflowVersion2.isValid());
        userWorkflowsApi.publish(workflowByPathGithub2.getId(), SwaggerUtility.createPublishRequest(true));

        // launch the published workflow through the CLI; CWL job files are YAML, so the
        // JSON job file is passed via --yaml (JSON is a subset of YAML)
        List<String> args = new ArrayList<>();
        args.add("workflow");
        args.add("launch");
        args.add("--entry");
        args.add("github.com/dockstore-testing/Workflows-For-CI/count-lines1-wf");
        args.add("--yaml");
        args.add(jsonFilePath);
        args.add("--config");
        args.add(clientConfig);
        args.add("--script");
        Client.main(args.toArray(new String[0]));
        Assert.assertTrue(systemOutRule.getLog().contains("Final process status is success"));
    }
}
|
supavti06/nurturing_in-place-web-master
|
src/components/blogCard.js
|
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Grid from '@material-ui/core/Grid';
import Paper from '@material-ui/core/Paper';
import Chip from '@material-ui/core/Chip';
import Box from '@material-ui/core/Box';
import Typography from '@material-ui/core/Typography';
import ButtonBase from '@material-ui/core/ButtonBase';
import Link from '@material-ui/core/Link';
// Styling hook for the blog card, bound to the current Material-UI theme.
const useStyles = makeStyles(theme => ({
    root: {
        flexGrow: 1,
        padding: 10,
    },
    paper: {
        padding: theme.spacing(2),
        margin: 'auto',
        maxWidth: 1000,
        // Soft drop shadow. A duplicate `boxShadow: 'none'` key that preceded this
        // one was removed: in an object literal the later key silently wins, so
        // this value is the only one that was ever applied.
        boxShadow: ' 0px 0px 20px -7px rgb(180,180,200)',
    },
    // Bounding box for the post's thumbnail image.
    image: {
        maxWidth: 180,
        maxHeight: 120,
        marginTop: 30,
        marginLeft: 10
    },
    img: {
        display: 'block',
        maxWidth: '100%',
        maxHeight: '100%',
    },
    // Title + subheading column next to the thumbnail.
    textContainer: {
        marginTop: 15,
        maxWidth: 390
    },
    titleText: {
        lineHeight: 1.3
    }
}));
export default function BlogCard(props) {
const { node, path} = props
const classes = useStyles();
const addThumbStringToImg = (text) => {
if(props.pathPrefix === '/kernels/PAX/'){
return text
}
var textArray = text.split(".jpg")
return (textArray[0] + "-thumb.jpg")
};
const formulateUrlFromTitle = (title, Title) => {
// if(typeof Title === 'undefined'){
title = props.pathPrefix + title.replace(/,/g,'');
// }
// else{
// title = props.pathPrefix + Title.replace(/,/g,'');
// }
// title = props.pathPrefix + title.replace(/,/g,'');
title = title.replace(/\’/gi,'');
title = title.replace(/\'/gi,'');
title = title.replace(/\s/g, '-');
return title
};
return (
<div className={classes.root}>
<Paper className={classes.paper} elevation={10} square>
<Link style={{textDecoration: 'none' ,color: 'inherit'}}href={formulateUrlFromTitle(node.title, node.Title)}>
<Grid container spacing={2} >
<Box mr={3} xs={12} sm={4}>
<Grid item>
<ButtonBase className={classes.image} >
<img
className={classes.img}
src={addThumbStringToImg(node.image)}
alt={addThumbStringToImg(node.image)}
/>
</ButtonBase>
</Grid>
</Box>
<Grid container lg={7} sm={5} xs={5}>
<Grid item spacing={0}>
<Grid item className={classes.textContainer}>
<Typography className={classes.titleText} variant="h6">
{node.title}
</Typography>
{/* <Typography variant="body1" color="textSecondary" >
{
(new Date(node.publishDate)).toLocaleDateString()
}
</Typography> */}
<Typography variant="body2" style={{ cursor: 'pointer' }}>
{node.subheading}
</Typography>
{/* {
node.categories &&
node.categories.map( obj => (
<Link style={{padding:'15px 5px 15px 5px' }} href={`/categories/${obj.Name}`}>
<Chip label={obj.Name}/>
</Link>
))
} */}
</Grid>
</Grid>
</Grid>
</Grid>
</Link>
</Paper>
</div>)
}
|
fossabot/me-1
|
packages/resume/src/lib/data/resume.js
|
<reponame>fossabot/me-1
import {Map} from "immutable";
import {createSelector} from "reselect";
import defaultResume from "../../resumes/resume.json";
import {FETCHING_RESUME_SUCCESS} from "../actions/fetchResume";
import Resume from "../resume";
// Variant key used when an action does not name one explicitly.
const defaultVariant = "resume";

// Initial store shape: a map of resume variants, seeded with the bundled resume.
const defaultState = Map({
    resumes: Map({resume: Resume.fromResume(defaultResume)})
});

/**
 * Reducer maintaining the map of loaded resume variants. A successful fetch
 * stores the fetched resume under its variant key (falling back to the default
 * variant); every other action — including a success without a payload resume —
 * leaves the state untouched.
 */
export const resumeReducer = (state = defaultState, action) => {
    if (action.type !== FETCHING_RESUME_SUCCESS || !action.payload.resume) {
        return state;
    }

    const variant = action.payload.variant || defaultVariant;
    return state.set("resumes", state.get("resumes").set(variant, action.payload.resume));
};

export default resumeReducer;
// Root selector: the immutable map of all loaded resume variants.
export const getResumes = state => state.get("resumes");

// First loaded resume, or null when none has been loaded.
export const getResume = createSelector(
    getResumes,
    resumes => resumes.first() || null
);

// Pass-through selector exposing the requested variant name.
const getVariant = (state, variant) => variant;

// Resume stored under the given variant name, or null when absent.
export const getResumeVariant = createSelector(
    [getResumes, getVariant],
    (resumes, variant) => resumes.get(variant) || null
);
|
solarb0526/Spring-Security-Third-Edition
|
Chapter15/chapter15.00-calendar/src/test/java/com/packtpub/springsecurity/core/authority/CalendarUserAuthorityUtilsTests.java
|
package com.packtpub.springsecurity.core.authority;
import com.packtpub.springsecurity.CalendarStubs;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.Collection;
//import static org.assertj.core.api.Assertions.assertThat;
@RunWith(SpringRunner.class)
@SpringBootTest
public class CalendarUserAuthorityUtilsTests {
    // NOTE(review): both tests currently only print the computed authorities and
    // pass unconditionally — the real assertions are commented out below and
    // reference containsInAnyOrder, which is not imported (the assertj import at
    // the top of the file is commented out too). Restore the assertions once an
    // assertion library is settled on; also consider dropping the System.out
    // diagnostics afterwards.

    // Prints the authorities produced for the regular-user stub.
    @Test
    public void createAuthorities_User() {
        Collection<? extends GrantedAuthority> roles =
                CalendarUserAuthorityUtils.createAuthorities(CalendarStubs.user1());
        System.out.println("*****************************************");
        System.out.println("roles: " + roles);
//        assertThat(roles,
//                containsInAnyOrder(
//                        new SimpleGrantedAuthority("USER_ROLE")));
    }

    // Prints the authorities produced for the admin stub.
    // NOTE(review): name is inconsistent with the sibling test
    // (validateUser_Admin vs createAuthorities_User) — presumably intended to be
    // createAuthorities_Admin.
    @Test
    public void validateUser_Admin() {
        Collection<? extends GrantedAuthority> roles =
                CalendarUserAuthorityUtils.createAuthorities(CalendarStubs.admin1());
        System.out.println("*****************************************");
        System.out.println("roles: " + roles);
//        assertThat(roles,
//                containsInAnyOrder(
//                        new SimpleGrantedAuthority("ADMIN_ROLE")));
    }
} // The End...
|
ppavlidis/modinvreg
|
src/main/java/ubc/pavlab/rdp/model/UserPrinciple.java
|
<gh_stars>0
package ubc.pavlab.rdp.model;
import lombok.AllArgsConstructor;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import java.util.Collection;
import java.util.stream.Collectors;
/**
 * Created by mjacobson on 07/02/18.
 *
 * Adapts the application's {@link User} entity to Spring Security's
 * {@link UserDetails} contract. Accounts never expire or lock and credentials
 * never expire; only the enabled flag is delegated to the wrapped user.
 */
@AllArgsConstructor
public class UserPrinciple implements UserDetails {

    private final User user;

    /** Database identifier of the wrapped user. */
    public Integer getId() {
        return user.getId();
    }

    /** The user's e-mail address doubles as the login name. */
    @Override
    public String getUsername() {
        return user.getEmail();
    }

    @Override
    public String getPassword() {
        return user.getPassword();
    }

    /** Maps each persisted role one-to-one onto a {@link SimpleGrantedAuthority}. */
    @Override
    public Collection<? extends GrantedAuthority> getAuthorities() {
        return user.getRoles()
                .stream()
                .map( role -> role.getRole() )
                .map( SimpleGrantedAuthority::new )
                .collect( Collectors.toSet() );
    }

    @Override
    public boolean isAccountNonExpired() {
        return true;
    }

    @Override
    public boolean isAccountNonLocked() {
        return true;
    }

    @Override
    public boolean isCredentialsNonExpired() {
        return true;
    }

    @Override
    public boolean isEnabled() {
        return user.isEnabled();
    }
}
|
alevohin/JavaTorrent
|
bittorrent/src/main/java/org/johnnei/javatorrent/internal/torrent/TorrentProcessor.java
|
package org.johnnei.javatorrent.internal.torrent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import org.johnnei.javatorrent.TorrentClient;
import org.johnnei.javatorrent.internal.torrent.selection.PieceSelectionHandler;
import org.johnnei.javatorrent.internal.torrent.selection.PieceSelectionState;
import org.johnnei.javatorrent.internal.tracker.TrackerManager;
import org.johnnei.javatorrent.phases.IDownloadPhase;
import org.johnnei.javatorrent.torrent.AbstractFileSet;
import org.johnnei.javatorrent.torrent.PeerStateAccess;
import org.johnnei.javatorrent.torrent.Torrent;
import org.johnnei.javatorrent.torrent.peer.Peer;
import org.johnnei.javatorrent.torrent.peer.PeerDirection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A state machine wrapped around the torrent.
 * <p>
 * On construction the torrent enters its initial download phase and four recurring
 * maintenance tasks are scheduled on the client's executor: torrent state updates,
 * choking updates, disconnected-peer cleanup and tracker announces. All tasks are
 * cancelled again via {@link #shutdownTorrent()}.
 */
class TorrentProcessor implements PeerStateAccess {

	private static final Logger LOGGER = LoggerFactory.getLogger(TorrentProcessor.class);

	private final TorrentManager torrentManager;

	private final TorrentClient torrentClient;

	private final TrackerManager trackerManager;

	private final Torrent torrent;

	// Currently active phase; replaced in updateTorrentState() when a phase completes.
	private IDownloadPhase downloadPhase;

	// Rebuilt on every phase transition, see doPhaseEnter().
	private PieceSelectionHandler pieceSelectionHandler;

	// Handles of the recurring tasks so they can be cancelled on shutdown.
	private Collection<ScheduledFuture<?>> scheduledTasks;

	public TorrentProcessor(TorrentManager torrentManager, TrackerManager trackerManager, TorrentClient torrentClient, Torrent torrent) {
		this.torrentManager = torrentManager;
		this.trackerManager = trackerManager;
		this.torrentClient = torrentClient;
		this.torrent = torrent;

		// Four recurring tasks are registered below (capacity hint was previously 3).
		scheduledTasks = new ArrayList<>(4);

		downloadPhase = torrentClient.getPhaseRegulator().createInitialPhase(torrentClient, torrent);
		doPhaseEnter();

		scheduledTasks.add(torrentClient.getExecutorService().scheduleAtFixedRate(this::updateTorrentState, 0, 250, TimeUnit.MILLISECONDS));
		scheduledTasks.add(torrentClient.getExecutorService().scheduleAtFixedRate(this::updateChokingStates, 1, 10, TimeUnit.SECONDS));
		scheduledTasks.add(torrentClient.getExecutorService().scheduleAtFixedRate(this::removeDisconnectedPeers, 30, 60, TimeUnit.SECONDS));
		scheduledTasks.add(torrentClient.getExecutorService().scheduleAtFixedRate(this::updateTrackerStates, 10, 30, TimeUnit.SECONDS));
	}

	/** Announces the torrent to its trackers. */
	public void updateTrackerStates() {
		trackerManager.announce(torrent);
	}

	/** Removes all peers whose underlying socket has been closed. */
	public void removeDisconnectedPeers() {
		torrent.getPeers().stream().
				filter(p -> p.getBitTorrentSocket().closed()).
				forEach(torrent::removePeer);
	}

	/** Re-evaluates the choke state of every peer using the active phase's strategy. */
	public void updateChokingStates() {
		torrent.getPeers().forEach(downloadPhase.getChokingStrategy()::updateChoking);
	}

	/**
	 * Advances the phase state machine: when the current phase is done, transitions
	 * to the next phase (or shuts the torrent down if no phase remains), then lets
	 * the active phase and the piece selection do their periodic work. Any uncaught
	 * exception shuts the torrent down instead of silently killing the scheduled task.
	 */
	public void updateTorrentState() {
		try {
			if (downloadPhase.isDone()) {
				downloadPhase.onPhaseExit();
				Optional<IDownloadPhase> newPhase = torrentClient.getPhaseRegulator().createNextPhase(downloadPhase, torrentClient, torrent);
				if (newPhase.isPresent()) {
					LOGGER.info("Torrent transitioning from {} to {}", downloadPhase, newPhase.get());
					downloadPhase = newPhase.get();
					doPhaseEnter();
				} else {
					LOGGER.info("Torrent ended from {}", downloadPhase);
					shutdownTorrent();
					return;
				}
			}
			downloadPhase.process();
			pieceSelectionHandler.updateState();
		} catch (Exception e) {
			LOGGER.error("Failed to update torrent state", e);
			shutdownTorrent();
		}
	}

	/** Cancels all recurring maintenance tasks and deregisters the torrent. */
	public void shutdownTorrent() {
		for (ScheduledFuture<?> task : scheduledTasks) {
			task.cancel(false);
		}
		torrentManager.removeTorrent(torrent);
	}

	@Override
	public int getPendingBlocks(Peer peer, PeerDirection direction) {
		if (direction == PeerDirection.Upload) {
			return peer.getWorkQueueSize(PeerDirection.Upload);
		} else {
			// Download requests are tracked by the piece selection, not by the peer itself.
			return pieceSelectionHandler.getBlockQueueFor(peer);
		}
	}

	private void doPhaseEnter() {
		downloadPhase.onPhaseEnter();
		// Kept as a lambda rather than downloadPhase::getFileSet: a bound method
		// reference captures the receiver at creation time, whereas the lambda
		// re-reads the field on every call and thus follows phase transitions.
		Supplier<Optional<AbstractFileSet>> fileSetSupplier = () -> downloadPhase.getFileSet();
		pieceSelectionHandler = new PieceSelectionHandler(
			fileSetSupplier,
			downloadPhase.getPiecePrioritizer(),
			new PieceSelectionState(torrent, downloadPhase::isPeerSupportedForDownload, fileSetSupplier)
		);
	}
}
|
b09/algorithmic-programming
|
test/com/anuragkapur/misc/MergeKArraysTest.java
|
<reponame>b09/algorithmic-programming
package com.anuragkapur.misc;
import org.junit.Test;
public class MergeKArraysTest {

    @Test
    public void testMerge2Arrays() throws Exception {
        MergeKArrays obj = new MergeKArrays();
        // Merge two already-sorted arrays and dump the merged result.
        int[] result = obj.merge2Arrays(new int[] {1, 4, 5, 8, 13, 22}, new int[] {1, 3, 4, 7, 8, 9});
        for (int value : result) {
            System.out.println(value);
        }
    }

    @Test
    public void testMergeKArrays() throws Exception {
        MergeKArrays obj = new MergeKArrays();
        // Four sorted input arrays, expressed as a single 2-D literal instead
        // of element-by-element assignments.
        int[][] arrays = {
            {1, 4, 5, 8, 13, 22},
            {1, 3, 4, 7, 8, 9},
            {5, 10, 12, 21, 22, 23},
            {7, 8, 9, 19, 24, 25}
        };
        int[][] result = obj.mergeKArrays(arrays);
        for (int[] row : result) {
            for (int value : row) {
                System.out.println(value);
            }
        }
    }
}
|
iwaiawi/cellmon
|
src/main/scala/iwai/cellmon/model/core/service/LocationChangeService.scala
|
<reponame>iwaiawi/cellmon<filename>src/main/scala/iwai/cellmon/model/core/service/LocationChangeService.scala
package iwai.cellmon.model.core.service
import iwai.cellmon.model.core.entity.common.Period
import iwai.cellmon.model.core.entity.location.LocationChange
import iwai.cellmon.model.core.repository.locationchange.LocationChangeRepository
import scalaz.concurrent.Task
/** Thin service facade over [[LocationChangeRepository]]. */
class LocationChangeService(repo: LocationChangeRepository) {

  /** Persists the change, returning this service paired with the stored change. */
  def store(change: LocationChange): Task[(LocationChangeService, LocationChange)] =
    repo.put(change).map { case (_, stored) => (this, stored) }

  /** Fetches every change recorded within the given period. */
  def search(period: Period): Task[Seq[LocationChange]] =
    repo.getMulti(period)

  /** Removes all changes in the period, returning this service with the removed changes. */
  def delete(period: Period): Task[(LocationChangeService, Seq[LocationChange])] =
    repo.removeMulti(period).map { case (_, removed) => (this, removed) }
}

object LocationChangeService {
  /** Factory mirroring the primary constructor. */
  def apply(repo: LocationChangeRepository): LocationChangeService =
    new LocationChangeService(repo)
}
|
abos5/pythontutor
|
scrapy/opengameart/opengameart/spiders/sound.py
|
import re
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from opengameart.items import SoundItem
class SoundSpider(scrapy.Spider):
    '''
    Spider for OpenGameArt sound/music search results.

    Starts from an advanced-search URL pre-filtered to the sound-effect and
    music art types (field_art_type_tid 12/13) under a set of licenses, emits
    one SoundItem (mp3/ogg URLs) per embedded audio player on each result
    page, and follows pager links recursively.
    '''
    name = 'sound'
    start_urls = [
        'http://opengameart.org/art-search-advanced?keys=&title=&field_art_tags_tid_op=and&field_art_tags_tid=&name=&field_art_type_tid%5B%5D=12&field_art_type_tid%5B%5D=13&field_art_licenses_tid%5B%5D=2&field_art_licenses_tid%5B%5D=3&field_art_licenses_tid%5B%5D=6&field_art_licenses_tid%5B%5D=5&field_art_licenses_tid%5B%5D=10310&field_art_licenses_tid%5B%5D=4&field_art_licenses_tid%5B%5D=8&field_art_licenses_tid%5B%5D=7&sort_by=count&sort_order=DESC&items_per_page=24&Collection=&page=0',
    ]

    def parse(self, response):
        # Each search result exposes its audio file URLs as data attributes
        # on a .play-button element.
        found = response.css('.play-button')
        # Absolute URLs of the pagination links on this page.
        # NOTE(review): assumes Scrapy's request de-duplication prevents
        # re-crawling already-visited pages — confirm settings don't disable it.
        pages = [
            response.urljoin(u)
            for u in response.css('.pager-item a ::attr("href")').extract()
        ]
        for f in found:
            item = SoundItem()
            item['mp3'] = f.css('::attr("data-mp3-url")').extract_first()
            item['ogg'] = f.css('::attr("data-ogg-url")').extract_first()
            yield item
        for p in pages:
            yield scrapy.Request(p, callback=self.parse)
# eof
|
gemxd/gemfirexd-oss
|
gemfirexd/tools/src/dunit/java/com/pivotal/gemfirexd/internal/engine/management/AggregateStatementStatsDUnit.java
|
<gh_stars>10-100
/*
* Copyright (c) 2010-2015 <NAME>, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.engine.management;
import java.lang.management.ManagementFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;
import java.util.Set;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import com.gemstone.gemfire.management.internal.ManagementConstants;
import com.pivotal.gemfirexd.DistributedSQLTestBase;
import com.pivotal.gemfirexd.TestUtil;
import com.pivotal.gemfirexd.internal.engine.Misc;
import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserverHolder;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.engine.management.impl.AggregateStatementMBean;
import com.pivotal.gemfirexd.internal.engine.management.impl.InternalManagementService;
import com.pivotal.gemfirexd.internal.engine.management.impl.ManagementUtils;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedStatement;
import com.pivotal.gemfirexd.stats.StatementStatsDUnit.StatementStatsObserver;
import dunit.VM;
/**
 * Distributed test verifying that per-statement statistics are aggregated and
 * exposed as an {@code AggregateStatementMXBean} on the JMX managing node.
 *
 * @author rishim
 *
 */
@SuppressWarnings("serial")
public class AggregateStatementStatsDUnit extends DistributedSQLTestBase {

    /** The <code>MBeanServer</code> for this application */
    public static MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();

    public AggregateStatementStatsDUnit(String name) {
        super(name);
    }

    /**
     * test for all basic statement stats gathered during dml execution.
     *
     * @see com.pivotal.gemfirexd.internal.impl.sql.StatementStats
     * @throws Exception
     *           on failure.
     */
    public void testStatementStats() throws Exception {
        try {
            // First server VM: plain data store with time statistics enabled.
            Properties serverInfo = new Properties();
            serverInfo.setProperty("gemfire.enable-time-statistics", "true");
            serverInfo.setProperty("statistic-sample-rate", "100");
            serverInfo.setProperty("statistic-sampling-enabled", "true");
            startServerVMs(1, 0, null, serverInfo);
            // Second server VM doubles as the JMX manager.
            serverInfo.setProperty("jmx-manager", "true");
            serverInfo.setProperty("jmx-manager-start", "true");
            serverInfo.setProperty("jmx-manager-port", "0");// No need to start an Agent for this test
            startServerVMs(1, 0, null, serverInfo);
            // Client VM: accessor only (no data hosting), time stats on.
            Properties info = new Properties();
            info.setProperty("host-data", "false");
            info.setProperty("gemfire.enable-time-statistics", "true");
            // start a client, register the driver.
            startClientVMs(1, 0, null, info);
            // enable StatementStats for all connections in this VM
            System.setProperty(GfxdConstants.GFXD_ENABLE_STATS, "true");
            // check that stats are enabled with System property set
            Connection conn = TestUtil.getConnection(info);
            checkAggregateMBean(conn, true, 1);
            conn.close();
            stopVMNums(1,-1);
        } finally {
            // Always clear observer + system property so later tests start clean.
            GemFireXDQueryObserverHolder.clearInstance();
            System.clearProperty(GfxdConstants.GFXD_ENABLE_STATS);
        }
    }

    private static StatementStatsObserver ob1;
    private static StatementStatsObserver ob2;

    /**
     * Runs DML through the connection, then asserts (on the manager VM) that an
     * aggregate statement MBean appeared for the SELECT's statement id.
     * NOTE(review): the {@code enableStats}/{@code numTimesSampled} parameters
     * are currently unused in this body — confirm whether they were meant to
     * drive assertions.
     */
    private void checkAggregateMBean(Connection conn, final boolean enableStats, final int numTimesSampled)
        throws Exception {
        final VM serverVM = this.serverVMs.get(1); // Server Started as a manager
        final Statement stmt = conn.createStatement();
        final String createSchemaOrder = "create schema trade";
        stmt.execute(createSchemaOrder);
        stmt.execute("create table trade.customers (cid int not null, "
            + "cust_name varchar(100), since date, addr varchar(100), tid int, " + "primary key (cid))");
        PreparedStatement psInsertCust = conn.prepareStatement("insert into " + "trade.customers values (?,?,?,?,?)");
        java.sql.Date since = new java.sql.Date(System.currentTimeMillis());
        // Insert 0-10.
        for (int i = 0; i < 10; i++) {
            psInsertCust.setInt(1, i);
            psInsertCust.setString(2, "XXXX" + i);
            since = new java.sql.Date(System.currentTimeMillis());
            psInsertCust.setDate(3, since);
            psInsertCust.setString(4, "XXXX" + i);
            psInsertCust.setInt(5, i);
            psInsertCust.executeUpdate();
        }
        // Execute the SELECT repeatedly so its statistics get sampled.
        final PreparedStatement psSelectCust = conn.prepareStatement("select * "
            + "from trade.customers where cust_name = ?");
        for (int i = 0; i < 10; i++) {
            psSelectCust.setString(1, "XXXX" + i);
            ResultSet rs = psSelectCust.executeQuery();
            assertTrue("Should return one row", rs.next());
            assertFalse("Should not return more than one row", rs.next());
            rs.close();
        }
        // The stats id identifies the statement's MBean on the manager.
        String stmtId = ((EmbedStatement) psSelectCust).getStatementStats().getStatsId();
        System.out.println("Statement Id = " + stmtId);
        printStatementMBeans();
        serverVM.invoke(this.getClass(), "isAggregateStatementMBeanCreated", new Object[] { stmtId });
        psSelectCust.close();
        psInsertCust.close();
        stmt.close();
    }

    /**
     * Executed on the manager VM: waits for the aggregate statement MBean for
     * {@code stmtId} to be registered, then dumps its attribute values.
     */
    public static void isAggregateStatementMBeanCreated(String stmtId) {
        final InternalManagementService service = InternalManagementService.getInstance(Misc.getMemStore());
        final ObjectName statementObjectName = ManagementUtils.getAggregateStatementMBeanName(stmtId);
        printStatementMBeans();
        // MBean registration is asynchronous; poll until it shows up or time out.
        waitForCriterion(new WaitCriterion() {
            public String description() {
                return "Waiting for the statement aggregator to get reflected at managing node";
            }

            public boolean done() {
                AggregateStatementMXBean bean = service.getMBeanInstance(statementObjectName, AggregateStatementMXBean.class);
                boolean done = (bean != null);
                return done;
            }
        }, ManagementConstants.REFRESH_TIME * 4, 500, true);
        AggregateStatementMBean aggStatementMBean = (AggregateStatementMBean)service.getMBeanInstance(statementObjectName,
            AggregateStatementMXBean.class);
        printValues(AggregateStatementMXBean.class, aggStatementMBean);
    }

    /**
     * Reflectively invokes every getter/is-method of {@code mbeanInterface} on
     * {@code mbeanObject} and prints attribute-name = value pairs.
     */
    private static void printValues(Class<?> mbeanInterface, Object mbeanObject) {
        final Method[] methodArray = mbeanInterface.getMethods();
        Object[] args = null;
        for (Method m : methodArray) {
            String name = m.getName();
            String attrName = "";
            if (name.startsWith("get")) {
                attrName = name.substring(3);
            } else if (name.startsWith("is") && m.getReturnType() == boolean.class) {
                attrName = name.substring(2);
            }
            try {
                Object val = m.invoke(mbeanObject, args);
                System.out.println(attrName +" = "+val);
            } catch (IllegalArgumentException e) {
                fail("printValues failed" + e);
            } catch (IllegalAccessException e) {
                fail("printValues failed" + e);
            } catch (InvocationTargetException e) {
                fail("printValues failed" + e);
            }
        }
    }

    /** Dumps all registered GemFireXD statement MBeans (debug aid). */
    public static void printStatementMBeans() {
        try {
            Set<ObjectInstance> objNames = mbeanServer.queryMBeans(new ObjectName("GemFireXD:service=Statement,*"), null);
            System.out.println(objNames);
        } catch (MalformedObjectNameException e) {
            fail("printStatementMBeans failed" + e);
        } catch (NullPointerException e) {
            fail("printStatementMBeans failed" + e);
        }
    }

    @Override
    public void tearDown2() throws Exception {
        // Release static observer references so they don't leak across tests.
        ob1 = null;
        ob2 = null;
        super.tearDown2();
    }
}
|
cosmicray001/Online_judge_Solutions-
|
uri/1052.cpp
|
<reponame>cosmicray001/Online_judge_Solutions-
#include <iostream>
using namespace std;
// URI Online Judge 1052 ("Month"): read a month number (1-12) and print its
// English name.
int main() {
    // Table lookup replaces the original 12-branch if/else chain;
    // MONTHS[m - 1] is the English name of month m.
    static const char* const MONTHS[] = {
        "January", "February", "March", "April", "May", "June",
        "July", "August", "September", "October", "November", "December"
    };

    // Read via std::cin (already available from <iostream>); the original
    // used scanf/printf without including <cstdio>, relying on a transitive
    // include.
    int m;
    cin >> m;

    // The problem guarantees 1 <= m <= 12; the guard keeps an invalid value
    // from indexing out of bounds (the original also printed nothing for it).
    if (m >= 1 && m <= 12) {
        cout << MONTHS[m - 1] << '\n';
    }
    return 0;
}
|
aerys/minko
|
plugin/http-worker/src/minko/net/HTTPWorker.cpp
|
/*
Copyright (c) 2014 Aerys
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include "minko/net/HTTPWorker.hpp"
#include "minko/net/HTTPRequest.hpp"
using namespace minko;
using namespace minko::net;
namespace minko
{
    namespace net
    {
        typedef std::unordered_map<std::string, std::string> StringStringUnorderedMap;

        // Worker body: deserializes an HTTP request description from the
        // `input` byte buffer, executes it via HTTPRequest, and relays its
        // signals back to the caller as Messages tagged "progress", "error",
        // "complete" and "buffer".
        //
        // Expected layout of `input` (length fields are raw 4-byte ints,
        // flags raw 1-byte values; no byte-order conversion is performed, so
        // producer and worker must share endianness -- NOTE(review): confirm
        // the worker framework guarantees this):
        //   [i32 urlSize][url bytes]
        //   [i32 usernameSize][username bytes]
        //   [i32 passwordSize][password bytes]
        //   [i32 numAdditionalHeaders] then, per header:
        //     [i32 keySize][i32 valueSize][key bytes][value bytes]
        //   [u8 verifyPeer][u8 buffered]
        //   [i32 postFieldsSize][post-field bytes]
        MINKO_DEFINE_WORKER(HTTPWorker,
        {
            std::stringstream inputStream(std::string(input.begin(), input.end()));

            auto urlSize = 0;
            auto usernameSize = 0;
            auto passwordSize = 0;
            auto numAdditionalHeaders = 0;
            auto verifyPeer = true;
            auto buffered = false;
            auto postFieldsSize = 0;

            // URL, username and password: each is a 4-byte length followed by
            // that many bytes (zero-length fields carry no bytes).
            inputStream.read(reinterpret_cast<char*>(&urlSize), 4);
            auto urlData = std::vector<char>(urlSize);
            if (urlSize > 0)
                inputStream.read(urlData.data(), urlSize);

            inputStream.read(reinterpret_cast<char*>(&usernameSize), 4);
            auto usernameData = std::vector<char>(usernameSize);
            if (usernameSize > 0)
                inputStream.read(usernameData.data(), usernameSize);

            inputStream.read(reinterpret_cast<char*>(&passwordSize), 4);
            auto passwordData = std::vector<char>(passwordSize);
            if (passwordSize > 0)
                inputStream.read(passwordData.data(), passwordSize);

            const auto url = std::string(urlData.begin(), urlData.end());
            const auto username = std::string(usernameData.begin(), usernameData.end());
            const auto password = std::string(passwordData.begin(), passwordData.end());

            // Additional HTTP headers: count, then (keySize, valueSize, key,
            // value) per entry.
            inputStream.read(reinterpret_cast<char*>(&numAdditionalHeaders), 4);

            StringStringUnorderedMap additionalHeaders;

            for (auto i = 0; i < numAdditionalHeaders; ++i)
            {
                auto keySize = 0;
                auto valueSize = 0;

                inputStream.read(reinterpret_cast<char*>(&keySize), 4);
                inputStream.read(reinterpret_cast<char*>(&valueSize), 4);

                auto keyData = std::vector<char>(keySize);
                auto valueData = std::vector<char>(valueSize);

                if (keySize > 0)
                    inputStream.read(keyData.data(), keySize);

                if (valueSize > 0)
                    inputStream.read(valueData.data(), valueSize);

                additionalHeaders.insert(std::make_pair(
                    std::string(keyData.begin(), keyData.end()),
                    std::string(valueData.begin(), valueData.end())
                ));
            }

            // Boolean flags are transmitted as single bytes.
            inputStream.read(reinterpret_cast<char*>(&verifyPeer), 1);
            inputStream.read(reinterpret_cast<char*>(&buffered), 1);

            // Optional POST body.
            inputStream.read(reinterpret_cast<char*>(&postFieldsSize), 4);
            auto postFieldsData = std::vector<char>(postFieldsSize);
            if (postFieldsSize > 0)
                inputStream.read(postFieldsData.data(), postFieldsSize);

            const auto postFields = std::string(postFieldsData.begin(), postFieldsData.end());

            HTTPRequest request(url, username, password, additionalHeaders.empty() ? nullptr : &additionalHeaders, postFields);

            request.verifyPeer(verifyPeer);
            request.buffered(buffered);

            // Keep the four signal slots (_0.._3) alive for the duration of
            // request.run(), which presumably blocks until the transfer ends
            // -- NOTE(review): confirm against HTTPRequest::run().
            auto _0 = request.progress()->connect([&](float p) {
                Message message { "progress" };
                message.set(p);
                post(message);
            });

            auto _1 = request.error()->connect([&](int e, const std::string& errorMessage) {
                Message message{ "error" };
                message.set(std::vector<char>(errorMessage.begin(), errorMessage.end()));
                post(message);
            });

            auto _2 = request.complete()->connect([&](const std::vector<char>& output) {
                Message message{ "complete" };
                message.set(output);
                post(message);
            });

            auto _3 = request.bufferSignal()->connect([&](const std::vector<char>& buffer) {
                Message message{ "buffer" };
                message.set(buffer);
                post(message);
            });

            request.run();
        });
    }
}
|
openknowledge/smallrye-async-api
|
spec/api/src/main/java/io/smallrye/asyncapi/spec/annotations/binding/MessageBindings.java
|
<reponame>openknowledge/smallrye-async-api
/*
* Copyright (C) open knowledge GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package io.smallrye.asyncapi.spec.annotations.binding;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import io.smallrye.asyncapi.spec.annotations.binding.amqp.AMQPMessageBinding;
import io.smallrye.asyncapi.spec.annotations.binding.http.HTTPMessageBinding;
import io.smallrye.asyncapi.spec.annotations.binding.kafka.KafkaMessageBinding;
import io.smallrye.asyncapi.spec.annotations.binding.mqtt.MQTTMessageBinding;
import io.smallrye.asyncapi.spec.annotations.schema.Schema;
/**
 * Describes the protocol-specific bindings of an AsyncAPI message, either as
 * a generic array of {@link MessageBinding}s or via the dedicated
 * protocol-typed members below.
 */
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Inherited
public @interface MessageBindings {

    /**
     * An array where the items describe protocol-specific definitions for the message.
     *
     * @return bindings of the message
     */
    MessageBinding[] binding() default {};

    /**
     * amqp-specific definitions for the message.
     *
     * @return amqp bindings of the message
     */
    AMQPMessageBinding amqp() default @AMQPMessageBinding(messageType = "", contentEncoding = "");

    /**
     * http-specific definitions for the message.
     *
     * @return http bindings of the message
     */
    HTTPMessageBinding http() default @HTTPMessageBinding(headers = @Schema());

    /**
     * kafka-specific definitions for the message.
     *
     * @return kafka bindings of the message
     */
    KafkaMessageBinding kafka() default @KafkaMessageBinding(key = @Schema);

    /**
     * mqtt-specific definitions for the message.
     *
     * @return mqtt bindings of the message
     */
    MQTTMessageBinding mqtt() default @MQTTMessageBinding;
}
|
klunge/KlungeFramework
|
java-api/src/test/java/com/kloia/eventapis/core/CompositeRepositoryImplTest.java
|
<filename>java-api/src/test/java/com/kloia/eventapis/core/CompositeRepositoryImplTest.java
package com.kloia.eventapis.core;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.kloia.eventapis.api.IdCreationStrategy;
import com.kloia.eventapis.api.Views;
import com.kloia.eventapis.api.impl.UUIDCreationStrategy;
import com.kloia.eventapis.cassandra.ConcurrencyResolver;
import com.kloia.eventapis.cassandra.ConcurrentEventException;
import com.kloia.eventapis.cassandra.DefaultConcurrencyResolver;
import com.kloia.eventapis.cassandra.EntityEvent;
import com.kloia.eventapis.common.EventKey;
import com.kloia.eventapis.common.EventRecorder;
import com.kloia.eventapis.common.EventType;
import com.kloia.eventapis.common.PublishedEvent;
import com.kloia.eventapis.exception.EventStoreException;
import com.kloia.eventapis.kafka.IOperationRepository;
import com.kloia.eventapis.kafka.SerializableConsumer;
import com.kloia.eventapis.pojos.Event;
import com.kloia.eventapis.pojos.EventState;
import com.kloia.eventapis.view.Entity;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code CompositeRepositoryImpl}: verifies that events are
 * recorded through the {@code EventRecorder}, serialized with the
 * PublishedOnly JSON view, and published / success-marked / fail-marked
 * through the {@code IOperationRepository} according to their
 * {@code EventType}.
 *
 * Created by orhanburak.bozan on 19/08/2017.
 */
@RunWith(MockitoJUnitRunner.class)
public class CompositeRepositoryImplTest {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    /** Class under test; the mocks below are injected into it. */
    @InjectMocks
    private CompositeRepositoryImpl compositeRepository;

    @Mock
    private EventRecorder eventRecorder;

    @Mock
    private ObjectMapper objectMapper;

    @Mock
    private IOperationRepository operationRepository;

    // NOTE(review): the initializer is discarded when Mockito replaces this
    // field with a mock — presumably intentional, but worth confirming.
    @Mock
    private IdCreationStrategy idCreationStrategy = new UUIDCreationStrategy();

    // Captures the concurrency-resolver factory passed to recordEntityEvent.
    @Captor
    private ArgumentCaptor<Function<EntityEvent, ConcurrencyResolver<ConcurrentEventException>>> concurrencyResolverFactoryCaptor;

    // Captures the optional previous-event key passed to recordEntityEvent.
    @Captor
    private ArgumentCaptor<Optional<EventKey>> previousEventKeyCaptor;

    @Mock
    private ObjectWriter objectWriter;

    @Mock
    private EventKey eventKey;

    // Fixture events, one per EventType, plus their expected JSON forms.
    private PublishedEvent successEvent;
    private PublishedEvent failEvent;
    private PublishedEvent intermediateEvent;
    private String intermediateEventJson;
    private String successEventJson;
    private String failEventJson;
    private Map<String, String> userContextMap;

    @Before
    public void setUp() throws ConcurrentEventException, EventStoreException, JsonProcessingException {
        successEvent = new SuccessEvent();
        failEvent = new FailEvent();
        intermediateEvent = new IntermediateEvent();
        intermediateEventJson = "{IntermediateEvent}";
        successEventJson = "{SuccessEvent}";
        failEventJson = "{FailEvent}";
        userContextMap = new HashMap<>();
        // Serialization always goes through the PublishedOnly view writer.
        when(objectMapper.writerWithView(Views.PublishedOnly.class)).thenReturn(objectWriter);
        // when(userContext.getUserContext()).thenReturn(userContextMap);
        // when(operationContext.getContext()).thenReturn(new Context("opId"));
        // when(operationContext.getContextOpId()).thenReturn(OperationContext.OP_ID);
        // when(operationContext.getCommandContext()).thenReturn("eventId");
    }

    @Test
    public void shouldMarkFail() {
        compositeRepository.markFail("opId");
        verify(eventRecorder).markFail("opId");
    }

    /** Stubs recording and JSON serialization for the given event. */
    private void mockCommon(PublishedEvent event) throws EventStoreException, ConcurrentEventException, JsonProcessingException {
        when(eventRecorder.recordEntityEvent(eq(event), anyLong(), previousEventKeyCaptor.capture(), concurrencyResolverFactoryCaptor.capture())).thenReturn(eventKey);
        when(objectWriter.writeValueAsString(event)).thenReturn("{" + event.getClass().getSimpleName() + "}");
    }

    /** Verifies the event was published under its simple class name with its JSON body. */
    private void assertCommon(PublishedEvent event) {
        verify(operationRepository).publishEvent(eq(event.getClass().getSimpleName()), eq("{" + event.getClass().getSimpleName() + "}"), anyLong());
        // assertThat(publishedEventWrapper.getUserContext(), equalTo(userContextMap));
        // assertThat(publishedEventWrapper.getContext().getOpId(), equalTo("opId"));
        // assertThat(publishedEventWrapper.getEvent(), equalTo("{" + event.getClass().getSimpleName() + "}"));
    }

    // With no previous event, the key is absent and the default resolver is used.
    @Test
    public void shouldRecordAndPublishWithPublishedEvent() throws ConcurrentEventException, EventStoreException, JsonProcessingException {
        mockCommon(intermediateEvent);

        EventKey actual = compositeRepository.recordAndPublish(intermediateEvent);

        assertCommon(intermediateEvent);
        assertThat(actual, equalTo(eventKey));
        assertThat(previousEventKeyCaptor.getValue(), equalTo(Optional.empty()));
        assertThat(concurrencyResolverFactoryCaptor.getValue().apply(new EntityEvent()).getClass(), equalTo(DefaultConcurrencyResolver.class));
    }

    // Previous entity supplied: its event key is forwarded as the previous key.
    @Test
    public void shouldRecordAndPublishWithPreviousEventAndPublishedEvent() throws JsonProcessingException, EventStoreException, ConcurrentEventException {
        mockCommon(intermediateEvent);
        Entity previousEntity = mock(Entity.class);
        EventKey previousEntityEventKey = new EventKey();
        when(previousEntity.getEventKey()).thenReturn(previousEntityEventKey);

        EventKey actual = compositeRepository.recordAndPublish(previousEntity, intermediateEvent);

        assertCommon(intermediateEvent);
        assertThat(actual, equalTo(eventKey));
        assertThat(previousEventKeyCaptor.getValue().isPresent(), equalTo(true));
        assertThat(previousEventKeyCaptor.getValue().get(), equalTo(previousEntityEventKey));
        assertThat(concurrencyResolverFactoryCaptor.getValue().apply(new EntityEvent()).getClass(), equalTo(DefaultConcurrencyResolver.class));
    }

    // Previous event key supplied directly instead of via an entity.
    @Test
    public void shouldRecordAndPublishWithPreviousEventKeyAndPublishedEvent() throws JsonProcessingException, EventStoreException, ConcurrentEventException {
        mockCommon(intermediateEvent);
        EventKey previousEntityEventKey = new EventKey();

        EventKey actual = compositeRepository.recordAndPublish(previousEntityEventKey, intermediateEvent);

        assertCommon(intermediateEvent);
        assertThat(actual, equalTo(eventKey));
        assertThat(previousEventKeyCaptor.getValue().isPresent(), equalTo(true));
        assertThat(previousEventKeyCaptor.getValue().get(), equalTo(previousEntityEventKey));
        assertThat(concurrencyResolverFactoryCaptor.getValue().apply(new EntityEvent()).getClass(), equalTo(DefaultConcurrencyResolver.class));
    }

    // A caller-provided resolver factory must be passed through unchanged.
    @Test
    public void shouldRecordAndPublishWithPreviousEventAndPublishedEventAndConcurrencyResolverFactory() throws JsonProcessingException, EventStoreException, ConcurrentEventException {
        mockCommon(intermediateEvent);
        Entity previousEntity = mock(Entity.class);
        EventKey previousEntityEventKey = new EventKey();
        when(previousEntity.getEventKey()).thenReturn(previousEntityEventKey);
        ConcurrencyResolver concurrencyResolver = mock(ConcurrencyResolver.class);
        Function<EntityEvent, ConcurrencyResolver<ConcurrentEventException>> factory = entityEvent -> concurrencyResolver;

        EventKey actual = compositeRepository.recordAndPublish(previousEntity, intermediateEvent, factory);

        assertCommon(intermediateEvent);
        assertThat(actual, equalTo(eventKey));
        assertThat(previousEventKeyCaptor.getValue().isPresent(), equalTo(true));
        assertThat(previousEventKeyCaptor.getValue().get(), equalTo(previousEntityEventKey));
        assertThat(concurrencyResolverFactoryCaptor.getValue(), equalTo(factory));
    }

    @Test
    public void shouldRecordAndPublishWithPreviousEventKeyAndPublishedEventAndConcurrencyResolverFactory() throws JsonProcessingException, EventStoreException, ConcurrentEventException {
        mockCommon(intermediateEvent);
        EventKey previousEntityEventKey = new EventKey();
        ConcurrencyResolver concurrencyResolver = mock(ConcurrencyResolver.class);
        Function<EntityEvent, ConcurrencyResolver<ConcurrentEventException>> factory = entityEvent -> concurrencyResolver;

        EventKey actual = compositeRepository.recordAndPublish(previousEntityEventKey, intermediateEvent, factory);

        assertCommon(intermediateEvent);
        assertThat(actual, equalTo(eventKey));
        assertThat(previousEventKeyCaptor.getValue().isPresent(), equalTo(true));
        assertThat(previousEventKeyCaptor.getValue().get(), equalTo(previousEntityEventKey));
        assertThat(concurrencyResolverFactoryCaptor.getValue(), equalTo(factory));
    }

    // OP_SUCCESS events must mark the operation succeeded; the captured
    // consumer is replayed on a fresh Event to check the state it applies.
    @Test
    public void shouldSuccessOperationWithSuccessEvent() throws ConcurrentEventException, EventStoreException, JsonProcessingException {
        when(eventRecorder.recordEntityEvent(eq(successEvent), anyLong(), previousEventKeyCaptor.capture(), concurrencyResolverFactoryCaptor.capture())).thenReturn(eventKey);
        when(objectWriter.writeValueAsString(successEvent)).thenReturn(successEventJson);

        compositeRepository.recordAndPublish(successEvent);

        ArgumentCaptor<SerializableConsumer> serializableConsumerCaptor = ArgumentCaptor.forClass(SerializableConsumer.class);
        verify(operationRepository).successOperation(eq("SuccessEvent"), serializableConsumerCaptor.capture());
        Event event = new Event();
        serializableConsumerCaptor.getValue().accept(event);
        assertThat(event.getEventState(), equalTo(EventState.TXN_SUCCEEDED));
    }

    // OP_FAIL events must mark the operation failed, symmetric to the above.
    @Test
    public void shouldFailOperationWithFailEvent() throws ConcurrentEventException, EventStoreException, JsonProcessingException {
        when(eventRecorder.recordEntityEvent(eq(failEvent), anyLong(), previousEventKeyCaptor.capture(), concurrencyResolverFactoryCaptor.capture())).thenReturn(eventKey);
        when(objectWriter.writeValueAsString(failEvent)).thenReturn(failEventJson);

        compositeRepository.recordAndPublish(failEvent);

        ArgumentCaptor<SerializableConsumer> serializableConsumerCaptor = ArgumentCaptor.forClass(SerializableConsumer.class);
        verify(operationRepository).failOperation(eq("FailEvent"), serializableConsumerCaptor.capture());
        Event event = new Event();
        serializableConsumerCaptor.getValue().accept(event);
        assertThat(event.getEventState(), equalTo(EventState.TXN_FAILED));
    }

    // Serialization failures must surface as EventStoreException.
    @Test
    public void shouldThrowExceptionWhenObjectWriterThrowsException() throws JsonProcessingException, ConcurrentEventException, EventStoreException {
        expectedException.expect(EventStoreException.class);
        doThrow(JsonProcessingException.class).when(objectWriter).writeValueAsString(intermediateEvent);

        compositeRepository.recordAndPublish(intermediateEvent);
    }

    /** Fixture event with EventType.EVENT (plain intermediate event). */
    private static class IntermediateEvent extends PublishedEvent {
        @Override
        public EventType getEventType() {
            return EventType.EVENT;
        }
    }

    /** Fixture event with EventType.OP_FAIL. */
    private static class FailEvent extends PublishedEvent {
        @Override
        public EventType getEventType() {
            return EventType.OP_FAIL;
        }
    }

    /** Fixture event with EventType.OP_SUCCESS. */
    private static class SuccessEvent extends PublishedEvent {
        @Override
        public EventType getEventType() {
            return EventType.OP_SUCCESS;
        }
    }
}
|
rafmos/graphql-fhir
|
src/resources/1_0_2/inputs/healthcareservice.input.js
|
<filename>src/resources/1_0_2/inputs/healthcareservice.input.js<gh_stars>0
const { GraphQLInputObjectType, GraphQLEnumType, GraphQLNonNull, GraphQLString, GraphQLList, GraphQLBoolean } = require('graphql');
const { extendSchema } = require('../../../utils/schema.utils');
// Enum restricting the `resourceType` discriminator to the single literal value
// 'HealthcareService', so this input object can only describe that resource.
let HealthcareServiceResourceInputType = new GraphQLEnumType({
name: 'HealthcareServiceResourceInputType',
values: {
HealthcareService: { value: 'HealthcareService' }
}
});
/**
* @name exports
* @summary HealthcareService Input Schema
*/
// GraphQL input type mirroring the FHIR HealthcareService resource (1.0.2 per the
// resource path). Fields prefixed with `_` carry FHIR extension metadata (Element)
// for the like-named primitive field. Extends the shared DomainResource input schema.
module.exports = new GraphQLInputObjectType({
name: 'HealthcareService_Input',
description: 'Base StructureDefinition for HealthcareService Resource.',
fields: () => extendSchema(require('./domainresource.input'), {
// Required discriminator: must be the literal 'HealthcareService'.
resourceType: {
type: new GraphQLNonNull(HealthcareServiceResourceInputType),
description: 'Type of this resource.'
},
identifier: {
type: new GraphQLList(require('./identifier.input')),
description: 'External identifiers for this item.'
},
providedBy: {
type: require('./reference.input'),
description: 'The organization that provides this healthcare service.'
},
serviceCategory: {
type: require('./codeableconcept.input'),
description: 'Identifies the broad category of service being performed or delivered.'
},
serviceType: {
type: new GraphQLList(require('./healthcareserviceservicetype.input')),
description: 'A specific type of service that may be delivered or performed.'
},
// Required: every HealthcareService is anchored to a Location reference.
location: {
type: new GraphQLNonNull(require('./reference.input')),
description: 'The location where this healthcare service may be provided.'
},
serviceName: {
type: GraphQLString,
description: 'Further description of the service as it would be presented to a consumer while searching.'
},
_serviceName: {
type: require('./element.input'),
description: 'Further description of the service as it would be presented to a consumer while searching.'
},
comment: {
type: GraphQLString,
description: 'Any additional description of the service and/or any specific issues not covered by the other attributes, which can be displayed as further detail under the serviceName.'
},
_comment: {
type: require('./element.input'),
description: 'Any additional description of the service and/or any specific issues not covered by the other attributes, which can be displayed as further detail under the serviceName.'
},
extraDetails: {
type: GraphQLString,
description: 'Extra details about the service that can\'t be placed in the other fields.'
},
_extraDetails: {
type: require('./element.input'),
description: 'Extra details about the service that can\'t be placed in the other fields.'
},
photo: {
type: require('./attachment.input'),
description: 'If there is a photo/symbol associated with this HealthcareService, it may be included here to facilitate quick identification of the service in a list.'
},
telecom: {
type: new GraphQLList(require('./contactpoint.input')),
description: 'List of contacts related to this specific healthcare service.'
},
coverageArea: {
type: new GraphQLList(require('./reference.input')),
description: 'The location(s) that this service is available to (not where the service is provided).'
},
// ValueSetReference: http://hl7.org/fhir/ValueSet/service-provision-conditions
serviceProvisionCode: {
type: new GraphQLList(require('./codeableconcept.input')),
description: 'The code(s) that detail the conditions under which the healthcare service is available/offered.'
},
eligibility: {
type: require('./codeableconcept.input'),
description: 'Does this service have specific eligibility requirements that need to be met in order to use the service?.'
},
eligibilityNote: {
type: GraphQLString,
description: 'Describes the eligibility conditions for the service.'
},
_eligibilityNote: {
type: require('./element.input'),
description: 'Describes the eligibility conditions for the service.'
},
programName: {
type: new GraphQLList(GraphQLString),
description: 'Program Names that can be used to categorize the service.'
},
_programName: {
type: require('./element.input'),
description: 'Program Names that can be used to categorize the service.'
},
characteristic: {
type: new GraphQLList(require('./codeableconcept.input')),
description: 'Collection of characteristics (attributes).'
},
// ValueSetReference: http://hl7.org/fhir/ValueSet/service-referral-method
referralMethod: {
type: new GraphQLList(require('./codeableconcept.input')),
description: 'Ways that the service accepts referrals, if this is not provided then it is implied that no referral is required.'
},
publicKey: {
type: GraphQLString,
description: 'The public part of the \'keys\' allocated to an Organization by an accredited body to support secure exchange of data over the internet. To be provided by the Organization, where available.'
},
_publicKey: {
type: require('./element.input'),
description: 'The public part of the \'keys\' allocated to an Organization by an accredited body to support secure exchange of data over the internet. To be provided by the Organization, where available.'
},
appointmentRequired: {
type: GraphQLBoolean,
description: 'Indicates whether or not a prospective consumer will require an appointment for a particular service at a site to be provided by the Organization. Indicates if an appointment is required for access to this service.'
},
_appointmentRequired: {
type: require('./element.input'),
description: 'Indicates whether or not a prospective consumer will require an appointment for a particular service at a site to be provided by the Organization. Indicates if an appointment is required for access to this service.'
},
availableTime: {
type: new GraphQLList(require('./healthcareserviceavailabletime.input')),
description: 'A collection of times that the Service Site is available.'
},
notAvailable: {
type: new GraphQLList(require('./healthcareservicenotavailable.input')),
description: 'The HealthcareService is not available during this period of time due to the provided reason.'
},
availabilityExceptions: {
type: GraphQLString,
description: 'A description of site availability exceptions, e.g. public holiday availability. Succinctly describing all possible exceptions to normal site availability as details in the available Times and not available Times.'
},
_availabilityExceptions: {
type: require('./element.input'),
description: 'A description of site availability exceptions, e.g. public holiday availability. Succinctly describing all possible exceptions to normal site availability as details in the available Times and not available Times.'
}
})
});
|
mankeyl/elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageStatsRequest.java
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.admin.indices.stats;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
/**
 * Broadcast request asking the shards of the targeted indices for their field usage
 * statistics, optionally restricted to a set of field name patterns.
 */
public class FieldUsageStatsRequest extends BroadcastRequest<FieldUsageStatsRequest> {

    /** Field name patterns whose usage should be reported; empty means none selected yet. */
    private String[] fields = Strings.EMPTY_ARRAY;

    public FieldUsageStatsRequest(String... indices) {
        super(indices);
    }

    public FieldUsageStatsRequest(String[] indices, IndicesOptions indicesOptions) {
        super(indices, indicesOptions);
    }

    /** Deserializes the request from the wire; fields are read after the superclass state. */
    public FieldUsageStatsRequest(StreamInput in) throws IOException {
        super(in);
        this.fields = in.readStringArray();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Superclass state first so the wire format matches the reading constructor above.
        super.writeTo(out);
        out.writeStringArray(this.fields);
    }

    /** Sets the field name patterns to report usage for; returns {@code this} for chaining. */
    public FieldUsageStatsRequest fields(String... fields) {
        this.fields = fields;
        return this;
    }

    /** Returns the configured field name patterns. */
    public String[] fields() {
        return this.fields;
    }

    @Override
    public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
        // The task is cancellable so a long-running stats collection can be aborted;
        // its description is deferred to this request's getDescription().
        return new CancellableTask(id, FieldUsageStatsAction.NAME, type, "", parentTaskId, headers) {
            @Override
            public String getDescription() {
                return FieldUsageStatsRequest.this.getDescription();
            }
        };
    }

    @Override
    public String getDescription() {
        final StringBuilder description = new StringBuilder("get field usage for indices [");
        description.append(String.join(",", indices));
        description.append("], fields ").append(Arrays.toString(fields));
        return description.toString();
    }
}
|
DamianDominoDavis/kolmafia-1
|
src/net/sourceforge/kolmafia/swingui/BuffBotFrame.java
|
package net.sourceforge.kolmafia.swingui;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridLayout;
import javax.swing.BorderFactory;
import javax.swing.JComboBox;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JTextArea;
import javax.swing.ListSelectionModel;
import javax.swing.SwingConstants;
import net.java.dev.spellcast.utilities.JComponentUtilities;
import net.java.dev.spellcast.utilities.LockableListModel;
import net.sourceforge.kolmafia.BuffBotHome;
import net.sourceforge.kolmafia.KoLConstants;
import net.sourceforge.kolmafia.KoLGUIConstants;
import net.sourceforge.kolmafia.persistence.SkillDatabase;
import net.sourceforge.kolmafia.preferences.Preferences;
import net.sourceforge.kolmafia.request.UseSkillRequest;
import net.sourceforge.kolmafia.session.BuffBotManager;
import net.sourceforge.kolmafia.session.BuffBotManager.Offering;
import net.sourceforge.kolmafia.swingui.panel.GenericPanel;
import net.sourceforge.kolmafia.swingui.panel.ScrollablePanel;
import net.sourceforge.kolmafia.swingui.widget.AutoHighlightTextField;
import net.sourceforge.kolmafia.swingui.widget.GenericScrollPane;
import net.sourceforge.kolmafia.utilities.StringUtilities;
// Swing frame for operating and configuring the BuffBot: a run/log tab, an
// offerings editor tab, and a settings tab. All state is persisted through
// BuffBotManager and the Preferences store.
public class BuffBotFrame extends GenericFrame {
// List of configured buff offerings; shared between BuffOptionsPanel (which
// removes selected entries) and BuffListPanel (which displays them).
private JList<Offering> buffListDisplay;
/**
* Constructs a new <code>BuffBotFrame</code> and inserts all of the necessary panels into a
* tabular layout for accessibility.
*/
public BuffBotFrame() {
super("BuffBot Manager");
// Initialize the display log buffer and the file log
this.tabs.addTab("Run Buffbot", new MainBuffPanel());
JPanel optionsContainer = new JPanel(new BorderLayout(10, 10));
optionsContainer.add(new BuffOptionsPanel(), BorderLayout.NORTH);
optionsContainer.add(new BuffListPanel(), BorderLayout.CENTER);
this.tabs.addTab("Edit Offerings", optionsContainer);
// NOTE(review): this uses GenericFrame.addTab while the two tabs above call
// this.tabs.addTab directly — confirm both paths are equivalent.
this.addTab("Change Settings", new MainSettingsPanel());
this.setCenterComponent(this.tabs);
}
@Override
public boolean useSidePane() {
return true;
}
/** Internal class used to handle everything related to operating the buffbot. */
private class MainBuffPanel extends ScrollablePanel {
public MainBuffPanel() {
// "start" runs the bot, "stop" deactivates it; the scroll component shows
// the BuffBotHome message log with its custom renderer.
super("BuffBot Activities", "start", "stop", new JList<>(BuffBotHome.getMessages()));
((JList<?>) this.scrollComponent).setCellRenderer(BuffBotHome.getMessageRenderer());
}
@Override
public void setEnabled(final boolean isEnabled) {
// Only the start button tracks the enabled state; guard against early calls
// during superclass construction, before the button exists.
if (this.confirmedButton == null) {
return;
}
this.confirmedButton.setEnabled(isEnabled);
}
@Override
public void actionConfirmed() {
// Ignore "start" while the bot is already running.
if (BuffBotHome.isBuffBotActive()) {
return;
}
// Need to make sure everything is up to date.
// This includes character status, inventory
// data and current settings.
BuffBotHome.setBuffBotActive(true);
BuffBotManager.runBuffBot(Integer.MAX_VALUE);
}
@Override
public void actionCancelled() {
// "stop": deactivate the bot; the run loop observes this flag.
BuffBotHome.setBuffBotActive(false);
}
}
/** Internal class used to handle everything related to BuffBot options management */
private class BuffOptionsPanel extends GenericPanel {
private final JComboBox<UseSkillRequest> skillSelect;
private final AutoHighlightTextField priceField, countField;
public BuffOptionsPanel() {
super("add", "remove", new Dimension(150, 20), new Dimension(300, 20));
// Offer only usable skills that are actually buffs.
LockableListModel<UseSkillRequest> buffSet = new LockableListModel<UseSkillRequest>();
for (UseSkillRequest skill : KoLConstants.usableSkills) {
if (SkillDatabase.isBuff(SkillDatabase.getSkillId(skill.getSkillName()))) {
buffSet.add(skill);
}
}
this.skillSelect = new JComboBox<>(buffSet);
this.priceField = new AutoHighlightTextField();
this.countField = new AutoHighlightTextField();
VerifiableElement[] elements = new VerifiableElement[3];
elements[0] = new VerifiableElement("Buff to cast: ", this.skillSelect);
elements[1] = new VerifiableElement("Price (in meat): ", this.priceField);
elements[2] = new VerifiableElement("# of casts: ", this.countField);
this.setContent(elements);
}
@Override
public void actionConfirmed() {
// "add": register the selected buff with the entered price and cast count.
BuffBotManager.addBuff(
((UseSkillRequest) this.skillSelect.getSelectedItem()).getSkillName(),
StringUtilities.parseInt(this.priceField.getText()),
StringUtilities.parseInt(this.countField.getText()));
}
@Override
public void actionCancelled() {
// "remove": delete every offering currently selected in the shared list.
BuffBotManager.removeBuffs(BuffBotFrame.this.buffListDisplay.getSelectedValuesList());
}
}
// Read-only view of the active buff offerings, backed by BuffBotManager's cost table.
private class BuffListPanel extends JPanel {
public BuffListPanel() {
this.setLayout(new BorderLayout());
this.setBorder(BorderFactory.createLineBorder(Color.black, 1));
this.add(
JComponentUtilities.createLabel(
"Active Buffing List", SwingConstants.CENTER, Color.black, Color.white),
BorderLayout.NORTH);
// The JList lives on the enclosing frame so BuffOptionsPanel can read its selection.
BuffBotFrame.this.buffListDisplay = new JList<>(BuffBotManager.getBuffCostTable());
BuffBotFrame.this.buffListDisplay.setSelectionMode(
ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
BuffBotFrame.this.buffListDisplay.setVisibleRowCount(5);
this.add(new GenericScrollPane(BuffBotFrame.this.buffListDisplay), BorderLayout.CENTER);
}
}
/** Internal class used to handle everything related to BuffBot White List management */
private class MainSettingsPanel extends GenericPanel {
private final JTextArea invalidPriceMessage, thanksMessage;
private final JComboBox<String> philanthropyModeSelect;
private final JComboBox<String> messageDisposalSelect;
public MainSettingsPanel() {
super("save", "reset", new Dimension(120, 20), new Dimension(200, 20), false);
// Combo box indices map directly to the integer preference values saved below.
LockableListModel<String> philanthropyModeChoices = new LockableListModel<String>();
philanthropyModeChoices.add("Disabled");
philanthropyModeChoices.add("Once per day");
philanthropyModeChoices.add("Clan only");
this.philanthropyModeSelect = new JComboBox<>(philanthropyModeChoices);
LockableListModel<String> messageDisposalChoices = new LockableListModel<String>();
messageDisposalChoices.add("Auto-save non-requests");
messageDisposalChoices.add("Auto-delete non-requests");
messageDisposalChoices.add("Do nothing to non-requests");
this.messageDisposalSelect = new JComboBox<>(messageDisposalChoices);
VerifiableElement[] elements = new VerifiableElement[2];
elements[0] = new VerifiableElement("Philanthropy: ", this.philanthropyModeSelect);
elements[1] = new VerifiableElement("Message disposal: ", this.messageDisposalSelect);
this.invalidPriceMessage = new JTextArea();
this.thanksMessage = new JTextArea();
this.invalidPriceMessage.setFont(KoLGUIConstants.DEFAULT_FONT);
this.invalidPriceMessage.setLineWrap(true);
this.invalidPriceMessage.setWrapStyleWord(true);
this.thanksMessage.setFont(KoLGUIConstants.DEFAULT_FONT);
this.thanksMessage.setLineWrap(true);
this.thanksMessage.setWrapStyleWord(true);
// Load the current preference values into the controls before laying them out.
this.actionCancelled();
this.setContent(elements);
}
@Override
public void setContent(final VerifiableElement[] elements) {
super.setContent(elements);
// Below the standard form, stack the two free-text message editors.
JPanel settingsMiddlePanel = new JPanel(new BorderLayout());
settingsMiddlePanel.add(
JComponentUtilities.createLabel(
"Invalid Buff Price Message", SwingConstants.CENTER, Color.black, Color.white),
BorderLayout.NORTH);
settingsMiddlePanel.add(this.invalidPriceMessage, BorderLayout.CENTER);
JPanel settingsBottomPanel = new JPanel(new BorderLayout());
settingsBottomPanel.add(
JComponentUtilities.createLabel(
"Donation Thanks Message", SwingConstants.CENTER, Color.black, Color.white),
BorderLayout.NORTH);
settingsBottomPanel.add(this.thanksMessage, BorderLayout.CENTER);
JPanel settingsPanel = new JPanel(new GridLayout(2, 1, 10, 10));
settingsPanel.add(settingsMiddlePanel);
settingsPanel.add(settingsBottomPanel);
this.add(settingsPanel, BorderLayout.CENTER);
}
@Override
public void actionConfirmed() {
// "save": persist the combo box indices and message texts to preferences.
Preferences.setInteger(
"buffBotPhilanthropyType", this.philanthropyModeSelect.getSelectedIndex());
Preferences.setInteger(
"buffBotMessageDisposal", this.messageDisposalSelect.getSelectedIndex());
Preferences.setString("invalidBuffMessage", this.invalidPriceMessage.getText());
Preferences.setString("thanksMessage", this.thanksMessage.getText());
}
@Override
public void actionCancelled() {
// "reset": discard edits by reloading every control from stored preferences.
this.philanthropyModeSelect.setSelectedIndex(
Preferences.getInteger("buffBotPhilanthropyType"));
this.messageDisposalSelect.setSelectedIndex(Preferences.getInteger("buffBotMessageDisposal"));
this.invalidPriceMessage.setText(Preferences.getString("invalidBuffMessage"));
this.thanksMessage.setText(Preferences.getString("thanksMessage"));
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.