text
stringlengths 1
1.05M
|
|---|
// (C) 2019-2020 GoodData Corporation
import { IWidget, IWidgetDefinition, isWidget, isWidgetDefinition } from "./widget";
import isEmpty from "lodash/isEmpty";
import noop from "lodash/noop";
/**
* Dashboard layout
* @alpha
*/
export type Layout = IFluidLayout;
/**
* Layout widget
* @alpha
*/
export type Widget = ILayoutWidget;
/**
* Layout content - widget or another layout
* @alpha
*/
export type LayoutContent = Widget | Layout;
/**
* Layout reference to the widget
* @alpha
*/
export interface ILayoutWidget {
/**
* Widget object reference
*/
widget: IWidget;
}
/**
 * Type-guard testing whether the provided object is an instance of {@link ILayoutWidget}.
 * @alpha
 */
export function isLayoutWidget(obj: unknown): obj is ILayoutWidget {
    if (isEmpty(obj)) {
        return false;
    }
    // A layout widget is any non-empty object whose "widget" property is a widget.
    return isWidget((obj as ILayoutWidget).widget);
}
/**
 * Fluid layout
 * @alpha
 */
export interface IFluidLayout {
    fluidLayout: {
        /**
         * Layout rows
         */
        rows: IFluidLayoutRow[];
        /**
         * Layout size
         */
        size?: IFluidLayoutSize;
        /**
         * Layout style
         */
        style?: string;
    };
}
/**
* Type-guard testing whether the provided object is an instance of {@link IFluidLayout}.
* @alpha
*/
export function isFluidLayout(obj: unknown): obj is IFluidLayout {
return hasFluidLayoutProps(obj) && layoutWidgets(obj as any).every(isWidget);
}
/**
* Type-guard testing whether the provided object is an instance of {@link IFluidLayoutDefinition}.
* @alpha
*/
export function isFluidLayoutDefinition(obj: unknown): obj is IFluidLayoutDefinition {
return hasFluidLayoutProps(obj) && layoutWidgets(obj as any).some(isWidgetDefinition);
}
/**
 * Checks the minimal structural shape shared by {@link IFluidLayout} and
 * {@link IFluidLayoutDefinition}: a non-empty object with fluidLayout.rows.
 * @internal
 */
function hasFluidLayoutProps(obj: unknown): boolean {
    if (isEmpty(obj)) {
        return false;
    }
    const rows = (obj as IFluidLayoutDefinition | IFluidLayout)?.fluidLayout?.rows;
    return !!rows;
}
/**
* Fluid layout row definition
* @alpha
*/
export interface IFluidLayoutRow {
/**
* Row columns
*/
columns: IFluidLayoutColumn[];
/**
* Row style
*/
style?: string;
/**
* Row header
*/
header?: SectionHeader;
}
/**
* Fluid layout column definition
* @alpha
*/
export interface IFluidLayoutColumn {
/**
* Column content - widget or another layout
*/
content?: LayoutContent;
/**
* Column size
*/
size: IFluidLayoutColSize;
/**
* Column style
*/
style?: string;
}
/**
 * Fluid layout column size, per responsive breakpoint.
 *
 * NOTE(review): the breakpoint semantics (xs/sm/md/lg/xl) are inferred from the
 * property names — confirm against the rendering code.
 * @alpha
 */
export interface IFluidLayoutColSize {
    /**
     * Size for extra-large screens (the only required breakpoint).
     */
    xl: IFluidLayoutSize;
    /**
     * Optional size override for extra-small screens.
     */
    xs?: IFluidLayoutSize;
    /**
     * Optional size override for small screens.
     */
    sm?: IFluidLayoutSize;
    /**
     * Optional size override for medium screens.
     */
    md?: IFluidLayoutSize;
    /**
     * Optional size override for large screens.
     */
    lg?: IFluidLayoutSize;
}
/**
* Fluid layout size
* @alpha
*/
export interface IFluidLayoutSize {
/**
* Width
*/
width: number;
/**
* Height, defined as ratio
*/
heightAsRatio?: number;
}
/**
* Layout section header
* @alpha
*/
export type SectionHeader = ISectionHeader | ISectionDescription;
/**
* Section header
* @alpha
*/
export interface ISectionHeader {
/**
* Section title
*/
title: string;
/**
* Section description
*/
description?: string;
}
/**
* Section header without title
* @alpha
*/
export interface ISectionDescription {
/**
* Section description
*/
description: string;
}
/**
* Dashboard layout definition
* @alpha
*/
export type LayoutDefinition = IFluidLayoutDefinition;
/**
* Layout widget definition
* @alpha
*/
export type LayoutWidgetDefinition = ILayoutWidgetDefinition;
/**
* Layout definition content - widget or another layout
* @alpha
*/
export type LayoutDefinitionContent = Widget | Layout | LayoutDefinition | LayoutWidgetDefinition;
/**
* Fluid layout definition
* @alpha
*/
export interface IFluidLayoutDefinition {
fluidLayout: {
/**
* Layout rows
*/
rows: IFluidLayoutRowDefinition[];
/**
* Layout size
*/
size?: IFluidLayoutSize;
/**
* Layout style
*/
style?: string;
};
}
/**
* Fluid layout row definition
* @alpha
*/
export interface IFluidLayoutRowDefinition {
/**
* Row columns
*/
columns: IFluidLayoutColumnDefinition[];
/**
* Row style
*/
style?: string;
/**
* Row header
*/
header?: SectionHeader;
}
/**
* Fluid layout column definition
* @alpha
*/
export interface IFluidLayoutColumnDefinition {
/**
* Column content - widget or another layout
*/
content?: LayoutDefinitionContent;
/**
* Column size
*/
size: IFluidLayoutColSize;
/**
* Column style
*/
style?: string;
}
/**
* Layout reference to the widget
* @alpha
*/
export interface ILayoutWidgetDefinition {
/**
* Widget object reference
*/
widget: IWidget | IWidgetDefinition;
}
/**
* Type-guard testing whether the provided object is an instance of {@link ILayoutWidgetDefinition}.
* @alpha
*/
export function isLayoutWidgetDefinition(obj: unknown): obj is ILayoutWidgetDefinition {
return !isEmpty(obj) && isWidgetDefinition((obj as any).widget);
}
/**
* Represents nested path in layout
* It's useful to track the layout location of the widget
* @alpha
*/
export type LayoutPath = Array<string | number>;
/**
 * Walk dashboard layout
 * This is useful to collect widgets from the layout or perform transforms on the layout
 *
 * @alpha
 * @param layout - dashboard layout
 * @param callbacks - walk callbacks
 * @param path - current location in the layout (used for recursion)
 * @returns void
 */
export function walkLayout(
    layout: Layout | LayoutDefinition,
    {
        rowCallback = noop,
        columnCallback = noop,
        widgetCallback = noop,
    }: {
        rowCallback?: (row: IFluidLayoutRow | IFluidLayoutRowDefinition, rowPath: LayoutPath) => void;
        columnCallback?: (
            column: IFluidLayoutColumn | IFluidLayoutColumnDefinition,
            columnPath: LayoutPath,
        ) => void;
        widgetCallback?: (widget: IWidget | IWidgetDefinition, widgetPath: LayoutPath) => void;
    },
    path: LayoutPath = ["fluidLayout", "rows"],
): void {
    const rows = layout.fluidLayout.rows;
    for (let rowIndex = 0; rowIndex < rows.length; rowIndex += 1) {
        const row = rows[rowIndex];
        const rowPath = [...path, rowIndex];
        rowCallback(row, rowPath);
        for (let columnIndex = 0; columnIndex < row.columns.length; columnIndex += 1) {
            const column = row.columns[columnIndex];
            const columnPath = [...rowPath, "columns", columnIndex];
            columnCallback(column, columnPath);
            const content = column.content;
            if (isLayoutWidget(content) || isLayoutWidgetDefinition(content)) {
                // Leaf node: report the widget itself.
                widgetCallback(content.widget, [...columnPath, "content", "widget"]);
            } else if (isFluidLayout(content) || isFluidLayoutDefinition(content)) {
                // Nested layout: recurse with an extended path.
                walkLayout(
                    content,
                    { rowCallback, columnCallback, widgetCallback },
                    [...columnPath, "content", "fluidLayout", "rows"],
                );
            }
        }
    }
}
/**
* Widget with it's layout path
* @alpha
*/
export interface IWidgetWithLayoutPath {
path: LayoutPath;
widget: IWidget;
}
/**
* Widget definition with it's layout path
* @alpha
*/
export interface IWidgetDefinitionWithLayoutPath {
path: LayoutPath;
widget: IWidgetDefinition;
}
/**
* Widget definition or widget with it's layout path
* @alpha
*/
export type IWidgetOrDefinitionWithLayoutPath = IWidgetWithLayoutPath | IWidgetDefinitionWithLayoutPath;
/**
 * @alpha
 */
export function layoutWidgetsWithPaths(layout: Layout): IWidgetWithLayoutPath[];
/**
 * @alpha
 */
export function layoutWidgetsWithPaths(layout: LayoutDefinition): IWidgetOrDefinitionWithLayoutPath[];
/**
 * Get all dashboard widgets
 * (layout does not only specify rendering, but also all used widgets)
 *
 * @alpha
 * @param layout - dashboard layout
 * @returns - widgets with layout paths
 */
export function layoutWidgetsWithPaths(
    layout: Layout | LayoutDefinition,
): IWidgetOrDefinitionWithLayoutPath[] {
    const result: IWidgetOrDefinitionWithLayoutPath[] = [];
    // Visit every widget in the (possibly nested) layout and record its path.
    walkLayout(layout, {
        widgetCallback: (widget, path) => {
            result.push({ widget, path });
        },
    });
    return result;
}
/**
 * @alpha
 */
export function layoutWidgets(layout: Layout): IWidget[];
/**
 * @alpha
 */
export function layoutWidgets(layout: LayoutDefinition): Array<IWidgetDefinition | IWidget>;
/**
 * Get all dashboard widgets
 * (layout does not only specify rendering, but also all used widgets)
 *
 * @alpha
 * @param layout - dashboard layout
 * @returns - widgets
 */
export function layoutWidgets(layout: Layout | LayoutDefinition): Array<IWidgetDefinition | IWidget> {
    const widgets: Array<IWidgetDefinition | IWidget> = [];
    // Collect every widget reachable from the (possibly nested) layout.
    walkLayout(layout, {
        widgetCallback: (widget) => {
            widgets.push(widget);
        },
    });
    return widgets;
}
|
/* Copyright (c) 2008-2015, <NAME>
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice appear
in all copies.
There is NO WARRANTY for this software. See license.txt for
details. */
package java.net;
import java.io.IOException;
public class InetAddress {
    /** Host name this address was resolved from. */
    private final String name;
    /** Resolved IPv4 address packed into a big-endian int. */
    private final int ip;

    private InetAddress(String name) throws UnknownHostException {
        this.name = name;
        this.ip = ipv4AddressForName(name);
    }

    public String getHostName() {
        return name;
    }

    /**
     * Returns the textual IPv4 address ("a.b.c.d").
     *
     * Fix: format the already-resolved {@code ip} directly. The previous
     * implementation constructed a fresh InetAddress, performing a redundant
     * (and possibly differing) DNS lookup, and returned null on failure.
     */
    public String getHostAddress() {
        return toString();
    }

    public static InetAddress getByName(String name) throws UnknownHostException {
        try {
            Socket.init();
            return new InetAddress(name);
        } catch (IOException e) {
            // Preserve the underlying I/O failure as the cause.
            UnknownHostException uhe = new UnknownHostException(name);
            uhe.initCause(e);
            throw uhe;
        }
    }

    /** Returns the address as 4 bytes in network (big-endian) order. */
    public byte[] getAddress() {
        byte[] res = new byte[4];
        res[0] = (byte) ( ip >>> 24);
        res[1] = (byte) ((ip >>> 16) & 0xFF);
        res[2] = (byte) ((ip >>> 8 ) & 0xFF);
        res[3] = (byte) ((ip       ) & 0xFF);
        return res;
    }

    @Override
    public String toString() {
        byte[] addr = getAddress();
        // (b + 256) % 256 maps the signed byte back to an unsigned 0..255 value.
        return (int)((addr[0] + 256) % 256) + "." +
               (int)((addr[1] + 256) % 256) + "." +
               (int)((addr[2] + 256) % 256) + "." +
               (int)((addr[3] + 256) % 256);
    }

    public int getRawAddress() {
        return ip;
    }

    /** Resolves a host name to a packed IPv4 address (implemented natively). */
    static native int ipv4AddressForName(String name) throws UnknownHostException;

    @Override
    public boolean equals(Object o) {
        return o instanceof InetAddress && ((InetAddress) o).ip == ip;
    }

    @Override
    public int hashCode() {
        return ip;
    }
}
|
import java.net.Socket;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
/**
 * Immutable record of a peer that has completed login: its session token,
 * network endpoint, open socket with its object streams, and the persisted
 * user it authenticated as.
 */
public class LoggedInPeer {
    private final int tokenId;
    // Derived once from the socket's remote address at construction time.
    private final String ipAddress;
    private final int port;
    private final Socket socket;
    private final ObjectOutputStream output;
    private final ObjectInputStream input;
    private final SavedPeer user;

    public LoggedInPeer(int tokenId, int port, Socket socket, ObjectOutputStream output,
                        ObjectInputStream input, SavedPeer user) {
        this.tokenId = tokenId;
        this.ipAddress = socket.getInetAddress().getHostAddress();
        this.port = port;
        this.socket = socket;
        this.output = output;
        this.input = input;
        this.user = user;
    }

    public int getTokenId() {
        return tokenId;
    }

    public String getIpAddress() {
        return ipAddress;
    }

    public int getPort() {
        return port;
    }

    public Socket getSocket() {
        return socket;
    }

    public ObjectOutputStream getOutputStream() {
        return output;
    }

    public ObjectInputStream getInputStream() {
        return input;
    }

    public SavedPeer getUser() {
        return user;
    }
}
|
package com.momo.service.service.aclmanager.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.google.common.collect.Lists;
import com.momo.common.util.DateUtils;
import com.momo.common.error.BizException;
import com.momo.common.util.Encrypt;
import com.momo.common.util.StrUtil;
import com.momo.common.util.snowFlake.SnowFlake;
import com.momo.mapper.dataobject.RoleDO;
import com.momo.mapper.dataobject.UserAccountPwdDO;
import com.momo.mapper.dataobject.UserDO;
import com.momo.mapper.dataobject.manual.SysUserListDO;
import com.momo.mapper.mapper.manual.RoleMapper;
import com.momo.mapper.mapper.manual.UserAccountPwdMapper;
import com.momo.mapper.mapper.manual.UserMapper;
import com.momo.mapper.req.aclmanager.SysUserAddReq;
import com.momo.mapper.req.aclmanager.SysUserListReq;
import com.momo.mapper.req.sysmain.RedisUser;
import com.momo.mapper.res.aclmanager.SysUserListRes;
import com.momo.service.service.BaseService;
import com.momo.service.service.SuperAdminsService;
import com.momo.service.service.aclmanager.SysUserService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * @program: momo-cloud-permission
 * @description: 用户管理 (system user management: create, view, edit, enable/disable,
 * password reset and paged listing, with super-admin / admin permission rules)
 * @author: <NAME>
 * @create: 2019-08-02 17:20
 **/
@Service
@Slf4j
public class SysUserServiceImpl extends BaseService implements SysUserService {
    @Autowired
    private UserMapper userMapper;
    @Autowired
    private UserAccountPwdMapper userAccountPwdMapper;
    @Autowired
    private RoleMapper roleMapper;
    @Autowired
    private SuperAdminsService superAdminsService;
    // Distributed ID generator (datacenter 1, machine 1).
    private SnowFlake snowFlake = new SnowFlake(1, 1);

    /**
     * Creates a user plus its login-account record in one transaction.
     * Rejects duplicate login names; passwords are salted and hashed.
     */
    @Transactional
    @Override
    public String sysUserAdd(SysUserAddReq sysUserAddReq) {
        UserAccountPwdDO exitsUserAccountPwdDO = userAccountPwdMapper.sysUserAccountLogin(sysUserAddReq.getSysUserLoginName());
        if (exitsUserAccountPwdDO != null) {
            throw BizException.fail("登录账号已存在");
        }
        RedisUser redisUser = this.redisUser();
        UserDO userDO = new UserDO();
        BeanUtils.copyProperties(sysUserAddReq, userDO);
        Long id = snowFlake.nextId();
        userDO.setId(id);
        userDO.setSysUserEmail(sysUserAddReq.getSysUserLoginName());
        userDO.setUuid(StrUtil.genUUID());
        userDO.setTenantId(redisUser.getTenantId());
        userDO.setCreateBy(redisUser.getSysUserName());
        userDO.setUpdateBy(redisUser.getSysUserName());
        userDO.setCreateTime(DateUtils.getDateTime());
        userDO.setUpdateTime(DateUtils.getDateTime());
        userMapper.insertSelective(userDO);
        UserAccountPwdDO userAccountPwdDO = new UserAccountPwdDO();
        BeanUtils.copyProperties(sysUserAddReq, userAccountPwdDO);
        userAccountPwdDO.setId(snowFlake.nextId());
        // Per-account random salt; store only the salted hash, never the plaintext.
        String salt = StrUtil.genUUID();
        userAccountPwdDO.setSysUserAuthSalt(salt);
        String pwd = Encrypt.SHA512AndSHA256(sysUserAddReq.getSysUserPwd(), salt);
        userAccountPwdDO.setSysUserPwd(pwd);
        userAccountPwdDO.setTenantId(redisUser.getTenantId());
        userAccountPwdDO.setCreateBy(redisUser.getSysUserName());
        userAccountPwdDO.setUpdateBy(redisUser.getSysUserName());
        userAccountPwdDO.setUuid(StrUtil.genUUID());
        userAccountPwdDO.setSysUserId(id);
        userAccountPwdDO.setCreateTime(DateUtils.getDateTime());
        userAccountPwdDO.setUpdateTime(DateUtils.getDateTime());
        userAccountPwdMapper.insertSelective(userAccountPwdDO);
        return "新增用户成功";
    }

    /**
     * Looks a user up by UUID. The internal primary key is blanked before
     * returning so it never leaks to the client.
     */
    @Override
    public UserDO sysUserDetail(SysUserAddReq sysUserAddReq) {
        UserDO userDODetail = userMapper.uuid(sysUserAddReq.getUuid());
        if (null == userDODetail) {
            throw BizException.fail("待查询的用户不存在");
        }
        userDODetail.setId(null);
        return userDODetail;
    }

    /**
     * Edits a user's profile. A super-admin operator may edit anyone; an
     * ordinary operator may not change their own enabled/disabled state, may
     * not edit a super-admin, and may not edit an admin other than themselves.
     */
    @Transactional
    @Override
    public String sysUserModify(SysUserAddReq sysUserAddReq) {
        UserDO userDODetail = userMapper.uuid(sysUserAddReq.getUuid());
        if (null == userDODetail) {
            throw BizException.fail("待编辑的用户不存在");
        }
        RedisUser redisUser = this.redisUser();
        UserDO userDO = new UserDO();
        BeanUtils.copyProperties(sysUserAddReq, userDO);
        userDO.setSysUserName(sysUserAddReq.getSysUserName());
        userDO.setDisabledFlag(sysUserAddReq.getDisabledFlag());
        userDO.setId(userDODetail.getId());
        userDO.setUpdateBy(redisUser.getSysUserName());
        userDO.setUpdateTime(DateUtils.getDateTime());
        //超级管理员 编辑所有 (super-admin operator may edit everything)
        if (superAdminsService.checkIsSuperAdmin(redisUser.getSysUserPhone())) {
            userMapper.updateByPrimaryKeySelective(userDO);
            return "编辑用户信息成功";
        } else {
            if (userDODetail.getId().equals(redisUser.getBaseId()) && !userDODetail.getDisabledFlag().equals(sysUserAddReq.getDisabledFlag())) {
                throw BizException.fail("您无法更改自己的用户状态");
            }
            // BUGFIX: the original re-checked the OPERATOR's phone here, which is
            // always false inside this branch (dead code). Mirroring
            // sysUserStatus/sysUserPwd, the guard must protect the TARGET user.
            if (superAdminsService.checkIsSuperAdmin(userDODetail.getSysUserPhone())) {
                throw BizException.fail("超级管理员信息不允许编辑");
            }
            //是否被禁用 0否 1禁用 (disabled flag: 0 = active, 1 = disabled)
            List<RoleDO> roleDOS = roleMapper.getRolesByUserId(userDODetail.getId(), 0);
            //角色的类型,0:管理员(老板),1:管理员(员工) 2其他 (role type: 0 = admin/owner, 1 = admin/staff, 2 = other)
            Set<Integer> roleTypes = roleDOS.stream().map(roleDO -> roleDO.getSysRoleType()).collect(Collectors.toSet());
            if (roleTypes.contains(0) && !userDODetail.getId().equals(redisUser.getBaseId())) {
                throw BizException.fail("管理员信息不允许编辑");
            }
            userMapper.updateByPrimaryKeySelective(userDO);
            return "编辑用户信息成功";
        }
    }

    /**
     * Enables/disables a user. Same permission rules as {@link #sysUserModify}.
     */
    @Transactional
    @Override
    public String sysUserStatus(SysUserAddReq sysUserAddReq) {
        UserDO userDODetail = userMapper.uuid(sysUserAddReq.getUuid());
        if (null == userDODetail) {
            throw BizException.fail("待编辑的用户信息不存在");
        }
        RedisUser redisUser = this.redisUser();
        UserDO userDO = new UserDO();
        userDO.setDisabledFlag(sysUserAddReq.getDisabledFlag());
        userDO.setId(userDODetail.getId());
        userDO.setUpdateBy(redisUser.getSysUserName());
        userDO.setUpdateTime(DateUtils.getDateTime());
        //超级管理员 编辑所有 (super-admin operator may edit everything)
        if (superAdminsService.checkIsSuperAdmin(redisUser.getSysUserPhone())) {
            userMapper.updateByPrimaryKeySelective(userDO);
            return "用户状态设置成功";
        } else {
            // Check the TARGET user's phone directly (the original built a
            // throwaway RedisUser just to carry the phone — same behavior).
            if (superAdminsService.checkIsSuperAdmin(userDODetail.getSysUserPhone())) {
                throw BizException.fail("超级管理员状态不允许编辑");
            }
            //是否被禁用 0否 1禁用 (disabled flag: 0 = active, 1 = disabled)
            List<RoleDO> roleDOS = roleMapper.getRolesByUserId(userDODetail.getId(), 0);
            //角色的类型,0:管理员(老板),1:管理员(员工) 2其他 (role type: 0 = admin/owner, 1 = admin/staff, 2 = other)
            Set<Integer> roleTypes = roleDOS.stream().map(roleDO -> roleDO.getSysRoleType()).collect(Collectors.toSet());
            if (roleTypes.contains(0) && !userDODetail.getId().equals(redisUser.getBaseId())) {
                throw BizException.fail("管理员状态不允许编辑");
            }
            userMapper.updateByPrimaryKeySelective(userDO);
            return "用户状态设置成功";
        }
    }

    /**
     * Resets a user's password with a fresh salt. Same permission rules as
     * {@link #sysUserStatus}.
     */
    @Transactional
    @Override
    public String sysUserPwd(SysUserAddReq sysUserAddReq) {
        UserDO userDODetail = userMapper.uuid(sysUserAddReq.getUuid());
        if (null == userDODetail) {
            throw BizException.fail("待编辑的用户不存在");
        }
        UserAccountPwdDO sysUserAccountByUserId = userAccountPwdMapper.sysUserAccountByUserId(userDODetail.getId());
        UserAccountPwdDO userAccountPwdDO = new UserAccountPwdDO();
        // Fresh salt on every reset; only the salted hash is persisted (the
        // original briefly set the plaintext before overwriting it — removed).
        String salt = StrUtil.genUUID();
        userAccountPwdDO.setSysUserAuthSalt(salt);
        String pwd = Encrypt.SHA512AndSHA256(sysUserAddReq.getSysUserPwd(), salt);
        userAccountPwdDO.setSysUserPwd(pwd);
        userAccountPwdDO.setId(sysUserAccountByUserId.getId());
        RedisUser redisUser = this.redisUser();
        //超级管理员 编辑所有 (super-admin operator may edit everything)
        if (superAdminsService.checkIsSuperAdmin(redisUser.getSysUserPhone())) {
            userAccountPwdMapper.updateByPrimaryKeySelective(userAccountPwdDO);
            return "修改密码成功";
        } else {
            // Target user must not be a super-admin.
            if (superAdminsService.checkIsSuperAdmin(userDODetail.getSysUserPhone())) {
                throw BizException.fail("超级管理员密码不允许编辑");
            }
            //是否被禁用 0否 1禁用 (disabled flag: 0 = active, 1 = disabled)
            List<RoleDO> roleDOS = roleMapper.getRolesByUserId(userDODetail.getId(), 0);
            //角色的类型,0:管理员(老板),1:管理员(员工) 2其他 (role type: 0 = admin/owner, 1 = admin/staff, 2 = other)
            Set<Integer> roleTypes = roleDOS.stream().map(roleDO -> roleDO.getSysRoleType()).collect(Collectors.toSet());
            if (roleTypes.contains(0) && !userDODetail.getId().equals(redisUser.getBaseId())) {
                throw BizException.fail("管理员密码不允许编辑");
            }
            userAccountPwdMapper.updateByPrimaryKeySelective(userAccountPwdDO);
            return "修改密码成功";
        }
    }

    /**
     * Paged user listing for the operator's tenant, with per-row UI button
     * visibility computed from the row's roles and the operator's identity.
     */
    @Override
    public PageInfo<SysUserListRes> sysUserList(SysUserListReq sysUserListReq) {
        RedisUser redisUser = this.redisUser();
        PageHelper.startPage(sysUserListReq.getPageNum(), sysUserListReq.getPageSize(), "id desc");
        List<SysUserListDO> pageSysUserList = userMapper.pageSysUserList(redisUser.getTenantId(), sysUserListReq.getSysUserName(), sysUserListReq.getDisabledFlag());
        PageInfo<SysUserListDO> pageInfo = new PageInfo<>(pageSysUserList);
        List<SysUserListRes> resList = Lists.newArrayList();
        List<SysUserListDO> doList = pageInfo.getList();
        PageInfo<SysUserListRes> pageInfoRes = new PageInfo<>();
        pageInfoRes.setPageNum(pageInfo.getPageNum());
        pageInfoRes.setPageSize(pageInfo.getPageSize());
        pageInfoRes.setTotal(pageInfo.getTotal());
        if (CollectionUtils.isNotEmpty(doList)) {
            doList.forEach(sysUserListDO -> {
                SysUserListRes sysUserListRes = new SysUserListRes();
                BeanUtils.copyProperties(sysUserListDO, sysUserListRes);
                //管理员按钮是否显示 (admin rows start with all buttons hidden)
                List<RoleDO> roles = sysUserListDO.getRoles();
                Set<Integer> rolesSet = roles.stream().map(roleDO -> roleDO.getSysRoleType()).collect(Collectors.toSet());
                //角色的类型,0:管理员(老板),1:管理员(员工) 2其他 (role type: 0 = admin/owner, 1 = admin/staff, 2 = other)
                // Merged the two identical branches for role types 0 and 1.
                if (rolesSet.contains(0) || rolesSet.contains(1)) {
                    sysUserListRes.setEditButtonShow(false);
                    sysUserListRes.setPwdButtonShow(false);
                    sysUserListRes.setDisabledFlagButtonShow(false);
                    sysUserListRes.setRoleButtonShow(false);
                }
                //用户是自己登陆,则显示自己 (the operator's own row is always editable)
                if (sysUserListDO.getId().equals(redisUser.getBaseId())) {
                    sysUserListRes.setEditButtonShow(true);
                    sysUserListRes.setPwdButtonShow(true);
                    sysUserListRes.setDisabledFlagButtonShow(true);
                    sysUserListRes.setRoleButtonShow(true);
                }
                //超级管理员,则显示全部 (super-admin operator sees all buttons)
                if (superAdminsService.checkIsSuperAdmin(redisUser.getSysUserPhone())) {
                    sysUserListRes.setEditButtonShow(true);
                    sysUserListRes.setPwdButtonShow(true);
                    sysUserListRes.setDisabledFlagButtonShow(true);
                    sysUserListRes.setRoleButtonShow(true);
                }
                UserAccountPwdDO userAccountPwdDO = sysUserListDO.getUserAccountPwdDO();
                //密码绑定 (password-account binding details, if present)
                if (null != userAccountPwdDO) {
                    sysUserListRes.setPwdBinding(true);
                    sysUserListRes.setPwdBindingName(userAccountPwdDO.getSysUserLoginName());
                    sysUserListRes.setPwdBindingFlag(userAccountPwdDO.getDisabledFlag());
                    sysUserListRes.setPwdBindingDate(userAccountPwdDO.getCreateTime());
                }
                //用户是自己登陆,则显示自己 (but nobody may toggle their own status)
                if (sysUserListDO.getId().equals(redisUser.getBaseId())) {
                    //屏蔽自己状态 按钮
                    sysUserListRes.setDisabledFlagButtonShow(false);
                }
                resList.add(sysUserListRes);
            });
            pageInfoRes.setList(resList);
            return pageInfoRes;
        }
        return pageInfoRes;
    }
}
|
/**
* Database Migration 1.0.0
*
* This script migrates the suffixed journals and snapshot stores of things and policies before 1.0.0 to
* non-suffixed journals and snapshot stores post 1.0.0.
*
* === Prerequisite ===
*
* - Backup your database.
*
* - Enable server-side-scripting for your MongoDB:
* This script uses server-side scripting to avoid transferring near the entire database across a network interface.
*
* === Usage: Multi-database setup ===
*
* 1. Connect to the MongoDB by Mongo Shell.
* 2. Paste this script into Mongo Shell.
* 3. Type the following into Mongo Shell:
*
* use things # replace 'things' by the name of your things-service database
* migrateThings();
* use policies # replace 'policies' by the name of your policies-service database
* migratePolicies();
*
* === Usage: Single-database setup ===
*
 * 1. Connect to the MongoDB by Mongo Shell.
* 2. Use the single database of your Ditto installation.
* 3. Type the following into Mongo Shell:
*
* migrate();
*
* === Revert ===
*
* If there is an exception before suffixed collections are dropped (i. e., before the line
* "Dropping suffixed collections" show up in the log), migration can be reverted by calling the function
*
* revert()
*
* in things- or policies-database, or in the single database of a single-database setup.
*
* === Index creation ===
*
* Indexes are created by the persistence plugin of Ditto after creating a thing and a policy and after writing a thing-
* and a policy-snapshot. Expect unresponsive persistence or sporadic circuit-breaker errors for some time after service
* startup. To trigger journal and snapshot writes, create a new V2 thing without specifying a policy: journal writes
* happen immediately and snapshot writes happen 15 minutes later under the default configuration.
*/
const THINGS_JOURNAL = 'things_journal';
const THINGS_SNAPS = 'things_snaps';
const POLICIES_JOURNAL = 'policies_journal';
const POLICIES_SNAPS = 'policies_snaps';
// Map-reduce reduce step: passes values through unchanged. Duplicate keys
// therefore surface as an array, which finalizeStep treats as an error.
function reduceStep(key, values) {
    return values;
}
// Map-reduce finalize step: unwraps the single value produced per key.
// An array of length != 1 means a duplicate _id was copied — abort loudly.
function finalizeStep(key, values) {
    if (Array.isArray(values)) {
        if (values.length !== 1) {
            throw JSON.stringify(values);
        }
        return values[0];
    }
    return values;
}
// Prints a command result and throws (as JSON) unless MongoDB reported ok: 1.
// printjson is a Mongo Shell global.
function checkOk(result) {
    printjson(result);
    if (result.ok !== 1) {
        throw JSON.stringify(result);
    }
}
/**
* Copy all documents of source collection into target collection by map-reduce.
* Due to the fixed output schema of map-reduce, the original document is under the field 'value'.
* The target collection retains its previous documents.
* Duplicate IDs abort the operation with an error.
*
* @param sourceCollection Name of the source collection.
* @param targetCollection Name of the target collection.
*/
function copyDocuments(sourceCollection, targetCollection) {
    const sourceJournal = db.getCollection(sourceCollection);
    // If the target already holds documents, merge into it via the 'reduce'
    // output mode; otherwise let map-reduce create the collection outright.
    const targetExists = db.getCollection(targetCollection).count() !== 0;
    const out = targetExists ? { reduce: targetCollection } : targetCollection;
    print(`Copy ${sourceJournal.count()} documents from ${sourceCollection} to ${targetCollection} ...`);
    checkOk(db.runCommand({
        mapReduce: sourceCollection,
        // Key each document by its _id so duplicates collide in reduceStep
        // and are rejected by finalizeStep.
        map: function() { emit(this._id, this); },
        reduce: reduceStep,
        finalize: finalizeStep,
        out: out
    }));
}
// Returns the named collection, throwing if it already contains documents.
function checkEmpty(collectionName) {
    const collection = db.getCollection(collectionName);
    if (collection.count() !== 0) {
        // Fix: report the collection NAME. The original concatenated the
        // collection object, which stringifies into an unhelpful message.
        throw "Target collection " + collectionName + " is not empty!";
    }
    return collection;
}
/**
* Prefix of collections to delete.
*
* @type {string}
*/
const TO_DELETE = 'z_delete_';
// Marks a collection for deletion by renaming it with the TO_DELETE prefix;
// dropAllToDelete() removes it later, revert() can rename it back.
function renameToDelete(collection) {
    const c = db.getCollection(collection);
    checkOk(c.renameCollection(TO_DELETE + collection));
}
/**
 * Convert a journal collection from map-reduce format to event journal format.
 * Map-reduce nests the original document under 'value'; this hoists the
 * journal fields back to the top level and overwrites the collection in place
 * via $out.
 *
 * @param collection The journal collection name.
 */
function unmapJournal(collection) {
    db[collection].aggregate([
        {
            $project:{
                _id: 1,
                pid: '$value.pid',
                from: '$value.from',
                to: '$value.to',
                events: '$value.events',
                v: '$value.v',
                _tg: '$value._tg'
            }
        },
        // $out replaces the collection with the projected documents.
        {$out: collection}
    ]);
}
/**
 * Convert a snapshot store from map-reduce format to snapshot store format.
 * Counterpart of unmapJournal for snapshot documents: hoists the snapshot
 * fields out of the map-reduce 'value' wrapper, rewriting in place via $out.
 *
 * @param collection The snapshot store collection name.
 */
function unmapSnaps(collection) {
    db[collection].aggregate([
        {
            $project:{
                _id: 1,
                pid: '$value.pid',
                sn: '$value.sn',
                ts: '$value.ts',
                s2: '$value.s2'
            }
        },
        // $out replaces the collection with the projected documents.
        {$out: collection}
    ]);
}
/**
 * Migrate journal and snapshot store for things or policies.
 * Copies every suffixed collection ("<name>@<suffix>") into the non-suffixed
 * target, marks the sources for deletion, then unwraps the map-reduce format.
 *
 * @param targetJournalName Name of the target journal collection---must be empty.
 * @param targetSnapsName Name of the target snapshot store collection---must be empty.
 */
function migrateThingsOrPolicies(targetJournalName, targetSnapsName) {
    // Fails fast if a previous (partial) migration left documents behind.
    const targetJournal = checkEmpty(targetJournalName);
    const targetSnaps = checkEmpty(targetSnapsName);
    db.getCollectionNames()
        .filter(name => name.includes(targetJournalName + '@'))
        .forEach(collectionName => {
            copyDocuments(collectionName, targetJournalName);
            renameToDelete(collectionName);
        });
    db.getCollectionNames()
        .filter(name => name.includes(targetSnapsName + '@'))
        .forEach(collectionName => {
            copyDocuments(collectionName, targetSnapsName);
            renameToDelete(collectionName);
        });
    print(`Unmapping ${targetJournal.count()} events ...`);
    unmapJournal(targetJournalName);
    print(`Unmapping ${targetSnaps.count()} snapshots ...`);
    unmapSnaps(targetSnapsName);
    print('Done.');
}
// Migrates the policies journal and snapshot store (run in the policies DB).
function migratePolicies() {
    migrateThingsOrPolicies(POLICIES_JOURNAL, POLICIES_SNAPS);
}
// Migrates the things journal and snapshot store (run in the things DB).
function migrateThings() {
    migrateThingsOrPolicies(THINGS_JOURNAL, THINGS_SNAPS);
}
// Drops every collection previously marked by renameToDelete().
// Irreversible: after this, revert() can no longer restore the old data.
function dropAllToDelete() {
    print("Dropping all suffixed collections...")
    db.getCollectionNames()
        .filter(name => name.includes(TO_DELETE))
        .forEach(collectionName => db.getCollection(collectionName).drop());
    print("Done.")
}
/**
 * Migrate things and policies (single-database setup).
 * If any error aborts the migration, run 'revert()' to restore to previous state.
 */
function migrate() {
    migratePolicies();
    migrateThings();
    // Point of no return: drops the renamed source collections.
    dropAllToDelete();
}
/**
 * Revert the migration: restore the renamed (suffixed) source collections to
 * their original names, then drop the partially-filled target collections.
 * Only safe before dropAllToDelete() has run.
 */
function revert() {
    db.getCollectionNames()
        .filter(name => name.includes(TO_DELETE))
        .forEach(collectionName => db.getCollection(collectionName)
            // Strip the TO_DELETE prefix to recover the original name.
            .renameCollection(collectionName.substring(TO_DELETE.length, collectionName.length)));
    db.getCollection(THINGS_JOURNAL).drop();
    db.getCollection(THINGS_SNAPS).drop();
    db.getCollection(POLICIES_JOURNAL).drop();
    db.getCollection(POLICIES_SNAPS).drop();
}
// Choose one of 'migrate()' or 'revert()'.
// migrate();
// revert();
// Expect high database resource consumption on service startup due to index creation.
|
<reponame>Ziezi/Programming-Principles-and-Practice-Using-C-by-Bjarne-Stroustrup-<gh_stars>1-10
/*
TITLE Containers comparison Chapter20TryThis8.cpp
Bjarne Stroustrup, "Programming: Principles and Practice Using C++."
COMMENT
Objective: Redo the previous Try This using containers of
type int, initialized to { 1, 2, 3, 4, 5 }
Input: -
Output: -
Author: <NAME>
Date: 17. 02. 2017
*/
#include <iostream>
#include <string>
#include <vector>
#include <list>
#include <iterator>
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Counts the elements in the half-open range [beg, end) by walking it,
// like std::distance for forward iterators.
template <class Iterator>
size_t size(Iterator beg, Iterator end)
{
    size_t count = 0;
    for (; beg != end; ++beg)
    {
        ++count;
    }
    return count;
}
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Compares the range [beg_a, end_a) against the fixed sequence {1,2,3,4,5}.
// Returns true only when both contents and length match.
template <class Iterator>
bool compare(Iterator beg_a, Iterator end_a)
{
    int c[ ] = { 1, 2, 3, 4, 5 };
    int* beg_b = c;
    int* end_b = (c + 5);
    // Fix: advance in lockstep with '&&' (the original used '||', which kept
    // looping and dereferenced past the end of the shorter range — UB —
    // whenever the lengths differed).
    while (beg_a != end_a && beg_b != end_b)
    {
        if (*beg_a != *beg_b)
        {
            return false;
        }
        ++beg_a;
        ++beg_b;
    }
    // Equal only if both ranges were exhausted together (same length).
    return beg_a == end_a && beg_b == end_b;
}
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Copies [src_b, src_e) into the range beginning at dest_b, which must
// provide at least as many writable elements.
template <class Iterator>
void copy (Iterator src_b, Iterator src_e, Iterator dest_b)
{
    for (; src_b != src_e; ++src_b, ++dest_b)
    {
        *dest_b = *src_b;
    }
}
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Streams every element of [beg, end) to stdout, then a trailing newline.
template <class Iterator>
void print (Iterator beg, Iterator end)
{
    for (; beg != end; ++beg)
    {
        std::cout << *beg;
    }
    std::cout << '\n';
}
//-----------------------------------------------------------------
//-----------------------------------------------------------------
// Exercises size/compare/copy/print against three container kinds holding
// the same data: a C array, a vector, and a list.
int main()
{
    try
    {
        // Source data: a C array plus vector/list copies of it.
        int c[ ] = { 1, 2, 3, 4, 5 };
        std::vector<int> v(c, (c + 5));
        std::list<int> l(v.begin(), v.end());
        // test size
        std::cout <<"Size: \n";
        std::cout << size(c, (c + 5)) <<'\n';
        std::cout << size(v.begin(), v.end()) <<'\n';
        std::cout << size(l.begin(), l.end()) <<'\n';
        // test compare: all three should match the reference sequence
        std::cout <<"\nComparison: \n";
        if ( compare(c, (c + 5)) &&
             compare(v.begin(), v.end()) &&
             compare(l.begin(), l.end()))
        {
            std::cout <<"same\n";
        }
        else
        {
            std::cout <<"different\n";
        }
        // test copy: destinations are pre-sized to 5 elements
        std::cout <<"\nCopy: \n";
        int c_copy[5];
        copy(c, (c + 5), c_copy);
        print(c_copy, c_copy + 5);
        std::vector<int> v_copy(5);
        copy(v.begin(), v.end(), v_copy.begin());
        print(v_copy.begin(), v_copy.end());
        std::list<int> l_copy(5);
        copy(l.begin(), l.end(), l_copy.begin());
        print(l_copy.begin(), l_copy.end());
    }
    catch (std::exception& e)
    {
        std::cerr << e.what();
    }
    // Wait for a keypress before exiting.
    // NOTE(review): getchar() is declared in <cstdio>, which is not included
    // here — it presumably arrives transitively via <iostream>; confirm.
    getchar ();
}
|
// NOTE(review): System.Random is NOT cryptographically secure; for real
// password generation use System.Security.Cryptography.RandomNumberGenerator.
Random rand = new Random();
// Fix: the original declaration was missing its terminating semicolon.
const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789~!@#$%^&*()-_=+[]{};:,./<>?`";

/// <summary>
/// Generates a random 8-character password drawn from <c>chars</c>.
/// </summary>
/// <returns>An 8-character random string.</returns>
public string GeneratePassword()
{
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < 8; i++)
    {
        int index = rand.Next(0, chars.Length);
        builder.Append(chars[index]);
    }
    return builder.ToString();
}
|
<reponame>slaufer/Prebid.js
import {
reconciliationSubmodule,
track,
stringify,
getTopIFrameWin,
getSlotByWin
} from 'modules/reconciliationRtdProvider.js';
import { makeSlot } from '../integration/faker/googletag.js';
import * as utils from 'src/utils.js';
// Unit tests for the Reconciliation RTD provider: module init, GPT slot
// targeting, tracking calls, query-string building and iframe helpers.
// Fixes applied: stray double semicolons removed, "initializied" typo in a
// test title corrected, unused local iframe2Win dropped from the
// broken-chain test.
describe('Reconciliation Real time data submodule', function () {
  const conf = {
    dataProviders: [{
      'name': 'reconciliation',
      'params': {
        'publisherMemberId': 'test_prebid_publisher'
      },
    }]
  };
  let trackPostStub, trackGetStub;
  // Stub the network-facing trackers so tests never hit the wire.
  beforeEach(function () {
    trackPostStub = sinon.stub(track, 'trackPost');
    trackGetStub = sinon.stub(track, 'trackGet');
  });
  afterEach(function () {
    trackPostStub.restore();
    trackGetStub.restore();
  });
  describe('reconciliationSubmodule', function () {
    describe('initialization', function () {
      let utilsLogErrorSpy;
      before(function () {
        utilsLogErrorSpy = sinon.spy(utils, 'logError');
      });
      after(function () {
        utils.logError.restore();
      });
      it('successfully instantiates', function () {
        expect(reconciliationSubmodule.init(conf.dataProviders[0])).to.equal(true);
      });
      it('should log error if initialized without parameters', function () {
        expect(reconciliationSubmodule.init({'name': 'reconciliation', 'params': {}})).to.equal(true);
        expect(utilsLogErrorSpy.calledOnce).to.be.true;
      });
    });
    describe('getData', function () {
      it('should return data in proper format', function () {
        makeSlot({code: '/reconciliationAdunit1', divId: 'reconciliationAd1'});
        const targetingData = reconciliationSubmodule.getTargetingData(['/reconciliationAdunit1']);
        expect(targetingData['/reconciliationAdunit1'].RSDK_AUID).to.eql('/reconciliationAdunit1');
        expect(targetingData['/reconciliationAdunit1'].RSDK_ADID).to.be.a('string');
      });
      it('should return unit path if called with divId', function () {
        makeSlot({code: '/reconciliationAdunit2', divId: 'reconciliationAd2'});
        const targetingData = reconciliationSubmodule.getTargetingData(['reconciliationAd2']);
        expect(targetingData['reconciliationAd2'].RSDK_AUID).to.eql('/reconciliationAdunit2');
        expect(targetingData['reconciliationAd2'].RSDK_ADID).to.be.a('string');
      });
      it('should skip empty adUnit id', function () {
        makeSlot({code: '/reconciliationAdunit3', divId: 'reconciliationAd3'});
        const targetingData = reconciliationSubmodule.getTargetingData(['reconciliationAd3', '']);
        expect(targetingData).to.have.all.keys('reconciliationAd3');
      });
    });
    describe('track events', function () {
      it('should track init event with data', function () {
        const adUnit = {
          code: '/adunit'
        };
        reconciliationSubmodule.getTargetingData([adUnit.code]);
        expect(trackPostStub.calledOnce).to.be.true;
        expect(trackPostStub.getCalls()[0].args[0]).to.eql('https://confirm.fiduciadlt.com/init');
        expect(trackPostStub.getCalls()[0].args[1].adUnits[0].adUnitId).to.eql(adUnit.code);
        expect(trackPostStub.getCalls()[0].args[1].adUnits[0].adDeliveryId).be.a('string');
        expect(trackPostStub.getCalls()[0].args[1].publisherMemberId).to.eql('test_prebid_publisher');
      });
    });
    describe('stringify parameters', function () {
      it('should return query for flat object', function () {
        const parameters = {
          adUnitId: '/adunit',
          adDeliveryId: '12345'
        };
        expect(stringify(parameters)).to.eql('adUnitId=%2Fadunit&adDeliveryId=12345');
      });
      it('should return query with nested parameters', function () {
        const parameters = {
          adUnitId: '/adunit',
          adDeliveryId: '12345',
          ext: {
            adSize: '300x250',
            adType: 'banner'
          }
        };
        expect(stringify(parameters)).to.eql('adUnitId=%2Fadunit&adDeliveryId=12345&ext=adSize%3D300x250%26adType%3Dbanner');
      });
    });
    describe('get topmost iframe', function () {
      /**
       * - top
       * -- iframe.window <-- top iframe window
       * --- iframe.window
       * ---- iframe.window <-- win
       */
      const mockFrameWin = (topWin, parentWin) => {
        return {
          top: topWin,
          parent: parentWin
        }
      }
      it('should return null if called with null', function() {
        expect(getTopIFrameWin(null)).to.be.null;
      });
      it('should return null if there is an error in frames chain', function() {
        const topWin = {};
        const iframe1Win = mockFrameWin(topWin, null); // break chain
        expect(getTopIFrameWin(iframe1Win, topWin)).to.be.null;
      });
      it('should get the topmost iframe', function () {
        const topWin = {};
        const iframe1Win = mockFrameWin(topWin, topWin);
        const iframe2Win = mockFrameWin(topWin, iframe1Win);
        expect(getTopIFrameWin(iframe2Win, topWin)).to.eql(iframe1Win);
      });
    });
    describe('get slot by nested iframe window', function () {
      it('should return the slot', function () {
        const adSlotElement = document.createElement('div');
        const adSlotIframe = document.createElement('iframe');
        adSlotElement.id = 'reconciliationAd';
        adSlotElement.appendChild(adSlotIframe);
        document.body.appendChild(adSlotElement);
        const adSlot = makeSlot({code: '/reconciliationAdunit', divId: adSlotElement.id});
        expect(getSlotByWin(adSlotIframe.contentWindow)).to.eql(adSlot);
      });
      it('should return null if the slot is not found', function () {
        const adSlotElement = document.createElement('div');
        const adSlotIframe = document.createElement('iframe');
        adSlotElement.id = 'reconciliationAd';
        document.body.appendChild(adSlotElement);
        document.body.appendChild(adSlotIframe); // iframe is not in ad slot
        const adSlot = makeSlot({code: '/reconciliationAdunit', divId: adSlotElement.id});
        expect(getSlotByWin(adSlotIframe.contentWindow)).to.be.null;
      });
    });
    describe('handle postMessage from Reconciliation Tag in ad iframe', function () {
      it('should track impression pixel with parameters', function (done) {
        const adSlotElement = document.createElement('div');
        const adSlotIframe = document.createElement('iframe');
        adSlotElement.id = 'reconciliationAdMessage';
        adSlotElement.appendChild(adSlotIframe);
        document.body.appendChild(adSlotElement);
        const adSlot = makeSlot({code: '/reconciliationAdunit', divId: adSlotElement.id});
        // Fix targeting methods
        adSlot.targeting = {};
        adSlot.setTargeting = function(key, value) {
          this.targeting[key] = [value];
        };
        adSlot.getTargeting = function(key) {
          return this.targeting[key];
        };
        adSlot.setTargeting('RSDK_AUID', '/reconciliationAdunit');
        adSlot.setTargeting('RSDK_ADID', '12345');
        adSlotIframe.contentDocument.open();
        adSlotIframe.contentDocument.write(`<script>
        window.parent.postMessage(JSON.stringify({
          type: 'rsdk:impression:req',
          args: {
            sourceMemberId: 'test_member_id',
            sourceImpressionId: '123'
          }
        }), '*');
        </script>`);
        adSlotIframe.contentDocument.close();
        // postMessage delivery is asynchronous; give the handler time to fire.
        setTimeout(() => {
          expect(trackGetStub.calledOnce).to.be.true;
          expect(trackGetStub.getCalls()[0].args[0]).to.eql('https://confirm.fiduciadlt.com/imp');
          expect(trackGetStub.getCalls()[0].args[1].adUnitId).to.eql('/reconciliationAdunit');
          expect(trackGetStub.getCalls()[0].args[1].adDeliveryId).to.eql('12345');
          expect(trackGetStub.getCalls()[0].args[1].sourceMemberId).to.eql('test_member_id');
          expect(trackGetStub.getCalls()[0].args[1].sourceImpressionId).to.eql('123');
          expect(trackGetStub.getCalls()[0].args[1].publisherMemberId).to.eql('test_prebid_publisher');
          done();
        }, 100);
      });
    });
  });
});
|
<gh_stars>0
// Enlarge on hover, return back to initial size when mouse leaves.
// The five identical copy-pasted per-button blocks were folded into a
// single factory. All original global names (buttonElN, animateButtonN,
// enterButtonN, leaveButtonN) are preserved for any external/inline
// handlers that may reference them. Note animateButtonN is now assigned
// via `var` instead of a hoisted declaration; within this script it is
// always assigned before first use.
function createScaleAnimator(el) {
  // Returns an animator bound to one element; cancels any in-flight
  // animation on that element before starting a new one.
  return function (scale, duration, elasticity) {
    anime.remove(el);
    anime({
      targets: el,
      scale: scale,
      duration: duration,
      elasticity: elasticity
    });
  };
}

var buttonEl1 = document.getElementById("CSStransforms1");
var animateButton1 = createScaleAnimator(buttonEl1);
function enterButton1() { animateButton1(1.8, 800, 400) };
function leaveButton1() { animateButton1(1.0, 600, 300) };
buttonEl1.addEventListener('mouseenter', enterButton1, false);
buttonEl1.addEventListener('mouseleave', leaveButton1, false);

var buttonEl2 = document.getElementById("CSStransforms2");
var animateButton2 = createScaleAnimator(buttonEl2);
function enterButton2() { animateButton2(1.8, 800, 400) };
function leaveButton2() { animateButton2(1.0, 600, 300) };
buttonEl2.addEventListener('mouseenter', enterButton2, false);
buttonEl2.addEventListener('mouseleave', leaveButton2, false);

var buttonEl3 = document.getElementById("CSStransforms3");
var animateButton3 = createScaleAnimator(buttonEl3);
function enterButton3() { animateButton3(1.8, 800, 400) };
function leaveButton3() { animateButton3(1.0, 600, 300) };
buttonEl3.addEventListener('mouseenter', enterButton3, false);
buttonEl3.addEventListener('mouseleave', leaveButton3, false);

var buttonEl4 = document.getElementById("CSStransforms4");
var animateButton4 = createScaleAnimator(buttonEl4);
function enterButton4() { animateButton4(1.8, 800, 400) };
function leaveButton4() { animateButton4(1.0, 600, 300) };
buttonEl4.addEventListener('mouseenter', enterButton4, false);
buttonEl4.addEventListener('mouseleave', leaveButton4, false);

var buttonEl5 = document.getElementById("CSStransforms5");
var animateButton5 = createScaleAnimator(buttonEl5);
function enterButton5() { animateButton5(1.8, 800, 400) };
function leaveButton5() { animateButton5(1.0, 600, 300) };
buttonEl5.addEventListener('mouseenter', enterButton5, false);
buttonEl5.addEventListener('mouseleave', leaveButton5, false);
|
#!/usr/bin/env bash
#Description: Kyma with central connector-service plan on GKE. This scripts implements a pipeline that consists of many steps. The purpose is to install and test Kyma on real GKE cluster with central connector-service.
#
#
#Expected vars:
#
# - REPO_OWNER - Set up by prow, repository owner/organization
# - REPO_NAME - Set up by prow, repository name
# - BUILD_TYPE - Set up by prow, pr/master/release
# - DOCKER_PUSH_REPOSITORY - Docker repository hostname
# - DOCKER_PUSH_DIRECTORY - Docker "top-level" directory (with leading "/")
# - KYMA_PROJECT_DIR - directory path with Kyma sources to use for installation
# - CLOUDSDK_CORE_PROJECT - GCP project for all GCP resources used during execution (Service Account, IP Address, DNS Zone, image registry etc.)
# - CLOUDSDK_COMPUTE_REGION - GCP compute region
# - CLOUDSDK_DNS_ZONE_NAME - GCP zone name (not its DNS name!)
# - GOOGLE_APPLICATION_CREDENTIALS - GCP Service Account key file path
# - GKE_CLUSTER_VERSION - GKE cluster version
# - KYMA_ARTIFACTS_BUCKET - GCP bucket
# - MACHINE_TYPE - (optional) GKE machine type
#
#Permissions: In order to run this script you need to use a service account with permissions equivalent to the following GCP roles:
# - Compute Admin
# - Kubernetes Engine Admin
# - Kubernetes Engine Cluster Admin
# - DNS Administrator
# - Service Account User
# - Storage Admin
# - Compute Network Admin
set -o errexit

# Off by default; switched on later only for master builds with a Slack token.
ENABLE_TEST_LOG_COLLECTOR=false

#Exported variables
export TEST_INFRA_SOURCES_DIR="${KYMA_PROJECT_DIR}/test-infra"
export KYMA_SOURCES_DIR="${KYMA_PROJECT_DIR}/kyma"
export TEST_INFRA_CLUSTER_INTEGRATION_SCRIPTS="${TEST_INFRA_SOURCES_DIR}/prow/scripts/cluster-integration/helpers"

# Shared helper libraries (gcloud/kyma/log/utils namespaced functions).
# shellcheck source=prow/scripts/lib/gcloud.sh
source "${TEST_INFRA_SOURCES_DIR}/prow/scripts/lib/gcloud.sh"
# shellcheck source=prow/scripts/lib/kyma.sh
source "${TEST_INFRA_SOURCES_DIR}/prow/scripts/lib/kyma.sh"
# shellcheck source=prow/scripts/lib/log.sh
source "${TEST_INFRA_SOURCES_DIR}/prow/scripts/lib/log.sh"
# shellcheck source=prow/scripts/lib/utils.sh
source "${TEST_INFRA_SOURCES_DIR}/prow/scripts/lib/utils.sh"

# Fail fast when any mandatory environment variable is missing.
requiredVars=(
    REPO_OWNER
    REPO_NAME
    DOCKER_PUSH_REPOSITORY
    KYMA_PROJECT_DIR
    CLOUDSDK_CORE_PROJECT
    CLOUDSDK_COMPUTE_REGION
    CLOUDSDK_DNS_ZONE_NAME
    GOOGLE_APPLICATION_CREDENTIALS
    KYMA_ARTIFACTS_BUCKET
    GCR_PUSH_GOOGLE_APPLICATION_CREDENTIALS
)
utils::check_required_vars "${requiredVars[@]}"
# post_hook runs at the end of a script or on any error
# post_hook runs at the end of the script or on any error (installed via the
# trap below): logs failure context, collects test logs, cleans up GCP
# resources and propagates the original exit status.
function post_hook() {
    #!!! Must be at the beginning of this function !!!
    EXIT_STATUS=$?
    log::info "Cleanup"
    if [ "${ERROR_LOGGING_GUARD}" = "true" ]; then
        # Typo fix in the log message: OCCURED -> OCCURRED.
        log::info "AN ERROR OCCURRED! Take a look at preceding log entries."
    fi
    #Turn off exit-on-error so that next step is executed even if previous one fails.
    set +e
    # collect logs from failed tests before deprovisioning
    kyma::run_test_log_collector "post-master-kyma-gke-central-connector"
    gcloud::cleanup
    MSG=""
    if [[ ${EXIT_STATUS} -ne 0 ]]; then MSG="(exit status: ${EXIT_STATUS})"; fi
    log::info "Job is finished ${MSG}"
    set -e
    exit "${EXIT_STATUS}"
}
# Ensure cleanup runs on normal exit and on interrupt.
trap post_hook EXIT INT

if [[ "${BUILD_TYPE}" == "pr" ]]; then
    log::info "Execute Job Guard"
    "${TEST_INFRA_SOURCES_DIR}/development/jobguard/scripts/run.sh"
fi

# Enforce lowercase
readonly REPO_OWNER=$(echo "${REPO_OWNER}" | tr '[:upper:]' '[:lower:]')
export REPO_OWNER
readonly REPO_NAME=$(echo "${REPO_NAME}" | tr '[:upper:]' '[:lower:]')
export REPO_NAME

# Random suffix keeps GCP resource names unique across concurrent jobs.
RANDOM_NAME_SUFFIX=$(LC_ALL=C tr -dc 'a-z0-9' < /dev/urandom | head -c10)

if [[ "$BUILD_TYPE" == "pr" ]]; then
    # In case of PR, operate on PR number
    readonly COMMON_NAME_PREFIX="gke-central-pr"
    COMMON_NAME=$(echo "${COMMON_NAME_PREFIX}-${PULL_NUMBER}-${RANDOM_NAME_SUFFIX}" | tr "[:upper:]" "[:lower:]")
    KYMA_SOURCE="PR-${PULL_NUMBER}"
elif [[ "$BUILD_TYPE" == "release" ]]; then
    readonly COMMON_NAME_PREFIX="gke-central-rel"
    readonly RELEASE_VERSION=$(cat "VERSION")
    # NOTE(review): the message says RELEASE_VERSION but the file read is
    # VERSION — confirm the wording is intended.
    log::info "Reading release version from RELEASE_VERSION file, got: ${RELEASE_VERSION}"
    KYMA_SOURCE="${RELEASE_VERSION}"
    COMMON_NAME=$(echo "${COMMON_NAME_PREFIX}-${RANDOM_NAME_SUFFIX}" | tr "[:upper:]" "[:lower:]")
else
    # Otherwise (master), operate on triggering commit id
    readonly COMMON_NAME_PREFIX="gke-central-commit"
    readonly COMMIT_ID="${PULL_BASE_SHA::8}"
    COMMON_NAME=$(echo "${COMMON_NAME_PREFIX}-${COMMIT_ID}-${RANDOM_NAME_SUFFIX}" | tr "[:upper:]" "[:lower:]")
    KYMA_SOURCE="${COMMIT_ID}"
fi
### Cluster name must be less than 40 characters!
export CLUSTER_NAME="${COMMON_NAME}"

export GCLOUD_NETWORK_NAME="${COMMON_NAME_PREFIX}-net"
export GCLOUD_SUBNET_NAME="${COMMON_NAME_PREFIX}-subnet"

### For gcloud::provision_gke_cluster
export GCLOUD_PROJECT_NAME="${CLOUDSDK_CORE_PROJECT}"
# NOTE(review): CLOUDSDK_COMPUTE_ZONE is not in requiredVars above — confirm
# the prow job always sets it.
export GCLOUD_COMPUTE_ZONE="${CLOUDSDK_COMPUTE_ZONE}"

#Local variables
DNS_SUBDOMAIN="${COMMON_NAME}"

#Used to detect errors for logging purposes
ERROR_LOGGING_GUARD="true"

log::info "Authenticate"
gcloud::authenticate "${GOOGLE_APPLICATION_CREDENTIALS}"
kyma::install_cli

DNS_DOMAIN="$(gcloud dns managed-zones describe "${CLOUDSDK_DNS_ZONE_NAME}" --format="value(dnsName)")"

log::info "Reserve IP Address for Ingressgateway"
GATEWAY_IP_ADDRESS_NAME="${COMMON_NAME}"
GATEWAY_IP_ADDRESS=$(gcloud::reserve_ip_address "${GATEWAY_IP_ADDRESS_NAME}")
CLEANUP_GATEWAY_IP_ADDRESS="true"
log::info "Created IP Address for Ingressgateway: ${GATEWAY_IP_ADDRESS}"

log::info "Create DNS Record for Ingressgateway IP"
GATEWAY_DNS_FULL_NAME="*.${DNS_SUBDOMAIN}.${DNS_DOMAIN}"
CLEANUP_GATEWAY_DNS_RECORD="true"
gcloud::create_dns_record "${GATEWAY_IP_ADDRESS}" "${GATEWAY_DNS_FULL_NAME}"

log::info "Create ${GCLOUD_NETWORK_NAME} network with ${GCLOUD_SUBNET_NAME} subnet"
gcloud::create_network "${GCLOUD_NETWORK_NAME}" "${GCLOUD_SUBNET_NAME}"

log::info "Provision cluster: \"${CLUSTER_NAME}\""
export GCLOUD_SERVICE_KEY_PATH="${GOOGLE_APPLICATION_CREDENTIALS}"
if [ -z "$MACHINE_TYPE" ]; then
    # NOTE(review): DEFAULT_MACHINE_TYPE is not defined in this script —
    # presumably provided by the job environment; confirm.
    export MACHINE_TYPE="${DEFAULT_MACHINE_TYPE}"
fi
CLEANUP_CLUSTER="true"
gcloud::provision_gke_cluster "$CLUSTER_NAME"

log::info "Generate self-signed certificate"
# ${DNS_DOMAIN%?} strips the final character of the zone's dnsName
# (presumably its trailing dot — confirm against gcloud output).
DOMAIN="${DNS_SUBDOMAIN}.${DNS_DOMAIN%?}"
CERT_KEY=$(utils::generate_self_signed_cert "$DOMAIN")
# NOTE(review): assumes the helper prints the cert on the first line and the
# key on the last — verify against utils::generate_self_signed_cert.
TLS_CERT=$(echo "${CERT_KEY}" | head -1)
TLS_KEY=$(echo "${CERT_KEY}" | tail -1)

log::info "Create Kyma CLI overrides"
cat << EOF > "$PWD/kyma-installer-central-override.yaml"
apiVersion: v1
kind: ConfigMap
metadata:
  name: "installation-config-overrides"
  namespace: "kyma-installer"
  labels:
    installer: overrides
    kyma-project.io/installation: ""
data:
  global.domainName: "${DOMAIN}"
  global.loadBalancerIP: "${GATEWAY_IP_ADDRESS}"
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: "core-test-ui-acceptance-overrides"
  namespace: "kyma-installer"
  labels:
    installer: overrides
    kyma-project.io/installation: ""
    component: core
data:
  test.acceptance.ui.logging.enabled: "true"
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: "istio-overrides"
  namespace: "kyma-installer"
  labels:
    installer: overrides
    kyma-project.io/installation: ""
    component: istio
data:
  kyma_istio_operator: |
    apiVersion: install.istio.io/v1alpha1
    kind: IstioOperator
    metadata:
      namespace: istio-system
    spec:
      components:
        ingressGateways:
          - name: istio-ingressgateway
            k8s:
              service:
                loadBalancerIP: ${GATEWAY_IP_ADDRESS}
                type: LoadBalancer
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: "connector-service-central-overrides"
  namespace: "kyma-installer"
  labels:
    installer: overrides
    kyma-project.io/installation: ""
data:
  tests.application_connector_tests.connector_service.central: "true"
  connector-service.deployment.args.central: "true"
  connector-service.tests.central: "true"
  connection-token-handler.tests.central: "true"
EOF
cat "$PWD/kyma-installer-central-override.yaml"

log::info "Installation triggered"
yes | kyma install \
    --ci \
    -s "${KYMA_SOURCE}" \
    -o "$PWD/kyma-installer-central-override.yaml" \
    --domain "${DOMAIN}" \
    --tls-cert="${TLS_CERT}" \
    --tls-key="${TLS_KEY}" \
    --timeout 30m

if [ -n "$(kubectl get service -n kyma-system apiserver-proxy-ssl --ignore-not-found)" ]; then
    log::info "Create DNS Record for Apiserver proxy IP"
    APISERVER_IP_ADDRESS=$(kubectl get service -n kyma-system apiserver-proxy-ssl -o jsonpath='{.status.loadBalancer.ingress[0].ip}')
    APISERVER_DNS_FULL_NAME="apiserver.${DNS_SUBDOMAIN}.${DNS_DOMAIN}"
    CLEANUP_APISERVER_DNS_RECORD="true"
    gcloud::create_dns_record "${APISERVER_IP_ADDRESS}" "${APISERVER_DNS_FULL_NAME}"
fi

# enable test-log-collector before tests; if prowjob fails before test phase we do not have any reason to enable it earlier
if [[ "${BUILD_TYPE}" == "master" && -n "${LOG_COLLECTOR_SLACK_TOKEN}" ]]; then
    ENABLE_TEST_LOG_COLLECTOR=true
fi

log::info "Test Kyma"
"${TEST_INFRA_SOURCES_DIR}"/prow/scripts/kyma-testing.sh

log::success "Success"

#!!! Must be at the end of the script !!!
ERROR_LOGGING_GUARD="false"
|
<reponame>VEDAVIYAS/NYU-BusTracker-Android
package com.nyubustracker.helpers;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import com.nyubustracker.BuildConfig;
import com.nyubustracker.R;
import com.nyubustracker.activities.MainActivity;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
/**
 * Background task that downloads a URL, parses the response body as JSON and
 * hands the result to the supplied {@link DownloaderHelper}.
 */
public class Downloader extends AsyncTask<String, Void, JSONObject> {
    public static final String CREATED_FILES_DIR = "NYUCachedFiles";
    // NOTE(review): keeping a Context in a static field can leak an Activity
    // on Android — confirm callers always pass an application context.
    private static Context context;
    private final DownloaderHelper helper;

    public Downloader(DownloaderHelper helper, Context mContext) {
        this.helper = helper;
        context = mContext;
    }

    public static Context getContext() {
        return context;
    }

    /**
     * Persists the given JSON under {@code fileName} inside the app-private
     * {@link #CREATED_FILES_DIR} directory. No-op for null/empty JSON.
     *
     * @throws IOException if the file cannot be written
     */
    public static void cache(String fileName, JSONObject jsonObject) throws IOException {
        if (jsonObject != null && !jsonObject.toString().isEmpty()) {
            File path = new File(context.getFilesDir(), CREATED_FILES_DIR);
            // Bug fix: File.mkdir() returns false when the directory already
            // exists, so the old check threw on every cache call after the
            // first in debug builds. Only fail if the directory is still
            // missing after the attempt.
            if (!path.mkdir() && !path.isDirectory() && BuildConfig.DEBUG) {
                throw new RuntimeException("Failed to mkdir.");
            }
            File file = new File(path, fileName);
            BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file));
            try {
                bufferedWriter.write(jsonObject.toString());
            } finally {
                // Close in finally so a failed write cannot leak the stream.
                bufferedWriter.close();
            }
        }
    }

    /**
     * Builds a single {@code param=value} query component with the value
     * URL-encoded in the given charset; returns "" if the charset is unsupported.
     */
    public static String makeQuery(String param, String value, String charset) {
        try {
            return param + "=" + URLEncoder.encode(value, charset);
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        return "";
    }

    @Override
    public JSONObject doInBackground(String... urls) {
        try {
            if (BuildConfig.DEBUG) Log.v(MainActivity.REFACTOR_LOG_TAG, "First url: " + urls[0]);
            // Only the first URL is used; a parse or network failure yields null.
            return new JSONObject(downloadUrl(urls[0]));
        } catch (IOException | JSONException e) {
            //Log.e("JSON", "DownloadURL IO error.");
            e.printStackTrace();
        }
        return null;
    }

    @Override
    protected void onPostExecute(JSONObject result) {
        try {
            helper.parse(result);
            if (BuildConfig.DEBUG)
                Log.v(MainActivity.REFACTOR_LOG_TAG, "helper class: " + helper.getClass() + " (" + MainActivity.downloadsOnTheWire + ")");
            // NOTE(review): bus downloads are excluded from the stitching step
            // — presumably BusDownloaderHelper triggers its own follow-up; confirm.
            if (!helper.getClass().toString().contains("BusDownloaderHelper"))
                MainActivity.pieceDownloadsTogether(context);
        } catch (JSONException e) {
            Log.d(MainActivity.REFACTOR_LOG_TAG, "JSON Exception while parsing in onPostExecute.");
            e.printStackTrace();
        } catch (IOException e) {
            Log.d(MainActivity.REFACTOR_LOG_TAG, "IO Exception while parsing in onPostExecute.");
        }
    }

    // Given a URL, establishes an HttpUrlConnection and retrieves
    // the web page content as a InputStream, which it returns as
    // a string.
    private String downloadUrl(String myUrl) throws IOException {
        InputStream is = null;
        try {
            URL url = new URL(myUrl);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setReadTimeout(10000 /* milliseconds */);
            conn.setConnectTimeout(15000 /* milliseconds */);
            conn.setRequestMethod("GET");
            conn.setDoInput(true);
            conn.setRequestProperty("X-Mashape-Authorization", context.getString(R.string.mashape_api_key));
            // Starts the QUERY
            conn.connect();
            //int response = conn.getResponseCode();
            //Log.d("JSON", "The response is: " + response);
            is = conn.getInputStream();
            // Convert the InputStream into a string
            return readIt(is);
            // Makes sure that the InputStream is closed after the app is
            // finished using it.
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }

    // Reads an InputStream and converts it to a String.
    private String readIt(InputStream stream) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "iso-8859-1"), 128);
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line);
        }
        return sb.toString();
    }
}
|
/**
 * Minimal in-memory photo registry keyed by photo id.
 */
class GooglePhotos {
  constructor() {
    // Backing store: photoId -> photoUrl. Kept as a public field to
    // preserve the original class shape.
    this.photos = new Map();
  }

  /** Registers (or overwrites) a photo URL under the given id. */
  addPhoto(id, url) {
    this.photos.set(id, url);
  }

  /** Removes the photo with the given id; a no-op when absent. */
  deletePhoto(id) {
    this.photos.delete(id);
  }

  /** Looks up a photo URL; undefined when the id is unknown. */
  getPhoto(id) {
    return this.photos.get(id);
  }

  /** All known photo ids, in insertion order. */
  listAllPhotos() {
    return [...this.photos.keys()];
  }
}

export default GooglePhotos;
|
<gh_stars>1-10
import { IAssetInfoResponse, IAssetsInfoResponse, IAssetInfo, IAssetsInfoMap } from 'shared/types/models';
/**
 * Maps a raw snake_case asset payload from the exchange API into the
 * camelCase shape used across the client.
 */
export function convertAssetInfo(asset: IAssetInfoResponse): IAssetInfo {
  const { asset_name, can_deposit, can_withdrawal, withdrawal_fee, image_url, scale } = asset;
  return {
    assetName: asset_name,
    canDeposit: can_deposit,
    canWithdrawal: can_withdrawal,
    withdrawalFee: withdrawal_fee,
    imageUrl: image_url,
    scale,
  };
}
/**
 * Inverse mapping for the options API: camelCase asset info back to the
 * snake_case option keys it expects. Note the target keys use the options
 * API's own spelling (can_withdraw / withdraw_fee), not the response keys.
 */
export function convertAssetInfoToOptions(assetInfo: IAssetInfo) {
  const { scale, canDeposit, canWithdrawal, withdrawalFee } = assetInfo;
  return {
    scale,
    can_deposit: canDeposit,
    can_withdraw: canWithdrawal,
    withdraw_fee: withdrawalFee,
  };
}
/**
 * Converts the bulk assets response into a lookup map keyed by asset id.
 * Uses a single mutable accumulator instead of re-spreading the map on every
 * iteration — the original reduce-with-spread was accidentally O(n^2).
 */
export function convertAssetsInfo(assets: IAssetsInfoResponse): IAssetsInfoMap {
  const map: IAssetsInfoMap = {};
  for (const asset of assets.data) {
    map[asset.id] = convertAssetInfo(asset);
  }
  return map;
}
|
#!/bin/bash
#SBATCH -J aspp
#SBATCH -o log/aspp_32000
#SBATCH --mem=32GB
#SBATCH -t 5-00:00:00
#SBATCH -n 1
#SBATCH -c 8
#SBATCH -p gpu
#SBATCH --gres=gpu -C K80
# Log which GPU(s) were allocated to this job before training starts.
nvidia-smi
# Launch training on LibriSpeech 50h with half-distortion noise config and a
# 32000-sample chunk size (matches the -o log filename above).
python -u train.py --batch_size 10 --epoch 50 --save_path /export/team-mic/zhong/test/aspp_res2d_rev_noise_half_gd \
    --num_workers 2 --warmup 10000000 --net_cfg cfg/workers.cfg \
    --fe_cfg cfg/PASE_aspp_res.cfg --do_eval --data_cfg /export/corpora/LibriSpeech_50h/librispeech_data_50h.cfg \
    --min_lr 0.0005 --fe_lr 0.0005 --data_root /export/corpora/LibriSpeech_50h/wav_sel \
    --dtrans_cfg cfg/distortions/half.cfg \
    --stats data/librispeech_50h_stats.pkl \
    --chunk_size 32000 \
    --random_scale True \
    --backprop_mode base\
    --lr_mode poly \
    --tensorboard True \
    --sup_exec ./sup_cmd.txt --sup_freq 10 --log_freq 100
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve this script's own directory, then move to the repository root.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$DIR/../../.." || exit 1
# Acceptance-test report directory; override with OUTPUT_DIR.
REPORT_DIR=${OUTPUT_DIR:-"$DIR/../../../target/acceptance"}
mkdir -p "$REPORT_DIR"
# Extract the ozone version from pom.xml by stripping XML tags and leading whitespace.
OZONE_VERSION=$(grep "<ozone.version>" "pom.xml" | sed 's/<[^>]*>//g'| sed 's/^[ \t]*//')
DIST_DIR="$DIR/../../dist/target/ozone-$OZONE_VERSION"
if [ ! -d "$DIST_DIR" ]; then
    echo "Distribution dir is missing. Doing a full build"
    "$DIR/build.sh"
fi
# Run the docker-compose based acceptance suite and preserve its exit code.
cd "$DIST_DIR/compose" || exit 1
./test-all.sh "$@"
RES=$?
cp result/* "$REPORT_DIR/"
# Reuse the detailed log page as the summary page.
cp "$REPORT_DIR/log.html" "$REPORT_DIR/summary.html"
exit $RES
|
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.postagger.opennlp;
import com.textocat.textokit.commons.util.ManifestUtils;
import com.textocat.textokit.resource.ClasspathResourceBase;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.ResourceSpecifier;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.jar.Manifest;
import static com.textocat.textokit.postagger.opennlp.POSModelJarManifestBean.ME_VARIANT;
/**
* @author <NAME>
*/
/**
 * Resource holder that loads an OpenNLP POS model from the classpath. When no
 * explicit resource is configured, the model location is discovered through a
 * jar-manifest attribute (see {@link #locateDefaultResourceClassPath()}).
 *
 * @author <NAME>
 */
public class ClasspathPOSModelHolder extends ClasspathResourceBase implements OpenNLPModelHolder<POSModel> {

    /**
     * Builds the conventional classpath location of a POS model archive for
     * the given language code and model variant.
     */
    public static String getClassPath(String langCode, String modelVariant) {
        return String.format("com/textocat/textokit/postagger/opennlp/model/%s-%s.zip",
                langCode, modelVariant);
    }

    private POSModel model;

    @Override
    public boolean initialize(ResourceSpecifier aSpecifier, Map<String, Object> aAdditionalParams) throws ResourceInitializationException {
        if (!super.initialize(aSpecifier, aAdditionalParams)) {
            return false;
        }
        // Eagerly deserialize the model; the stream is closed by try-with-resources.
        try (InputStream modelStream = resource.getInputStream()) {
            model = new POSModel(modelStream);
        } catch (IOException e) {
            throw new ResourceInitializationException(e);
        }
        return true;
    }

    @Override
    protected String locateDefaultResourceClassPath() {
        // Exactly one manifest on the classpath must declare the model-variant attribute.
        List<Manifest> manifests = ManifestUtils.searchByAttributeKey(ME_VARIANT);
        if (manifests.isEmpty()) {
            throw new IllegalStateException("Can't find POSModel in classpath");
        }
        if (manifests.size() > 1) {
            throw new UnsupportedOperationException("There are several POSModels in classpath");
        }
        POSModelJarManifestBean bean = POSModelJarManifestBean.readFrom(manifests.get(0));
        return getClassPath(bean.getLanguageCode(), bean.getModelVariant());
    }

    @Override
    public POSModel getModel() {
        return model;
    }
}
|
#!/bin/sh
# Regression test: run the package after clearing its local dub cache.
# Robustness fixes: quote the cd target and abort if it fails (the following
# rm -rf must never run in the wrong directory); quote the '*' version spec
# so the shell cannot glob-expand it against files in the cwd.
cd "${CURR_DIR}/issue990-download-optional-selected" || exit 1
rm -rf b/.dub
${DUB} remove gitcompatibledubpackage -n --version='*'
${DUB} run || exit 1
|
/**
* Orthanc - A Lightweight, RESTful DICOM Store
* Copyright (C) 2012-2016 <NAME>, Medical Physics
* Department, University Hospital of Liege, Belgium
* Copyright (C) 2017-2020 <NAME>., Belgium
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
**/
#include "../../../../OrthancFramework/Sources/Compression/ZlibCompressor.h"
#include "../../../../OrthancFramework/Sources/SystemToolbox.h"
#include "../../../../OrthancFramework/Sources/OrthancException.h"
#include <stdio.h>
/**
 * Maintenance entry point: reads the file given as argv[1], decompresses it
 * with Orthanc's ZlibCompressor, and writes the result to argv[2] — or to
 * stdout when no output path is given. Returns 0 on success, -1 on error.
 * Progress messages go to stderr so stdout stays clean for the payload.
 */
int main(int argc, const char* argv[])
{
  if (argc != 2 && argc != 3)
  {
    fprintf(stderr, "Maintenance tool to recover a DICOM file that was compressed by Orthanc.\n\n");
    fprintf(stderr, "Usage: %s <input> [output]\n", argv[0]);
    fprintf(stderr, "If \"output\" is not given, the data will be output to stdout\n");
    return -1;
  }

  try
  {
    fprintf(stderr, "Reading the file into memory...\n");
    fflush(stderr);

    std::string content;
    Orthanc::SystemToolbox::ReadFile(content, argv[1]);

    fprintf(stderr, "Decompressing the content of the file...\n");
    fflush(stderr);

    Orthanc::ZlibCompressor compressor;
    std::string uncompressed;
    // NOTE(review): empty input is passed as (NULL, 0); presumably
    // Uncompress accepts that combination — confirm against its contract.
    compressor.Uncompress(uncompressed,
                          content.empty() ? NULL : content.c_str(),
                          content.size());

    fprintf(stderr, "Writing the uncompressed data...\n");
    fflush(stderr);

    if (argc == 3)
    {
      Orthanc::SystemToolbox::WriteFile(uncompressed, argv[2]);
    }
    else
    {
      // Only touch uncompressed[0] when the buffer is non-empty.
      if (uncompressed.size() > 0)
      {
        fwrite(&uncompressed[0], uncompressed.size(), 1, stdout);
      }
    }

    fprintf(stderr, "Done!\n");
  }
  catch (Orthanc::OrthancException& e)
  {
    fprintf(stderr, "Error: %s\n", e.What());
    return -1;
  }

  return 0;
}
|
import React from 'react'
import MDIIcon from '@mdi/react'
import * as Icons from './Icons'
/** Name of any icon re-exported from ./Icons. */
export type IconName = keyof typeof Icons

// TODO: support size and so forth
/** Props accepted by the Icon component. */
interface Props {
  /** Which icon to render — a key of the Icons module. */
  name: IconName
  /** Whether the icon spins; defaulted to false via Icon.defaultProps. */
  spin: boolean
  className?: string
}
/**
 * Thin wrapper around the @mdi/react icon that resolves an SVG path from
 * the local Icons module by name and forwards all remaining props.
 */
export default class Icon extends React.Component<Props> {
  static defaultProps = { spin: false }

  render() {
    const { name, spin, ...rest } = this.props
    const path = Icons[name]
    return <MDIIcon className="mdi-svg" path={path} spin={spin} {...rest} />
  }
}
|
module.exports = {
main (event) {
return true
},
after (event) {
return (event && event.redirect) ? event : {
body: { code: 0, data: event }
}
},
error (err) {
return {
body: { code: 1, message: err ? err.message : 'unknown error' }
}
},
configHttp: {
method: 'get',
path: '/api',
cors: true
}
}
|
def custom_comparison(a_Meta1, a_Meta2):
    """Loose less-than comparison with domain-specific "equal" shortcuts.

    Returns False ("not less") when either side is None, when two strings
    match case-insensitively, or when both values are integer zero;
    otherwise falls back to the plain ``<`` comparison.
    """
    # None never compares as "less than".
    if a_Meta1 is None or a_Meta2 is None:
        return False
    # Case-insensitive string equality counts as "not less".
    if (isinstance(a_Meta1, str) and isinstance(a_Meta2, str)
            and a_Meta1.lower() == a_Meta2.lower()):
        return False
    # Two integer zeros count as "not less".
    if isinstance(a_Meta1, int) and isinstance(a_Meta2, int) and a_Meta1 == a_Meta2 == 0:
        return False
    # Everything else: raw comparison.
    return a_Meta1 < a_Meta2
|
package aws
import (
"os"
"path"
"github.com/spf13/cast"
"github.com/k11n/konstellation/api/v1alpha1"
"github.com/k11n/konstellation/cmd/kon/config"
"github.com/k11n/konstellation/cmd/kon/terraform"
"github.com/k11n/konstellation/cmd/kon/utils"
)
var (
	// Terraform templates extracted before running the aws/vpc module.
	vpcFiles = []string{
		"aws/vpc/main.tf",
		"aws/vpc/tags.tf",
		"aws/vpc/vars.tf",
		"aws/vpc/vpc.tf",
	}
	// Terraform templates for EKS cluster provisioning.
	clusterFiles = []string{
		"aws/cluster/cluster.tf",
		"aws/cluster/data.tf",
		"aws/cluster/iam.tf",
		"aws/cluster/main.tf",
		"aws/cluster/tags.tf",
		"aws/cluster/vars.tf",
	}
	// Terraform templates for linked-account IAM setup.
	linkedAccountFiles = []string{
		"aws/linkedaccount/iam.tf",
		"aws/linkedaccount/main.tf",
		"aws/linkedaccount/tags.tf",
		"aws/linkedaccount/vars.tf",
	}

	// Backend state variables (TemplateOnly: used for templating, not passed
	// as terraform input vars).
	TFStateBucket       = terraform.Var{Name: "state_bucket", TemplateOnly: true}
	TFStateBucketRegion = terraform.Var{Name: "state_bucket_region", TemplateOnly: true}
	TFRegion            = terraform.Var{Name: "region"}
	// vpc & cluster
	TFVPCCidr          = terraform.Var{Name: "vpc_cidr"}
	TFEnableIPv6       = terraform.Var{Name: "enable_ipv6", CreationOnly: true}
	TFAZSuffixes       = terraform.Var{Name: "az_suffixes", CreationOnly: true}
	TFTopology         = terraform.Var{Name: "topology"}
	TFCluster          = terraform.Var{Name: "cluster"}
	TFKubeVersion      = terraform.Var{Name: "kube_version", CreationOnly: true}
	TFSecurityGroupIds = terraform.Var{Name: "security_group_ids", CreationOnly: true}
	TFVPCId            = terraform.Var{Name: "vpc_id", CreationOnly: true}
	TFAdminGroups      = terraform.Var{Name: "admin_groups", CreationOnly: true}
	// linked accounts
	TFAccount  = terraform.Var{Name: "account"}
	TFTargets  = terraform.Var{Name: "targets", CreationOnly: true}
	TFPolicies = terraform.Var{Name: "policies", CreationOnly: true}
	TFOIDCUrl  = terraform.Var{Name: "oidc_url", CreationOnly: true}
	TFOIDCArn  = terraform.Var{Name: "oidc_arn", CreationOnly: true}
)
// ObjContainer pairs a terraform output value with its declared type string.
type ObjContainer struct {
	Type  string
	Value interface{}
}

// TFVPCOutput mirrors the JSON outputs produced by the aws/vpc module.
type TFVPCOutput struct {
	VpcId              string      `json:"vpc_id"`
	Ipv6Cidr           string      `json:"ipv6_cidr"`
	MainRouteTable     string      `json:"main_route_table"`
	PublicSubnets      []*TFSubnet `json:"public_subnets"`
	PublicGateway      string      `json:"public_gateway"`
	PrivateSubnets     []*TFSubnet `json:"private_subnets"`
	PrivateRouteTables []string    `json:"private_route_tables"`
}

// TFSubnet mirrors a single aws_subnet resource as emitted in terraform output.
type TFSubnet struct {
	Id                         string `json:"id"`
	Arn                        string `json:"arn"`
	AssignIpv6OnCreation       bool   `json:"assign_ipv6_address_on_creation"`
	AvailabilityZone           string `json:"availability_zone"`
	AvailabilityZoneId         string `json:"availability_zone_id"`
	CidrBlock                  string `json:"cidr_block"`
	Ipv6CidrBlock              string `json:"ipv6_cidr_block"`
	Ipv6CidrBlockAssociationId string `json:"ipv6_cidr_block_association_id"`
	MapPublicIpOnLaunch        bool   `json:"map_public_ip_on_launch"`
	VpcId                      string `json:"vpc_id"`
}

// TFClusterOutput collects the cluster-level outputs consumed by callers.
type TFClusterOutput struct {
	ClusterName       string
	AlbIngressRoleArn string
	NodeRoleArn       string
	AdminRoleArn      string
}
// NewVPCTFAction prepares a Terraform action that provisions the VPC.
// Template files are extracted into the Konstellation terraform working
// directory; when zones are given, their AZ suffixes are derived from the
// region name and passed through the az_suffixes variable.
func NewVPCTFAction(values terraform.Values, zones []string, opts ...terraform.Option) (a *terraform.Action, err error) {
	// Variables accepted by the VPC module.
	vars := []terraform.Var{
		TFStateBucket,
		TFStateBucketRegion,
		TFRegion,
		TFTopology,
		TFVPCCidr,
		// creation only
		TFAZSuffixes,
		TFEnableIPv6,
	}

	// The private-subnet template is only needed for the public/private topology.
	tfFiles := append([]string{}, vpcFiles...)
	if values[TFTopology] == string(v1alpha1.NetworkTopologyPublicPrivate) {
		tfFiles = append(tfFiles, "aws/vpc/vpc_private_subnet.tf")
	}

	targetDir := path.Join(config.TerraformDir(), "aws", "vpc")
	if err = utils.ExtractBoxFiles(utils.TFResourceBox(), targetDir, tfFiles...); err != nil {
		return
	}

	// Strip the region prefix from each zone name to obtain its suffix
	// (e.g. "us-west-2a" -> "a").
	if len(zones) > 0 {
		regionLen := len(cast.ToString(values[TFRegion]))
		var suffixes []string
		for _, zone := range zones {
			suffixes = append(suffixes, zone[regionLen:])
		}
		values[TFAZSuffixes] = suffixes
	}

	opts = append(opts, values, getAWSCredentials())
	a = terraform.NewTerraformAction(targetDir, vars, opts...)
	return
}
// NewEKSClusterTFAction prepares a Terraform action that creates or updates
// an EKS cluster. Templates are extracted into a per-cluster working
// directory named after the cluster variable.
func NewEKSClusterTFAction(values terraform.Values, opts ...terraform.Option) (a *terraform.Action, err error) {
	vars := []terraform.Var{
		TFStateBucket,
		TFStateBucketRegion,
		TFRegion,
		TFCluster,
		// creation only
		TFKubeVersion,
		TFSecurityGroupIds,
		TFVPCId,
		TFAdminGroups,
	}
	// Use cast.ToString instead of an unchecked .(string) type assertion,
	// which panicked when the cluster value was absent or not a string.
	targetDir := path.Join(config.TerraformDir(), "aws", "cluster", cast.ToString(values[TFCluster]))
	err = utils.ExtractBoxFiles(utils.TFResourceBox(), targetDir, clusterFiles...)
	if err != nil {
		return
	}
	opts = append(opts,
		values,
		getAWSCredentials(),
	)
	a = terraform.NewTerraformAction(targetDir, vars, opts...)
	return
}
// ParseVPCTFOutput decodes `terraform output` JSON from the VPC module.
func ParseVPCTFOutput(data []byte) (tf *TFVPCOutput, err error) {
	oc, err := terraform.ParseOutput(data)
	if err != nil {
		return nil, err
	}
	out := &TFVPCOutput{
		VpcId:          oc.GetString("vpc_id"),
		Ipv6Cidr:       oc.GetString("ipv6_cidr"),
		MainRouteTable: oc.GetString("main_route_table"),
		PublicGateway:  oc.GetString("public_gateway"),
	}
	// NOTE: ParseField results are intentionally unchecked (original behavior).
	oc.ParseField("public_subnets", &out.PublicSubnets)
	oc.ParseField("private_subnets", &out.PrivateSubnets)
	oc.ParseField("private_route_tables", &out.PrivateRouteTables)
	return out, nil
}
// ParseClusterTFOutput decodes `terraform output` JSON from the cluster module.
func ParseClusterTFOutput(data []byte) (tf *TFClusterOutput, err error) {
	oc, err := terraform.ParseOutput(data)
	if err != nil {
		return nil, err
	}
	return &TFClusterOutput{
		ClusterName:       oc.GetString("cluster_name"),
		AlbIngressRoleArn: oc.GetString("cluster_alb_role_arn"),
		NodeRoleArn:       oc.GetString("cluster_node_role_arn"),
		AdminRoleArn:      oc.GetString("cluster_admin_role_arn"),
	}, nil
}
// NewLinkedAccountTFAction prepares a Terraform action for a linked account,
// working in a directory nested under the owning cluster's directory.
func NewLinkedAccountTFAction(values terraform.Values, opts ...terraform.Option) (a *terraform.Action, err error) {
	vars := []terraform.Var{
		TFStateBucket,
		TFStateBucketRegion,
		TFRegion,
		TFCluster,
		TFAccount,
		// create only
		TFTargets,
		TFPolicies,
		TFOIDCUrl,
		TFOIDCArn,
	}
	// Use cast.ToString instead of unchecked .(string) type assertions,
	// which panicked when either value was absent or not a string.
	targetDir := path.Join(config.TerraformDir(), "aws", "cluster",
		cast.ToString(values[TFCluster]), cast.ToString(values[TFAccount]))
	err = utils.ExtractBoxFiles(utils.TFResourceBox(), targetDir, linkedAccountFiles...)
	if err != nil {
		return
	}
	opts = append(opts,
		values,
		getAWSCredentials(),
	)
	a = terraform.NewTerraformAction(targetDir, vars, opts...)
	return
}
// ParseLinkedAccountOutput extracts the IAM role ARN from the linked
// account module's `terraform output` JSON.
func ParseLinkedAccountOutput(data []byte) (roleArn string, err error) {
	oc, err := terraform.ParseOutput(data)
	if err != nil {
		return "", err
	}
	return oc.GetString("role_arn"), nil
}
// getAWSCredentials builds the environment passed to terraform: the stored
// AWS keypair plus HOME, which terraform itself requires
// (https://github.com/hashicorp/terraform/issues/24520).
func getAWSCredentials() terraform.EnvVar {
	home, _ := os.UserHomeDir() // best effort; empty HOME if lookup fails
	awsCreds := config.GetConfig().Clouds.AWS.Credentials
	env := terraform.EnvVar{
		"AWS_ACCESS_KEY_ID":     awsCreds.AccessKeyID,
		"AWS_SECRET_ACCESS_KEY": awsCreds.SecretAccessKey,
		"HOME":                  home,
	}
	return env
}
|
def find_domain(url):
    """Return the host[:port] part of *url*.

    Mirrors the original behavior exactly: the scheme strings are removed
    wherever they occur, then everything after the first '/' is dropped.
    """
    for scheme in ('https://', 'http://'):
        url = url.replace(scheme, '')
    return url.split('/', 1)[0]
|
<gh_stars>10-100
public class HelloBruteForce {

    /** Target string that the random search reproduces character by character. */
    static String output = "Hello Hacktoberfest 2020!";

    /**
     * "Brute forces" the target: repeatedly draws a random printable ASCII
     * character (codes 32..127) and locks it in whenever it matches the next
     * character of the target. The attempt is redrawn in place with '\r'.
     */
    public static void main(String[] args) {
        char guess = ' ';
        String matched = "";
        System.out.print(guess);
        for (int i = 0; i < output.length();) {
            int code = (int) (Math.random() * 96 + 32);
            guess = (char) code;
            System.out.print("\r" + matched + guess);
            if (guess == output.charAt(i)) {
                i++;
                matched += guess;
            }
            try {
                Thread.sleep(2); // slow down so the animation is visible
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
|
#!/bin/bash
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
set -e  # Exit immediately when one of the commands fails.
set -x  # Verbose

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
EXAMPLES_DIR="$(realpath "${SCRIPT_DIR}/../examples")"
PROJECT_EXT=".xcodeproj"
WORKSPACE_EXT=".xcworkspace"

# Keep a list of iOS apps which should be excluded from the CI builds.
SKIPPED_BUILDS="
gesture_classification/ios
classification_by_retrieval/ios
"

# Builds every non-test scheme of the example app located at $1 (an absolute
# path under EXAMPLES_DIR), after installing its CocoaPods dependencies.
function build_ios_example {
  # Check if this directory appears in the skipped builds list.
  RELATIVE_DIR="${1#"${EXAMPLES_DIR}/"}"
  if echo "${SKIPPED_BUILDS}" | grep -qx "${RELATIVE_DIR}"; then
    echo "WARNING: Skipping build for ${RELATIVE_DIR}."
    return 0
  fi
  echo "=== BUILD STARTED: ${RELATIVE_DIR} ==="
  pushd "$1" > /dev/null
  # Cleanly install the dependencies
  # Retry a few times to workaround intermittent download errors.
  MAX_RETRY=3
  INSTALLED=false
  for i in $(seq 1 ${MAX_RETRY})
  do
    echo "Trying to install dependencies... (trial $i)"
    if pod install --verbose --repo-update --clean-install; then
      INSTALLED=true
      break
    fi
  done
  if [[ "${INSTALLED}" == false ]]; then
    echo "Exceeded the max retry limit (${MAX_RETRY}) of pod install command."
    exit 1
  fi
  # Extract the scheme names.
  PROJECT_NAME="$(find * -maxdepth 0 -type d -name "*${PROJECT_EXT}")"
  WORKSPACE_NAME="$(find * -type d -name "*${WORKSPACE_EXT}")"
  SCHEMES="$(xcodebuild -list -project "${PROJECT_NAME}" -json | jq -r ".project.schemes[]")"
  # Build each scheme without code signing.
  for scheme in ${SCHEMES}; do
    # Due to an unknown issue prior to Xcode 11.4, a non-existing test scheme
    # might appear in the list of project schemes. For now, if a scheme name
    # contains the word "Tests", skip the build for that particular scheme.
    if [[ "${scheme}" == *"Tests"* ]]; then
      continue
    fi
    echo "--- BUILDING SCHEME ${scheme} FOR PROJECT ${RELATIVE_DIR} ---"
    # pipefail so a failed xcodebuild is not masked by xcpretty's exit status.
    set -o pipefail && xcodebuild \
      CODE_SIGN_IDENTITY="" \
      CODE_SIGNING_REQUIRED="NO" \
      CODE_SIGN_ENTITLEMENTS="" \
      CODE_SIGNING_ALLOWED="NO" \
      ARCHS="arm64" \
      -scheme "${scheme}" \
      -workspace "${WORKSPACE_NAME}" \
      | xcpretty # Pretty print the build output.
    echo "--- FINISHED BUILDING SCHEME ${scheme} FOR PROJECT ${RELATIVE_DIR} ---"
  done
  popd > /dev/null
  echo "=== BUILD FINISHED: ${RELATIVE_DIR} ==="
  echo
  echo
}

build_ios_example "$1"
|
/*
* Copyright © 2018, 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.opentracing.inmemory;
import io.servicetalk.opentracing.inmemory.api.InMemoryReference;
import io.servicetalk.opentracing.inmemory.api.InMemorySpan;
import io.servicetalk.opentracing.inmemory.api.InMemorySpanContext;
import io.opentracing.Span;
import io.opentracing.SpanContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import javax.annotation.Nullable;
import static java.util.Collections.unmodifiableList;
import static java.util.Objects.requireNonNull;
/**
* Span object used by the {@link DefaultInMemoryTracer}.
*/
abstract class AbstractInMemorySpan implements InMemorySpan {

    private static final Logger logger = LoggerFactory.getLogger(AbstractInMemorySpan.class);

    private final InMemorySpanContext context;
    private final List<InMemoryReference> references;
    // Mutable on purpose: setOperationName may rename the span after creation.
    String operationName;

    /**
     * Instantiates a new {@link AbstractInMemorySpan}.
     *
     * @param operationName the operation name.
     * @param references a {@link List} of {@link InMemoryReference}s.
     * @param context the {@link SpanContext} associated with this {@link Span}
     */
    AbstractInMemorySpan(String operationName, List<InMemoryReference> references, InMemorySpanContext context) {
        this.context = requireNonNull(context);
        this.operationName = operationName;
        this.references = references;
    }

    @Override
    public final InMemorySpanContext context() {
        return context;
    }

    @Override
    public final String operationName() {
        return operationName;
    }

    /**
     * Returns an unmodifiable view of the references passed at construction.
     */
    @Override
    public final List<InMemoryReference> references() {
        return unmodifiableList(references);
    }

    @Override
    public final Span setOperationName(String operationName) {
        this.operationName = operationName;
        return this;
    }

    @Override
    public final Span setBaggageItem(String key, String value) {
        // Not supported, silently ignore to avoid breaking third party code.
        logger.debug("setBaggageItem() is not supported");
        return this;
    }

    @Nullable
    @Override
    public final String getBaggageItem(String key) {
        // Baggage is unsupported, so there is never a value to return.
        logger.debug("getBaggageItem() is not supported");
        return null;
    }
}
|
#!/bin/sh -l
# Print a greeting; the `sh -c` wrapper is the common pattern for GitHub
# Action entrypoints and is equivalent here to running echo directly.
sh -c "echo Hello world my name is Alexander"
|
<reponame>jiamaozheng/imlab_merging_sqlite_db
import os, pandas, glob, sqlite3, csv, sys, time, argparse
import urllib2, boto3, json, pandas, time, os, sys, logging, argparse
from datetime import datetime
import uuid as myuuid
from botocore.exceptions import ClientError
__author__ = "<NAME> <<EMAIL>>"
__version__ = "Revision: 0.0.1"
__date__ = "Date: 2017-11-28"
# usages:
# 1) python merging_sqlites_v6p_old.py -m DGN-HapMap-2015.sqlite -i DGN-HapMap-2015 -o DGN-HapMap-2015 -l DGN-HapMap-2015
# 2) python merging_sqlites_v6p_old.py -m GTEx-V6p-1KG-2016-11-16.sqlite -i GTEx-V6p-1KG-2016-11-16 -o GTEx-V6p-1KG-2016-11-16 -l GTEx-V6p-1KG-2016-11-16
# 3) python merging_sqlites_v6p_old.py -m GTEx-V6p-HapMap-2016-09-08.sqlite -l GTEx-V6p-HapMap-2016-09-08 -i GTEx-V6p-HapMap-2016-09-08 -o GTEx-V6p-HapMap-2016-09-08
class SqliteDBMerged(object):
    """Merge per-tissue PredictDB sqlite databases into a single sqlite db
    plus combined CSV exports (weights / extra / construction / sample_info).

    Expected call order (see ``main``): get_args() -> getLog() -> merge().
    """

    def __init__(self):
        # BUG FIX: this was previously misspelled ``__init_`` so it never ran
        # and instances started with no attributes at all.
        self.logger = ''            # logging.Logger, configured by getLog()
        self.input_path = ''        # directory holding the individual .db files
        self.output_path = ''       # directory for the merged db and csv output
        self.log_path = ''          # directory receiving the log file
        # File name of the merged sqlite db. Renamed from the original
        # ``merged_sqlite_db_name`` for consistency: get_args() and merge()
        # always use ``merged_db_name``.
        self.merged_db_name = ''

    # Logging function
    def getLog(self):
        """Attach a file handler writing INFO-level logs to <log_path>/<uuid>.log."""
        log_file_name = ''
        if self.log_path != '':
            if self.log_path[-1] != '/':
                self.log_path = self.log_path + '/'
            log_file_name = self.log_path + str(myuuid.uuid4()) + '.log'
        self.logger = logging.getLogger()
        fhandler = logging.FileHandler(filename=log_file_name, mode='w')
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fhandler.setFormatter(formatter)
        self.logger.addHandler(fhandler)
        self.logger.setLevel(logging.INFO)

    # Funtion to get a pretty string for a given number of seconds.
    def timeString(self, seconds):
        """Return a duration like "0d:1h:1m:1s", or "<1s" for zero seconds."""
        t = time.gmtime(seconds)  # renamed from ``tuple`` (shadowed the builtin)
        days = t[2] - 1           # tm_mday is 1-based
        hours = t[3]
        mins = t[4]
        secs = t[5]
        if sum([days, hours, mins, secs]) == 0:
            return "<1s"
        string = str(days) + "d"
        string += ":" + str(hours) + "h"
        string += ":" + str(mins) + "m"
        string += ":" + str(secs) + "s"
        return string

    # Get arguments
    def get_args(self):
        """Parse the command line and normalize the path arguments
        (create missing directories, ensure trailing slashes)."""
        # setup commond line arguments
        parser = argparse.ArgumentParser()
        # merged db name
        parser.add_argument('-m', '--merged_db_name', required=True, default='', type=str, help='e.g. gtex-v6p-1kg-2016-08-18.sqlite')
        # output path
        parser.add_argument('-o', '--output_path', required=True, default='', type=str, help='a directory path you choosen to save merged sqlite db output')
        # log path
        parser.add_argument('-l', '--log_path', required=True, default='', type=str, help='a directory path you choosen to store log')
        # input path
        parser.add_argument('-i', '--input_path', required=True, default='', type=str, help='a directory path that hold all individual sqlite db')
        # parse the arguments
        args = parser.parse_args()
        self.output_path = args.output_path.strip()
        self.log_path = args.log_path.strip()
        self.merged_db_name = args.merged_db_name.strip()
        self.input_path = args.input_path.strip()
        if self.output_path != '' and not os.path.exists(self.output_path):
            os.makedirs(self.output_path)
        if self.log_path != '' and not os.path.exists(self.log_path):
            os.makedirs(self.log_path)
        if self.output_path != '':
            if self.output_path[-1] != '/':
                self.output_path = self.output_path + '/'
        if self.input_path != '':
            if self.input_path[-1] != '/':
                self.input_path = self.input_path + '/'

    # merge
    def merge(self):
        """Merge every ``*.db`` under input_path into one sqlite database,
        tagging each row with its tissue name, and export per-table CSVs
        that are finally concatenated into ``*_final.csv`` files."""
        # create a new database with the four PredictDB tables
        predictdb_all = self.output_path + self.merged_db_name
        connection = sqlite3.connect(predictdb_all)
        ccc = connection.cursor()
        ccc.execute("DROP TABLE IF EXISTS weights")
        ccc.execute("CREATE TABLE weights (rsid text NOT NULL, gene text NOT NULL, weight real NULL, ref_allele text NULL, eff_allele text NULL, tissue text NOT NULL, PRIMARY KEY(rsid, gene, tissue))")
        ccc.execute("DROP TABLE IF EXISTS extra")
        ccc.execute("CREATE TABLE extra (gene text NOT NULL, genename text NOT NULL, pred_perf_R2 text NULL, n_snps_in_model integer NULL, pred_perf_pval real NULL, pred_perf_qval real NULL, tissue text NOT NULL, PRIMARY KEY(gene, tissue))")
        ccc.execute("DROP TABLE IF EXISTS construction")
        ccc.execute("CREATE TABLE construction (chr integer NOT NULL, cv_seed integer NOT NULL, tissue text NOT NULL, PRIMARY KEY (chr, tissue))")
        ccc.execute("DROP TABLE IF EXISTS sample_info")
        ccc.execute("CREATE TABLE sample_info (n_samples integer NOT NULL, tissue text NOT NULL, PRIMARY KEY (tissue))")

        # merge all sqlite databases into one sqlite database
        tableName = ['construction', 'extra', 'sample_info', 'weights']
        dbFileList = glob.glob(self.input_path + "*.db")
        database_names = []
        for dbFilename in dbFileList:
            database_names.append(dbFilename)
        for i in range(len(database_names)):
            print(database_names[i])
            conn = sqlite3.connect(database_names[i])
            c = conn.cursor()
            # Derive the tissue name from the file name. DGN-WB files keep
            # the whole basename; others drop a 3-character prefix.
            # NOTE(review): the "[:-2]" / "[3:]" slicing encodes the naming
            # convention of the source files — confirm before reusing on
            # differently named databases.
            tissue_name = database_names[i].split('.')[0][:-2]
            if 'DGN-WB' in database_names[i].split('/')[-1]:
                tissue_name = tissue_name.split('/')[len(tissue_name.split('/'))-1]
            else:
                tissue_name = tissue_name.split('/')[len(tissue_name.split('/'))-1][3:]
            print(tissue_name)
            for table_name in tableName:
                # Tag every row with its tissue (ignore failures when the
                # column already exists from a previous run).
                try:
                    c.execute("alter table '%s' " %table_name + ' add column tissue TEXT')
                    c.execute('update %s' %table_name + " set tissue = '%s' " %tissue_name)
                except Exception as e:
                    print(e)
                c.execute('select * from %s' %table_name)
                output = c.fetchall()
                # csv: one file per (tissue, table), with table-specific headers
                csv_writer = ''
                if table_name == 'construction':
                    csv_writer = csv.writer(open(self.output_path + self.merged_db_name.split('.')[0] + "_" + tissue_name + "_" + table_name + ".csv", "w"))
                    csv_writer.writerow(['chr', 'cv.seed', 'tissue'])
                elif table_name == 'extra':
                    csv_writer = csv.writer(open(self.output_path + self.merged_db_name.split('.')[0] + "_" + tissue_name + "_" + table_name + ".csv", "w"))
                    csv_writer.writerow(['gene', 'genename', 'pred.perf.R2', 'n.snps.in.model', 'pred.perf.pval', 'pred.perf.qval', 'tissue'])
                elif table_name == 'weights':
                    csv_writer = csv.writer(open(self.output_path + self.merged_db_name.split('.')[0] + "_" + tissue_name + "_" + table_name + ".csv", "w"))
                    csv_writer.writerow(['rsid', 'gene', 'weight', 'ref_allele', 'eff_allele', 'tissue'])
                else:
                    csv_writer = csv.writer(open(self.output_path + self.merged_db_name.split('.')[0] + "_" + tissue_name + "_" + table_name + ".csv", "w"))
                    csv_writer.writerow(['n.samples', 'tissue'])
                csv_writer.writerows(output)
                # sqlite db: copy the rows into the merged database
                for row in output:
                    if table_name == 'construction':
                        ccc.execute("insert into %s VALUES(?, ?, ?)" %table_name, row)
                    elif table_name == 'extra':
                        ccc.execute("insert into %s VALUES(?, ?, ?, ?, ?, ?, ?)" %table_name, row)
                    elif table_name == 'weights':
                        ccc.execute("insert into %s VALUES(?, ?, ?, ?, ?, ?)" %table_name, row)
                    else:
                        ccc.execute("insert into %s VALUES(?, ?)" %table_name, row)
            # commit and close the per-tissue db
            conn.commit()
            conn.close()
        # commit and close the merged db
        connection.commit()
        connection.close()

        # concat the per-tissue csv files into one *_final.csv per table,
        # deleting the intermediates as they are consumed
        merged_extra = glob.glob(self.output_path + '*extra.csv')
        merged_weights = glob.glob(self.output_path + '*weights.csv')
        merged_sample_info = glob.glob(self.output_path + '*sample_info.csv')
        merged_construction = glob.glob(self.output_path + '*construction.csv')
        for file_list in [merged_extra, merged_construction, merged_weights, merged_sample_info]:
            merged_final = ''
            merged = []
            for filename in file_list:
                merged.append(pandas.read_csv(filename))
                os.system('rm %s' %filename)
                print('remove %s' %filename)
            merged_final = pandas.concat(merged, axis=0)
            if 'extra' in file_list[0]:
                merged_final.to_csv(self.output_path + self.merged_db_name.split('.')[0] + "_" + 'extra_final.csv', index=None)
            elif 'weights' in file_list[0]:
                merged_final.to_csv(self.output_path + self.merged_db_name.split('.')[0] + "_" + 'weights_final.csv', index=None)
            elif 'construction' in file_list[0]:
                merged_final.to_csv(self.output_path + self.merged_db_name.split('.')[0] + "_" + 'construction_final.csv', index=None)
            else:
                merged_final.to_csv(self.output_path + self.merged_db_name.split('.')[0] + "_" + 'sample_info_final.csv', index=None)
def main():
    # Instantiate the merger, parse arguments, set up logging, run the merge,
    # and report the elapsed time both to stdout and the log file.
    start_time = time.time()
    sqliteDBMerged = SqliteDBMerged()
    sqliteDBMerged.get_args()
    sqliteDBMerged.getLog()
    # merge
    sqliteDBMerged.merge()
    msg = "\nElapsed Time: " + sqliteDBMerged.timeString(time.time() - start_time) # calculate how long the program is running
    sqliteDBMerged.logger.info(msg)
    print(msg)
    msg = "\nDate: " + datetime.now().strftime('%Y-%m-%d') + "\n"
    sqliteDBMerged.logger.info(msg)
    print(msg)
    # NOTE(review): main() returns None, so sys.exit(main()) exits with status 0.

# INITIALIZE
if __name__ == '__main__':
    sys.exit(main())
|
// Left-pad a number to two digits ("07"), leaving 2+ digit values as-is.
function toTwoDigits(num) {
  if (num < 10) {
    return '0' + num;
  }
  return '' + num;
}

// Format a Date as "YYYY-MM-DD hh:mm:ss" using local time components.
function dateInNiceFormat(date) {
  const day = [
    date.getFullYear(),
    toTwoDigits(date.getMonth() + 1),
    toTwoDigits(date.getDate()),
  ];
  const clock = [
    toTwoDigits(date.getHours()),
    toTwoDigits(date.getMinutes()),
    toTwoDigits(date.getSeconds()),
  ];
  return day.join('-') + ' ' + clock.join(':');
}
|
#!/bin/bash
# Install the "Symfonic" color scheme into Pantheon Terminal.
# The heredoc is dconf keyfile syntax consumed verbatim by `dconf load`;
# do not add comments inside it.
dconf load /org/pantheon/terminal/settings/ <<COLORS
[/]
name='Symfonic'
cursor-color='#dc322f'
foreground='#ffffff'
background='rgba(0,0,0,.95)'
palette='#000000:#dc322f:#56db3a:#ff8400:#0084d4:#b729d9:#ccccff:#ffffff:#1b1d21:#dc322f:#56db3a:#ff8400:#0084d4:#b729d9:#ccccff:#ffffff'
COLORS
|
#!/bin/bash
# Auto-generated Kaldi queue wrapper: runs one diagnostic command
# (nnet-compute-prob), records host/start/accounting info in its log, and
# touches a "done" marker for the scheduler on success.
cd /home/nlpserver/zzilong/kaldi/egs/supermarket-product
. ./path.sh
# Write a log header: host, start time, and the exact command being run.
( echo '#' Running on `hostname`
echo '#' Started at `date`
echo -n '# '; cat <<EOF
nnet-compute-prob exp/nnet4a/183.mdl ark:exp/nnet4a/egs/valid_diagnostic.egs
EOF
) >exp/nnet4a/log/compute_prob_valid.183.log
time1=`date +"%s"`
( nnet-compute-prob exp/nnet4a/183.mdl ark:exp/nnet4a/egs/valid_diagnostic.egs ) 2>>exp/nnet4a/log/compute_prob_valid.183.log >>exp/nnet4a/log/compute_prob_valid.183.log
ret=$?
time2=`date +"%s"`
echo '#' Accounting: time=$(($time2-$time1)) threads=1 >>exp/nnet4a/log/compute_prob_valid.183.log
echo '#' Finished at `date` with status $ret >>exp/nnet4a/log/compute_prob_valid.183.log
# Exit status 137 (SIGKILL — typically the queue's memory limit) is mapped
# to 100 so the scheduler can distinguish "killed" from an ordinary failure.
[ $ret -eq 137 ] && exit 100;
touch exp/nnet4a/q/done.2648
exit $[$ret ? 1 : 0]
## submitted with:
# qsub -v PATH -cwd -S /bin/bash -j y -l arch=*64* -o exp/nnet4a/q/compute_prob_valid.183.log -l mem_free=10G,ram_free=2G,arch=*64 /home/nlpserver/zzilong/kaldi/egs/supermarket-product/exp/nnet4a/q/compute_prob_valid.183.sh >>exp/nnet4a/q/compute_prob_valid.183.log 2>&1
|
import React from 'react';
import { connect } from 'react-redux';
import { setTodoFilter } from 'store/actions';
import { TODO_FILTER_OPTIONS } from 'app/routes/Todos/constants';
class TodosFilterControlsContainer extends React.Component {
renderTodosButtons = () => (
TODO_FILTER_OPTIONS.map(option => (
<button className={ this.props.todoFilter === option ? 'main-btn-active' : 'main-btn' } onClick={ () => this.props.setTodoFilter(option) }>
{ option }
</button>
))
)
render() {
return (
<section className="fit-center">
{ this.renderTodosButtons() }
</section>
);
}
};
// Expose the store's current filter as a prop.
const mapStateToProps = (state/* , ownProps */) => ({
  todoFilter: state.todoFilter,
});

// Dispatch a filter change when a button is clicked.
const mapDispatchToProps = (dispatch/* , ownProps */) => ({
  setTodoFilter: filter => dispatch(setTodoFilter(filter)),
});

const ConnectedTodosFilterControls = connect(mapStateToProps, mapDispatchToProps)(TodosFilterControlsContainer);

export default ConnectedTodosFilterControls;
|
#! /bin/bash -e
# Refresh the filesystem overlay: pull the git clone if present, copy its
# /etc tree over the system /etc, then ask FreeSWITCH to reload its XML.
# (-e: the whole script aborts on the first failing command.)

# sync git overlay repo if existing
if [ -e /root/src/fs-custom/.git/config ]; then
  echo 'Performing a pull on existing clone'
  pushd "/root/src/fs-custom"
  git pull
  popd
fi

# sync/copy files
echo 'Copying files'
cp -Rvf /root/src/fs-custom/etc/* /etc/

# reload the xml
echo 'Reloading FreeSwitch XML'
fs_cli -x "reloadxml"

echo 'Done.'
|
<reponame>biancahng/turma-de-elite-backend<filename>src/main/java/com/devaneios/turmadeelite/external/activities/ExternalActivitiesController.java
package com.devaneios.turmadeelite.external.activities;
import com.devaneios.turmadeelite.dto.ActivityViewDTO;
import com.devaneios.turmadeelite.dto.StudentActivitiesDTO;
import com.devaneios.turmadeelite.entities.Activity;
import com.devaneios.turmadeelite.security.guards.IsStudent;
import com.devaneios.turmadeelite.security.guards.IsTeacher;
import lombok.AllArgsConstructor;
import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.util.List;
@RestController
@RequestMapping("api/external/activities")
@AllArgsConstructor
public class ExternalActivitiesController {

    private final ExternalActivitiesService externalActivitiesService;

    /**
     * Lists the activities belonging to the authenticated teacher.
     * (Parameter renamed from the misspelled "autenthication" for
     * consistency with {@code getTeacherExternalActivityById}.)
     */
    @IsTeacher
    @GetMapping("/authenticated-teacher")
    public List<ActivityViewDTO> getActivitiesFromTeacher(Authentication authentication) throws IOException {
        String authUuid = (String) authentication.getPrincipal();
        return this.externalActivitiesService.getActivitiesFromTeacher(authUuid);
    }

    /** Lists the activities assigned to the authenticated student. */
    @IsStudent
    @GetMapping("/authenticated-student")
    public List<StudentActivitiesDTO> getActivitiesFromStudent(Authentication authentication) throws IOException {
        String authUuid = (String) authentication.getPrincipal();
        return this.externalActivitiesService.getActivitiesFromStudent(authUuid);
    }

    /** Fetches a single external activity visible to the requesting teacher. */
    @IsTeacher
    @GetMapping("/{externalId}")
    public ActivityViewDTO getTeacherExternalActivityById(@PathVariable String externalId, Authentication authentication) throws IOException {
        return this.externalActivitiesService.getExternalActivityById(externalId, (String) authentication.getPrincipal());
    }
}
|
<reponame>Zovube/Tasks-solutions
#include<bits/stdc++.h>
using namespace std;
// Node/edge limits and "infinity" for the min-cost-flow graph.
const int MAXV = 10333, MAXE = 4 * 2 * MAXV, INF = 1e9;
// Forward-star adjacency: head[v] -> first edge id, nxt[id] -> next edge id.
// (head is oversized to MAXE although indexed by vertex — harmless.)
int head[MAXE], to[MAXE], nxt[MAXE], cap[MAXE], cost[MAXE], S, T, E;
// Circular queue used by SPFA.
int qq[MAXV], qh, qt;
int N, M, K;
// dd: tentative distance, pp: incoming edge on shortest path, was: in-queue flag.
int dd[MAXV], pp[MAXV], was[MAXV];
// Input grid values (problem size fits in 111x111).
int aa[111][111];
// Adds the directed edge a->b (capacity c, cost cs) and its residual twin
// b->a (capacity 0, cost -cs). The even/odd pairing of edge ids (id ^ 1)
// is relied upon by mincost() when walking augmenting paths.
void addEdge(int a, int b, int c, int cs) {
	to[E] = b;
	cap[E] = c;
	cost[E] = cs;
	nxt[E] = head[a];
	head[a] = E++;
	to[E] = a;
	cap[E] = 0;
	cost[E] = -cs;
	nxt[E] = head[b];
	head[b] = E++;
}
// Shortest-path-faster (queue-based Bellman-Ford) over residual edges:
// fills dd[] with cheapest distances from S and pp[] with the edge id used
// to reach each node. Returns true while T is still reachable.
bool SPFA() {
	fill(dd, dd + MAXV, INF);
	fill(was, was + MAXV, 0);
	dd[S] = 0;
	was[S] = 1;
	qh = qt = 0;
	qq[qt++] = S;
	while(qh != qt) {
		int v = qq[qh++];
		was[v] = 0;
		if(qh == MAXV) qh = 0;   // wrap the circular queue
		for(int id = head[v]; id != -1; id = nxt[id]) {
			if(cap[id] <= 0) continue;   // saturated edges are not in the residual graph
			int nv = to[id];
			if(dd[nv] > dd[v] + cost[id]) {
				pp[nv] = id;
				dd[nv] = dd[v] + cost[id];
				if(was[nv] == 0) {
					qq[qt++] = nv;
					if(qt == MAXV) qt = 0;   // wrap
					was[nv] = 1;
				}
			}
		}
	}
	return dd[T] != INF;
}
// Successive-shortest-paths min-cost flow: while an S->T path exists,
// push the bottleneck capacity along the cheapest path found by SPFA.
// Returns the total cost; asserts that exactly K units were shipped.
int mincost() {
	int flow = 0, flow_cost = 0;
	while(SPFA()) {
		int tmp_flow = INF, tmp_cost = 0;
		// Walk the path backwards via pp[]; to[pp[v] ^ 1] is the edge's tail.
		for(int v = T; v != S; v = to[pp[v] ^ 1]) {
			tmp_cost += cost[pp[v]];
			tmp_flow = min(cap[pp[v]], tmp_flow);
		}
		flow += tmp_flow;
		flow_cost += tmp_cost * tmp_flow;
		// Apply the augmentation to the residual network.
		for(int v = T; v != S; v = to[pp[v] ^ 1]) {
			cap[pp[v]] -= tmp_flow;
			cap[pp[v] ^ 1] += tmp_flow;
		}
	}
	assert(flow == K);   // the problem guarantees K units can always be placed
	return flow_cost;
}
// Clock-seeded RNG — declared but unused in this solution.
unsigned seed = std::chrono::system_clock::now().time_since_epoch().count();
std::mt19937 generator (seed);

int main() {
#ifndef LOCAL
	freopen("casino.in", "r", stdin);
	freopen("casino.out", "w", stdout);
#endif
	cin >> M >> N >> K;
	for(int i = 0; i < M; i++) {
		for(int j = 0; j < N; j++) {
			cin >> aa[i][j];
		}
	}
	fill(head, head + MAXV, -1);
	int tmp_s = 10000;        // intermediate source limiting total flow to K
	S =10000 + 1, T = S + 1;
	swap(M, N);               // dimensions were read in (M, N) order; grid is indexed [N][M] below
	int TTT = 0;
	// 2-color the grid like a checkerboard: "odd" cells are sources fed from
	// tmp_s and connect to their 4 neighbours with cost -a[i][j]*a[nb]
	// (negated so min-cost flow maximizes the product sum); "even" cells sink to T.
	for(int i = 0; i < N; i++) {
		for(int j = 0; j < M; j++) {
			if((i + j) % 2) {
				addEdge(tmp_s, i * M + j, 1, 0);
				if(j < M - 1) addEdge(i * M + j, i * M + j + 1, 1, TTT - 1 * aa[i][j] * aa[i][j + 1]);
				if(i < N - 1) addEdge(i * M + j, (i + 1) * M + j, 1, TTT - 1 * aa[i][j] * aa[i + 1][j]);
				if(i > 0) addEdge(i * M + j, (i - 1) * M + j, 1, TTT - 1 * aa[i][j] * aa[i - 1][j]);
				if(j > 0) addEdge(i * M + j, i * M + j - 1, 1, TTT - 1 * aa[i][j] * aa[i][j - 1]);
			}
			else {
				addEdge(i * M + j, T, 1, 0);
			}
		}
	}
	addEdge(S, tmp_s, K, 0);  // cap the total flow at K pairs
	cout << K * TTT - 1 * mincost() << endl;   // undo the cost negation
	return 0;
}
|
#!/bin/bash
# Extract every gzipped tar archive under the directory given as $1 whose
# name contains "cs3620".
# BUG FIX: the original `for f in $(find ...)` relied on word splitting and
# used unquoted $f, so any file path containing whitespace was mangled.
# -print0 with `read -d ''` handles arbitrary file names safely.
find "$1" -type f -name "*cs3620*" -print0 |
while IFS= read -r -d '' f
do
  tar -zxvf "$f"
done
|
<reponame>stevepopovich/BadRobot2012<filename>src/com/badrobots/y2012/technetium/subsystems/Xerxes.java<gh_stars>1-10
/*
*
* This is the bridging tool
*/
package com.badrobots.y2012.technetium.subsystems;
import edu.wpi.first.wpilibj.command.Subsystem;
import edu.wpi.first.wpilibj.Victor;
import com.badrobots.y2012.technetium.RobotMap;
import com.badrobots.y2012.technetium.commands.ManualBridge;
import edu.wpi.first.wpilibj.Gyro;
import edu.wpi.first.wpilibj.Jaguar;
/*
* @author 1014 Programming Team
*/
public class Xerxes extends Subsystem
{
    // Lazily-created singleton instance.
    private static Xerxes instance;
    // Motor controller driving the bridging tool.
    private static Jaguar motor;
    // Tilt gyro — currently unused; its construction is commented out below.
    private static Gyro verticalGyro;

    /** Returns the shared subsystem instance, creating it on first use. */
    public static Xerxes getInstance()
    {
        if (instance == null)
        {
            instance = new Xerxes();
        }
        return instance;
    }

    private Xerxes()
    {
        super();
        motor = new Jaguar(RobotMap.bridgingTool);
        System.out.println("Xerxes: initialized");
        /// verticalGyro = new Gyro(RobotMap.verticalGyro);
    }

    /** Sets the bridge motor output (Jaguar speed-controller range). */
    public void setMotor(double speed)
    {
        motor.set(speed);
    }

    /** ManualBridge runs whenever no other command requires this subsystem. */
    public void initDefaultCommand()
    {
        System.out.println("setting default command of Xerxes");
        super.setDefaultCommand(new ManualBridge());
    }
}
|
#!/usr/bin/env bash
# BEGIN SCRIPT
# Hack for code verification
# NOTE(review): the lines below are self-assignments — they only re-state
# values already expected in the environment, presumably as placeholders
# for template substitution; confirm before removing.
OTN_HEADERS_FILE=${OTN_HEADERS_FILE}
OTN_FORM_ACTION=${OTN_FORM_ACTION}
OTN_COOKIE_FILE=${OTN_COOKIE_FILE}
OTN_FORM_DATA=${OTN_FORM_DATA}
OTN_CONTENT_FILE=${OTN_CONTENT_FILE}
OTN_HOST_LOGIN=${OTN_HOST_LOGIN}
#
# Stub hook: intentionally does nothing and reports success.
function oracle_prepare_actions() {
  return 0
}
# END SCRIPT
|
<filename>src/util/DataReader.java
package util;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import ml.data.Attribute;
import ml.data.DataInstance;
public class DataReader {

  /**
   * Reads attribute definitions, one "name: spec" pair per line. A spec
   * containing "continuous" yields a CONTINUOUS attribute, one containing
   * "ID" yields an ID attribute; anything else is DISCRETE, with the spec
   * split on whitespace into its legal values.
   */
  public static List<Attribute> readAttributes(String attributeFile)
      throws FileNotFoundException {
    List<Attribute> attributes = new ArrayList<Attribute>();
    int index = 0;
    try (Scanner in = new Scanner(new File(attributeFile))) {
      while (in.hasNextLine()) {
        String line = in.nextLine().trim();
        if (line.isEmpty()) {
          continue;
        }
        String[] separated = line.split(":");
        Attribute.Type type =
            separated[1].contains("continuous") ? Attribute.Type.CONTINUOUS
                : (separated[1].contains("ID") ? Attribute.Type.ID
                    : Attribute.Type.DISCRETE);
        Attribute attr = new Attribute(separated[0].trim(), index++, type);
        if (type == Attribute.Type.DISCRETE) {
          String[] values = separated[1].split("\\s+");
          for (int i = 0; i < values.length; i++) {
            if (values[i].trim().isEmpty()) {
              continue;
            }
            attr.addDiscreteAttributeValue(values[i].trim());
          }
        }
        attributes.add(attr);
      }
    }
    return attributes;
  }

  /** Reads labeled data: the last column is taken as the class label. */
  public static List<DataInstance> readData(String fileName,
      List<Attribute> attributes) throws FileNotFoundException {
    return readData(fileName, attributes, true);
  }

  /** Reads data without a header row. */
  public static List<DataInstance> readData(String fileName,
      List<Attribute> attributes, boolean hasClassLabel)
      throws FileNotFoundException {
    // BUG FIX: this overload previously forwarded the constant `true`,
    // silently ignoring the caller's hasClassLabel argument.
    return readData(fileName, attributes, hasClassLabel, false);
  }

  /**
   * Reads whitespace- or comma-separated data, one instance per line.
   * Lines with fewer columns than expected are skipped. Continuous values
   * are parsed as double (falling back to int on NumberFormatException).
   *
   * @param fileName path to the data file.
   * @param attributes attribute metadata, in column order.
   * @param hasClassLabel whether the last column holds the class label.
   * @param firstRowHeader whether to skip the first non-empty line.
   */
  public static List<DataInstance> readData(String fileName,
      List<Attribute> attributes, boolean hasClassLabel, boolean firstRowHeader)
      throws FileNotFoundException {
    List<DataInstance> data = new ArrayList<DataInstance>();
    try (Scanner in = new Scanner(new File(fileName))) {
      while (in.hasNextLine()) {
        String line = in.nextLine().trim();
        if (firstRowHeader) {
          firstRowHeader = false;
          continue;
        }
        if (line.isEmpty()) {
          continue;
        }
        String[] cols = line.split("[\\s|,]+");
        if (cols.length < attributes.size() + 1 && hasClassLabel) {
          continue;
        } else if (cols.length < attributes.size() && !hasClassLabel) {
          continue;
        }
        DataInstance di = new DataInstance();
        int i = 0;
        for (; i < cols.length - 1; i++) {
          if (attributes.get(i).getType() == Attribute.Type.CONTINUOUS) {
            try {
              di.setAttributeValueAt(i, Double.parseDouble(cols[i].trim()));
            } catch(NumberFormatException e) {
              di.setAttributeValueAt(i, Integer.parseInt(cols[i].trim()));
            }
          } else {
            di.setAttributeValueAt(i, cols[i].trim());
          }
        }
        // The final column is either the class label or a regular attribute.
        if (hasClassLabel) {
          di.setClassLabel(cols[i].trim());
        } else {
          if (attributes.get(i).getType() == Attribute.Type.CONTINUOUS) {
            di.setAttributeValueAt(i, Double.parseDouble(cols[i].trim()));
          } else {
            di.setAttributeValueAt(i, cols[i].trim());
          }
        }
        data.add(di);
      }
    }
    return data;
  }

  /**
   * Read transactions, or market basket, data.
   * The data should be formated in such a way that there is one data point
   * per line. The columns of the data are separated by the delimiter.
   * @param fileName - Path to data file.
   * @param attributes - List of attributes.
   * @param delimiter - Column delimiters Regex.
   * @return List of DataInstance objects.
   * @throws FileNotFoundException throws an exception if file fails to open.
   */
  public static List<DataInstance> readTransactionData(String fileName,
      List<Attribute> attributes, String delimiter) throws FileNotFoundException {
    List<DataInstance> data = new ArrayList<DataInstance>();
    try (Scanner in = new Scanner(new File(fileName))) {
      while (in.hasNextLine()) {
        String line = in.nextLine().trim();
        if (line.isEmpty()) {
          continue;
        }
        String[] cols = line.split(delimiter);
        if (cols.length < attributes.size()) {
          continue;
        }
        DataInstance di = new DataInstance();
        for (int i = 0; i < cols.length; i++) {
          if (attributes.get(i).getType() == Attribute.Type.CONTINUOUS) {
            di.setAttributeValueAt(i, Double.parseDouble(cols[i].trim()));
          } else {
            di.setAttributeValueAt(i, cols[i].trim());
          }
        }
        data.add(di);
      }
    }
    return data;
  }

  /**
   * Read transactions, or market basket, data.
   * The data should be formated in such a way that there is one data point
   * per line. The columns of the data are comma separated.
   * @param fileName - Path to data file.
   * @param attributes - List of attributes.
   * @return List of DataInstance objects.
   * @throws FileNotFoundException throws an exception if file fails to open.
   */
  public static List<DataInstance> readTransactionData(String fileName,
      List<Attribute> attributes) throws FileNotFoundException {
    return readTransactionData(fileName, attributes, "[\\s|,]+");
  }
}
|
<gh_stars>1-10
// Print the deployment account's address (QR code + text), then its balance
// and nonce on every network configured in the Truffle config.
const qrcode = require("qrcode-terminal");
const ethers = require("ethers");
const DEBUG = false;
const TruffleConfig = require("@truffle/config");
const config = TruffleConfig.detect();
const mnemonic = require("./checkMnemonic");

const main = async () => {
  try {
    const seedPhrase = mnemonic();
    const deployer = ethers.Wallet.fromMnemonic(seedPhrase);
    const { address } = deployer;

    qrcode.generate(address);
    console.log(`📬 Deployer Account is ${address}`);

    for (const networkName in config.networks) {
      try {
        const rpc = new ethers.providers.JsonRpcProvider(
          config.networks[networkName].url
        );
        const balance = await rpc.getBalance(address);
        console.log(` -- ${networkName} -- -- -- 📡 `);
        console.log(` balance: ${ethers.utils.formatEther(balance)}`);
        console.log(` nonce: ${await rpc.getTransactionCount(address)}`);
      } catch (e) {
        // Unreachable networks are skipped silently unless DEBUG is on.
        if (DEBUG) {
          console.log(e);
        }
      }
    }
  } catch (err) {
    console.error(err);
  }
};
main();
|
<gh_stars>0
import React from 'react';
import { withRouter } from "react-router";
import Header from '../components/header';
import Layout, { Wrapper } from '../components/layout';
import Navigation from '../components/navigation';
class Home extends React.Component {
render() {
return (
<Layout location={this.props.location} className="home">
<Header />
<Wrapper>
<Navigation />
</Wrapper>
</Layout>
);
}
}
export default withRouter(Home);
|
<filename>src/auth/auth.controller.ts
import {
Controller,
Post,
Body,
HttpCode,
Get,
UseGuards,
Param,
HttpStatus,
} from '@nestjs/common';
import {
SignupRequest,
LoginRequest,
LoginResponse,
GetUserResponse,
ChangeEmailRequest,
ResetPasswordRequest,
ChangePasswordRequest,
} from '../contract';
import { AuthService } from './auth.service';
import { ApiUseTags, ApiBearerAuth } from '@nestjs/swagger';
import { AuthGuard } from '@nestjs/passport';
import { Usr } from '../user/user.decorator';
import { User } from '../user/user.entity';
import { toUserModel } from '../user/user.mapper';
import { UserService } from '../user/user.service';
@ApiUseTags('auth')
@Controller('auth')
export class AuthController {
  constructor(
    private readonly authService: AuthService,
    private readonly userService: UserService,
  ) {}

  /** Register a new user account. */
  @Post('signup')
  @HttpCode(HttpStatus.CREATED)
  async signup(@Body() signupRequest: SignupRequest): Promise<void> {
    await this.authService.signup(signupRequest);
  }

  /** Authenticate credentials and return the issued token. */
  @Post('login')
  @HttpCode(HttpStatus.OK)
  async login(@Body() loginRequest: LoginRequest): Promise<LoginResponse> {
    return new LoginResponse(await this.authService.login(loginRequest));
  }

  /** Return the authenticated user's profile (with events and points). */
  @ApiBearerAuth()
  @Get()
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async getUserWithToken(@Usr() user: User): Promise<GetUserResponse> {
    return await this.userService.getUserWithEventsAndPoint(user);
  }

  /**
   * Verify an email address using the token from the verification link.
   * FIX: the route was declared as 'verify' with no ':token' segment, so
   * @Param('token') was always undefined and verification could never work.
   */
  @Get('verify/:token')
  @HttpCode(HttpStatus.OK)
  async verifyMail(@Param('token') token: string): Promise<void> {
    await this.authService.verifyEmail(token);
  }

  /** Send a confirmation mail for a pending email-address change. */
  @ApiBearerAuth()
  @Post('change-email')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async sendChangeEmailMail(
    @Usr() user: User,
    @Body() changeEmailRequest: ChangeEmailRequest,
  ): Promise<void> {
    await this.authService.sendChangeEmailMail(
      changeEmailRequest,
      user.id,
      user.firstName,
      user.email,
    );
  }

  /**
   * Apply an email change using the token from the confirmation link.
   * FIX: route was 'change-email' with no ':token' segment (same defect as
   * the verify route), so the token parameter was never populated.
   */
  @Get('change-email/:token')
  @HttpCode(HttpStatus.OK)
  async changeEmail(@Param('token') token: string): Promise<void> {
    await this.authService.changeEmail(token);
  }

  /** Email a password-reset link to the given address. */
  @Post('forgot-password/:email')
  @HttpCode(HttpStatus.OK)
  async sendResetPassword(@Param('email') email: string): Promise<void> {
    await this.authService.sendResetPasswordMail(email);
  }

  /** Change the password of the authenticated user. */
  @Post('change-password')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async changePassword(
    @Body() changePasswordRequest: ChangePasswordRequest,
    @Usr() user: User,
  ): Promise<void> {
    await this.authService.changePassword(
      changePasswordRequest,
      user.id,
      user.firstName,
      user.email,
    );
  }

  /** Reset a forgotten password using a previously mailed token. */
  @Post('reset-password')
  @HttpCode(HttpStatus.OK)
  async resetPassword(
    @Body() resetPasswordRequest: ResetPasswordRequest,
  ): Promise<void> {
    await this.authService.resetPassword(resetPasswordRequest);
  }

  /** Re-send the account verification mail to the authenticated user. */
  @Post('resend-verification')
  @HttpCode(HttpStatus.OK)
  @UseGuards(AuthGuard())
  async resendVerificationMail(@Usr() user: User): Promise<void> {
    await this.authService.resendVerificationMail(
      user.firstName,
      user.email,
      user.id,
    );
  }
}
|
<reponame>paracs/ng2-Dashboard
import { Component } from '@angular/core';

// Purely presentational carousel: all markup and styling live in the
// referenced template and stylesheet, so the class body needs no logic.
@Component({
  selector: 'carousel',
  templateUrl: './view/carousel.html',
  styleUrls:['./styles/carousel.css']
})
export class CarouselComponent {
}
|
// Copyright...
#include <gtest/gtest.h>
#include "undo_manager.h"
namespace formulate {
// Test helper: an integer counter whose mutating operations register their
// own inverses with the UndoManager under test. Does not own the manager.
class Client {
 public:
  explicit Client(UndoManager* undo) : undo_(undo) {}
  // Increment; registers Dec() as the undo operation (and vice versa).
  void Inc() {
    val_++;
    undo_->PushUndoOp([this] () { Dec(); });
  }
  void Dec() {
    val_--;
    undo_->PushUndoOp([this] () { Inc(); });
  }
  // Double; undone by Halve().
  void Double() {
    val_ *= 2;
    undo_->PushUndoOp([this] () { Halve(); });
  }
  // Integer halving. Undoing an odd value needs two steps (an Inc to restore
  // the lost remainder plus a Double), so both are pushed as one undo group.
  void Halve() {
    bool odd = val_ % 2;
    val_ /= 2;
    undo_->StartGroup();
    if (odd) {
      undo_->PushUndoOp([this] () { Inc(); });
    }
    undo_->PushUndoOp([this] () { Double(); });
    undo_->EndGroup();
  }
  // Public state so the tests can assert on it directly.
  int val_{0};
  UndoManager* undo_;
};
// Basic undo/redo bookkeeping: each Inc pushes one undo op; undo moves an op
// to the redo stack; a fresh user action clears the redo stack.
TEST(UndoManagerTest, UndoRedoTest) {
  UndoManager undo_manager;
  Client client(&undo_manager);
  EXPECT_EQ(0, client.val_);
  client.Inc();
  EXPECT_EQ(1, client.val_);
  EXPECT_EQ(1, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  client.Inc();
  EXPECT_EQ(2, client.val_);
  EXPECT_EQ(2, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  // Undo moves one op from the undo stack to the redo stack.
  undo_manager.PerformUndo();
  EXPECT_EQ(1, client.val_);
  EXPECT_EQ(1, undo_manager.undo_ops_.size());
  EXPECT_EQ(1, undo_manager.redo_ops_.size());
  undo_manager.PerformRedo();
  EXPECT_EQ(2, client.val_);
  EXPECT_EQ(2, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  undo_manager.PerformUndo();
  EXPECT_EQ(1, client.val_);
  EXPECT_EQ(1, undo_manager.undo_ops_.size());
  EXPECT_EQ(1, undo_manager.redo_ops_.size());
  undo_manager.PerformUndo();
  EXPECT_EQ(0, client.val_);
  EXPECT_EQ(0, undo_manager.undo_ops_.size());
  EXPECT_EQ(2, undo_manager.redo_ops_.size());
  // A new user-initiated action must discard the pending redo history.
  client.Inc();
  EXPECT_EQ(1, client.val_);
  EXPECT_EQ(1, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
}
// Grouping: ops pushed between StartGroup/EndGroup collapse into a single
// undoable entry; an empty group adds nothing.
TEST(UndoManagerTest, GroupTest) {
  UndoManager undo_manager;
  Client client(&undo_manager);
  client.val_ = 7;
  // Halve(7) is odd, so Halve itself groups its two inverse ops into one.
  client.Halve();
  EXPECT_EQ(3, client.val_);
  EXPECT_EQ(1, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  EXPECT_EQ(0, undo_manager.group_ops_.size());
  undo_manager.PerformUndo();
  EXPECT_EQ(7, client.val_);
  EXPECT_EQ(0, undo_manager.undo_ops_.size());
  EXPECT_EQ(1, undo_manager.redo_ops_.size());
  EXPECT_EQ(0, undo_manager.group_ops_.size());
  undo_manager.PerformRedo();
  EXPECT_EQ(3, client.val_);
  EXPECT_EQ(1, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  EXPECT_EQ(0, undo_manager.group_ops_.size());
  {
    // RAII wrapper: group is open inside this scope, closed on destruction.
    ScopedUndoManagerGroup undo_group(&undo_manager);
    client.Halve();
    EXPECT_EQ(1, client.val_);
    EXPECT_EQ(1, undo_manager.undo_ops_.size());
    EXPECT_EQ(0, undo_manager.redo_ops_.size());
    EXPECT_EQ(2, undo_manager.group_ops_.size());
  }
  EXPECT_EQ(2, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  EXPECT_EQ(0, undo_manager.group_ops_.size());
  // An empty group must not create an undo entry.
  undo_manager.StartGroup();
  undo_manager.EndGroup();
  EXPECT_EQ(2, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  EXPECT_EQ(0, undo_manager.group_ops_.size());
  undo_manager.StartGroup();
  client.Double();
  EXPECT_EQ(2, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  EXPECT_EQ(1, undo_manager.group_ops_.size());
  undo_manager.EndGroup();
  EXPECT_EQ(3, undo_manager.undo_ops_.size());
  EXPECT_EQ(0, undo_manager.redo_ops_.size());
  EXPECT_EQ(0, undo_manager.group_ops_.size()) << undo_manager.group_;
}
} // namespace formulate
|
<reponame>cjjenkinson/react-simple-tabs<filename>__tests__/Tab.test.js<gh_stars>0
import React from 'react';
import Enzyme from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
import { Tab } from '../src';
import { mount } from 'enzyme';

Enzyme.configure({ adapter: new Adapter() });

// Unit tests for the Tab component: rendering, click handling, the optional
// onTabChange callback, and active-state CSS class toggling.
describe('React Simple Tabs: Tab', () => {
  it('Should render in the DOM correctly', () => {
    const component = mount(
      <Tab>
        <h1>Billing</h1>
      </Tab>
    );
    expect(component.html()).toMatchSnapshot();
    expect(component.find(Tab).exists()).toBe(true);
  });
  it('should call onClick handler when tab is changed', () => {
    const onClickCb = jest.fn();
    const component = mount(
      <Tab onClick={onClickCb}>
        <h1>Billing</h1>
      </Tab>
    );
    expect(component.html()).toMatchSnapshot();
    component.find('a').simulate('click', { preventDefault: () => {} });
    // NOTE(review): tabIndex is not passed above, so this asserts the handler
    // is called with undefined — presumably intentional; confirm.
    expect(onClickCb).toHaveBeenCalledWith(component.props().tabIndex);
  });
  it('Should render title passed as props', () => {
    const component = mount(
      <Tab title="Billing" />
    );
    expect(component.html()).toMatchSnapshot();
    expect(component.contains(<span>Billing</span>)).toBeTruthy()
  });
  it('Should invoke onTabChange if passed as props', () => {
    const onClickCb = jest.fn();
    const onTabChangeCb = jest.fn();
    const component = mount(
      <Tab onClick={onClickCb} onTabChange={onTabChangeCb}>
        <h1>Billing</h1>
      </Tab>
    );
    expect(component.html()).toMatchSnapshot();
    component.find('a').simulate('click', { preventDefault: () => {} });
    expect(onClickCb).toHaveBeenCalledWith(component.props().tabIndex);
    expect(onTabChangeCb).toHaveBeenCalled();
  });
  it('Should contain is-active class if active', () => {
    const component = mount(
      <Tab isActive={true} title="Billing">
        <h1>Billing</h1>
      </Tab>
    );
    expect(component.html()).toMatchSnapshot();
    expect(component.find('.is-active').exists()).toBe(true);
  });
  it('Should not contain is-active class if not active', () => {
    const component = mount(
      <Tab isActive={false} title="Billing">
        <h1>Billing</h1>
      </Tab>
    );
    expect(component.html()).toMatchSnapshot();
    expect(component.find('.is-active').exists()).toBe(false);
  });
});
|
<!DOCTYPE html>
<!-- Static demo page: a hard-coded ten-row item/price table. -->
<html>
<head>
<title>Table of Items</title>
</head>
<body>
<h1>Table of 10 Items</h1>
<!-- Header row, then Item 1..10 priced $10..$100 in $10 increments. -->
<table>
<tr>
<th>Item</th>
<th>Price</th>
</tr>
<tr>
<td>Item 1</td>
<td>$10</td>
</tr>
<tr>
<td>Item 2</td>
<td>$20</td>
</tr>
<tr>
<td>Item 3</td>
<td>$30</td>
</tr>
<tr>
<td>Item 4</td>
<td>$40</td>
</tr>
<tr>
<td>Item 5</td>
<td>$50</td>
</tr>
<tr>
<td>Item 6</td>
<td>$60</td>
</tr>
<tr>
<td>Item 7</td>
<td>$70</td>
</tr>
<tr>
<td>Item 8</td>
<td>$80</td>
</tr>
<tr>
<td>Item 9</td>
<td>$90</td>
</tr>
<tr>
<td>Item 10</td>
<td>$100</td>
</tr>
</table>
</body>
</html>
|
#!/bin/env python3
import parser
from calculator import Calculator
import itertools

# Naive smoke test: feed every operator all permutations drawn from a fixed
# pool of values and make sure nothing raises an uncaught type error.
# Do not expect the tests to be very smart (yet).
calc = Calculator()
values = ('1', '0', '-1', '1/3', '-1/3', '1/100', '-1/100', ':a', '::')

tests = []
for name, spec in calc.operators.items():
    if isinstance(spec, str):
        # String entries are not callable operator specs; skip them.
        continue
    arg_count = len(spec[0])
    for combo in itertools.permutations(values, arg_count):
        tests.append(' '.join(combo + (name,)))
tests.append('c;:d b')

parser.parse(tests, echo=True, simple=True, calc=calc)
|
#!/usr/bin/env bash
################################################################################
#
# Title:	ontapapi_sl10599_init.sh
# Author:	Adrian Bronder
# Date:		2020-09-03
# Description:	Prepare linux host "rhel1" in LoD lab sl10599
#		--> "Exploring the ONTAP REST API v1.2"
#
# URLs:		https://labondemand.netapp.com/lab/sl10599 (NetApp + Partner)
#		https://handsonlabs.netapp.com/lab/ontapapi (Customer)
#		http://docs.netapp.com/ontap-9/index.jsp
#		https://pypi.org/project/netapp-ontap/
#		https://galaxy.ansible.com/netapp/ontap
#
################################################################################

echo "--> Updating Red Hat system"
yum -y update

echo "--> Installing additional packages"
yum -y install jq

echo "--> Upgrading pip"
pip3 install --upgrade pip

# FIX: typo "Asnible" -> "Ansible" in the status message.
echo "--> Upgrading Ansible"
pip3 install --upgrade ansible

echo "--> Installing additional Python libs"
pip3 install --upgrade requests six netapp_lib
pip3 install "pywinrm[kerberos]>=0.3.0"

echo "--> Creating links for Python3"
ln -s /usr/local/bin/python3.8 /usr/bin/python3
ln -s /usr/local/bin/pip3.8 /usr/bin/pip3

echo "--> Installing additional ansible collections (ONTAP, UM, Windows, AWX)"
ansible-galaxy collection install netapp.ontap
ansible-galaxy collection install netapp.um_info
ansible-galaxy collection install community.windows
ansible-galaxy collection install awx.awx:17.1.0

echo "--> Installing libraries and collections in AWX container"
docker exec -it awx_task pip3 install --upgrade requests six netapp_lib
docker exec -it awx_task ansible-galaxy collection install netapp.ontap -p /usr/share/ansible/collections -f
docker exec -it awx_task ansible-galaxy collection install netapp.um_info -p /usr/share/ansible/collections -f

# FIX: typo "aggrgates" -> "aggregates" in the status message.
echo "--> Creating aggregates on primary cluster (cluster 1)"
$(dirname $0)/ontapapi_sl10599_init_helper/sl10599_init_cluster.sh

echo "--> Creating Users and groups in AD (dc1)"
$(dirname $0)/ontapapi_sl10599_init_helper/sl10599_init_ad.yml -i $(dirname $0)/ontapapi_sl10599_init_helper/init_inventory

echo "--> Configuring AWX (rhel1)"
$(dirname $0)/ontapapi_sl10599_init_helper/sl10599_init_awx.yml

### REMOVED FROM 1.1 to 1.2 (already installed in LoD or not relevant anymore):
: '
echo "--> Installing additional packages"
yum -y install epel-release zlib-devel openssl-devel jq
echo "--> Installing Python 3.8.2 (as alternative version)"
wget -P /opt/ https://www.python.org/ftp/python/3.8.2/Python-3.8.2.tgz
tar xf /opt/Python-3.8.2.tgz -C /opt/
cd /opt/Python-3.8.2
./configure --enable-optimizations
make altinstall
ln -s /usr/local/bin/python3.8 /usr/bin/python3
ln -s /usr/local/bin/pip3.8 /usr/bin/pip3
cd ~
echo "--> Upgrading Python pip (for both versions)"
pip install --upgrade pip
pip3 install --upgrade pip
echo "--> Installing ONTAP Python client libraries and dependencies"
pip install requests marshmallow
pip install netapp-lib
pip3 install requests marshmallow
pip3 install netapp-lib
pip3 install netapp-ontap
echo "--> Installing Ansible"
yum -y install ansible
'
|
export declare function handleEvent(name: any, lastEvent: any, nextEvent: any, dom: any): void;
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 29 14:42:10 2014
@author: martin
"""
from numpy import zeros
from numpy import ones
import itertools
def calc_prob(current, prob, max_prob, clusters, last):
    """Distribute a unit probability mass over cluster indices last..current.

    Walking indices from high to low: indices above ``current`` get 0,
    ``current`` itself gets ``prob * max_prob``, each index strictly between
    ``current`` and ``last`` takes a ``max_prob`` fraction of the remaining
    mass, and ``last`` absorbs whatever is left, so the result sums to 1.

    :param current: highest index that may receive probability mass.
    :param prob: base probability for ``current`` (scaled by ``max_prob``).
    :param max_prob: damping factor applied at each step.
    :param clusters: total number of clusters (length of the result list).
    :param last: lowest index that may receive mass; absorbs the remainder.
    :return: list of ``clusters`` floats summing to 1.0.
    """
    probs = [0] * clusters
    if current == last:
        # Degenerate case: all mass lands on the single shared index.
        probs[current] = 1.0
        return probs
    # FIX: renamed the accumulator from 'sum', which shadowed the builtin.
    assigned = 0
    for i in range(clusters - 1, -1, -1):
        if i > current:
            probs[i] = 0.0
        elif i == current:
            probs[i] = prob * max_prob
        elif i > last:
            probs[i] = (1 - assigned) * max_prob
        elif i == last:
            probs[i] = (1 - assigned)
        else:
            probs[i] = 0
        assigned += probs[i]
    return probs
def create_line(dimension):
    # (lower, upper) bound vectors of a unit hypercube of the given dimension.
    return (zeros(dimension),ones(dimension))
def createBoundsList(min, max):
    """Interleave two equal-length sequences into [min0, max0, min1, max1, ...].

    FIX: ``itertools.izip`` exists only on Python 2; the builtin ``zip``
    iterates identically here and works on both major versions.
    (Parameter names shadow builtins but are kept for interface stability.)
    """
    result = []
    for low, high in zip(min, max):
        result.append(low)
        result.append(high)
    return result
|
<reponame>Richienb/recursive-filter<gh_stars>0
"use strict"
const _ = require("lodash")
// Recursively filter plain-object entries with `predicate` (default: lodash
// identity, i.e. drop falsy values). Arrays are recursed into but NOT
// filtered themselves — only object entries can be dropped; any other value
// is returned unchanged.
module.exports = function recursiveFilter(obj, predicate = _.identity) {
	if (_.isArray(obj)) return _.map(obj, (val) => recursiveFilter(val, predicate))
	else if (_.isPlainObject(obj)) return _.mapValues(_.pickBy(obj, predicate), (val) => recursiveFilter(val, predicate))
	return obj
}
|
# Scanner module definition for CVE-2020-9484 (Apache Tomcat RCE via
# PersistentManager session deserialization): requests /index.jsp with a
# path-traversal JSESSIONID cookie and greps the response for
# deserialization error markers.
AUTHOR='@xer0dayz'
VULN_NAME='CVE-2020-9484 - Apache Tomcat RCE by deserialization'
URI="/index.jsp"
METHOD='GET'
# Response patterns indicating a vulnerable deserialization code path.
MATCH='ObjectInputStream|PersistentManagerBase'
SEVERITY='P1 - CRITICAL'
CURL_OPTS="--user-agent '' -s --insecure -H 'Cookie: JSESSIONID=../../../../../usr/local/tomcat/groovy' "
SECONDARY_COMMANDS=''
GREP_OPTIONS='-i'
|
package com.bustiblelemons.adapters;
import android.content.Context;
import android.view.LayoutInflater;
import android.widget.BaseAdapter;
import java.util.List;
/**
 * Base list adapter holding a typed data list plus a cached Context and
 * LayoutInflater. Subclasses supply the row layout via getItemLayoutId().
 */
public abstract class AbsRefreshableAdapter<T> extends BaseAdapter {
    private static final String TAG = AbsRefreshableAdapter.class.getSimpleName();
    protected Context context;
    protected LayoutInflater inflater;
    protected List<T> data;
    public Context getContext() {
        return context;
    }
    protected AbsRefreshableAdapter(Context context) {
        this.context = context;
        this.inflater = (LayoutInflater) this.context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    }
    public AbsRefreshableAdapter(Context context, List<T> data) {
        this(context);
        this.data = data;
    }
    // Replaces the backing list WITHOUT notifying observers; callers must
    // refresh explicitly (see refreshData overloads).
    public void setData(List<T> data) {
        this.data = data;
    }
    // Re-renders with the current data; always reports success.
    public boolean refreshData() {
        notifyDataSetChanged(); return true;
    }
    // Swaps in new data and re-renders in one step.
    public void refreshData(List<T> data) {
        setData(data);
        notifyDataSetChanged();
    }
    /** @return layout resource id used for each row. */
    protected abstract int getItemLayoutId();
}
|
// valarray assignment example
#include <iostream> // std::cout
// #include <cstddef> // std::size_t, include works with g++, clang++ error
// error: no member named 'max_align_t' in the global namespace
// using ::max_align_t;
#include <valarray> // std::valarray, std::slice
#include <cmath> // std::sqrt(double)
// Valarray class
// A valarray object is designed to hold an array of values, and easily perform
// mathematical operations on them. It also allows special mechanisms to refer
// to subsets of elements in the arrays (see its operator[] overload).
// Most mathematical operations can be applied directly to valarray objects,
// including arithmetical and comparison operators, affecting all its elements.
// The valarray specification allows for libraries to implement it with several
// efficiency optimizations, such as parallelization of certain operations, memory
// recycling or support for copy-on-reference / copy-on-write optimizations.
// Implementations may even replace valarray as the return type for standard f
// unctions described below, provided they behave as, and can be converted to, valarray objects.
// Valarray slice selector
// This class represents a valarray slice selector.
// It does not contain nor refers to any element
// - it only describes a selection of elements to be
// used as an index in valarray::operator[].
// A valarray slice is defined by a starting index, a size, and a stride.
// The starting index (start) is the index of the first element in the selection.
// The size (size) is the number of elements in the selection.
// The stride (stride) is the span that separates the elements selected.
// Therefore, a slice with a stride higher than 1 does not select contiguous
// elements in the valarray; For example, slice(3,4,5) selects the elements 3, 8, 13 and 18.
// Demo: valarray assignment — from another valarray, from a scalar
// (broadcast to all elements), and from a slice selection.
int main1 ()
{ // foo: bar:
  std::valarray<int> foo (4); // 0 0 0 0
  std::valarray<int> bar (2,4); // 0 0 0 0 2 2 2 2
  foo = bar; // 2 2 2 2 2 2 2 2
  bar = 5; // 2 2 2 2 5 5 5 5
  foo = bar[std::slice (0,4,1)]; // 5 5 5 5 5 5 5 5
  std::cout << "foo sums " << foo.sum() << '\n';
  return 0;
}
// Demo: arithmetic and compound assignment operate element-wise; comparison
// yields a valarray<bool>, and min()==true means "all elements equal".
int main2 ()
{
  int init[]= {10,20,30,40};
  // foo: bar:
  std::valarray<int> foo (init, 4); // 10 20 30 40
  std::valarray<int> bar (25,4); // 10 20 30 40 25 25 25 25
  bar += foo; // 10 20 30 40 35 45 55 65
  foo = bar + 10; // 45 55 65 75 35 45 55 65
  foo -= 10; // 35 45 55 65 35 45 55 65
  std::valarray<bool> comp = (foo==bar);
  if ( comp.min() == true )
    std::cout << "foo and bar are equal.\n";
  else
    std::cout << "foo and bar are not equal.\n";
  return 0;
}
int increment (int x) {return ++x;}
// Demo: valarray::apply builds a new valarray by running a free function
// over every element.
int main3 ()
{
  const int seed[] = {10,20,30,40,50};
  std::valarray<int> source (seed, 5);
  const std::valarray<int> bumped = source.apply(increment);
  std::cout << "foo contains:";
  for (std::size_t idx = 0; idx < bumped.size(); ++idx)
    std::cout << ' ' << bumped[idx];
  std::cout << '\n';
  return 0;
}
// Demo: gslice (generalized slice) selects a 2x3 grid of elements —
// starting at index 1, rows 7 apart, columns 2 apart — and zeroes them.
int main4 ()
{
  std::valarray<int> foo (14);
  for (int i=0; i<14; ++i) foo[i]=i;
  std::size_t start=1;
  std::size_t lengths[]= {2,3};
  std::size_t strides[]= {7,2};
  std::gslice mygslice (start,
                        std::valarray<std::size_t>(lengths,2),
                        std::valarray<std::size_t>(strides,2));
  foo[mygslice] = 0;
  std::cout << "foo:\n";
  for (std::size_t n=0; n<foo.size(); n++)
    std::cout << ' ' << foo[n];
  std::cout << '\n';
  return 0;
}
// Demo: indirect (index-array) selection — a valarray<size_t> of indices
// used with operator[] addresses just those slots for read-modify-write.
int main5 ()
{
  std::valarray<int> data (8);
  for (int i=0; i<8; ++i) data[i]=i;              // 0 1 2 3 4 5 6 7

  const std::size_t picked[] = {3,5,6};
  std::valarray<std::size_t> indices (picked, 3);

  data[indices] *= std::valarray<int>(10,3);      // scale the picked slots
  data[indices] = 0;                              // ...then zero them out

  std::cout << "foo:";
  for (std::size_t i=0; i<data.size(); ++i)
    std::cout << ' ' << data[i];
  std::cout << '\n';
  return 0;
}
// Demo: math functions such as sqrt apply element-wise to a whole valarray.
int main6 ()
{
  double val[] = {9.0, 25.0, 100.0};
  std::valarray<double> foo (val,3);
  std::valarray<double> bar = sqrt (foo);
  std::cout << "foo:";
  for (std::size_t i=0; i<foo.size(); ++i)
    std::cout << ' ' << foo[i];
  std::cout << '\n';
  std::cout << "bar:";
  for (std::size_t i=0; i<bar.size(); ++i)
    std::cout << ' ' << bar[i];
  std::cout << '\n';
  return 0;
}
// Run every demo in order; their return values are intentionally ignored.
int main()
{
  main1();
  main2();
  main3();
  main4();
  main5();
  main6();
}
|
module CucumberStatistics
  # Builds HTML table-cell snippets and formats durations/timestamps for the
  # cucumber statistics report.
  class RendererHelper
    # Cell with the item name; hover shows the source file.
    def name_td(results)
      %{<td title="#{results[1][:file]}">#{results[0]}</td>}
    end
    # Cell with the file name; hover shows the scenario name.
    def scenario_file_td(name, scenario_name)
      %{<td title="#{scenario_name}">#{name}</td>}
    end
    def std_file_td(file_name, name)
      %{<td title="#{name}">#{file_name}</td>}
    end
    # Duration cell; marked with the "danger" class when this row is the
    # slowest entry in warning_results.
    def time_td(results, metric, *warning_results)
      duration = results[1][metric]
      %{<td #{warning_class(results, warning_results)} data-value="#{duration}" title="#{duration}">#{format(duration)}</td>}
    end
    def scenario_time_td(duration)
      %{<td data-value="#{duration}" title="#{duration}">#{format(duration)}</td>}
    end
    def std_time_td(duration)
      %{<td data-value="#{duration}" title="#{duration}">#{format(duration)}</td>}
    end
    # Summary banner shown above the report tables.
    def alert_info_text(overall_statistics)
      <<-HTML
      <span>
        #{overall_statistics.feature_count} Features,
        #{overall_statistics.scenario_count} Scenarios,
        #{overall_statistics.step_count} Steps completed in #{format(overall_statistics.duration)}.
        <span class='text-muted pull-right small'>
          Finished on #{format_date_time(overall_statistics.end_time)}
        </span>
      </span>
      HTML
    end
    # Returns the class attribute for a warning row; empty string otherwise.
    # Warns only when this row matches the FIRST entry of warning_results.
    def warning_class(results, warning_results)
      if warning_results.nil? || warning_results.empty?
        should_warn = false
      else
        should_warn = (results[0].eql? warning_results[0][0])
      end
      if should_warn
        %{class="danger"}
      else
        ''
      end
    end
    def count_td(results, metric)
      value = results[1][metric]
      %{<td data-value="#{value}">#{value}</td>}
    end
    # Human-readable duration: "Hh Mm S.mmm s", "Mm S.mmm s" or "S.mmm s";
    # '-' for nil/zero input. The sub-millisecond branch renders "< 0.000s".
    def format (ts)
      return '-' if ts.nil? || ts == 0
      #find the seconds
      seconds = ts % 60
      #find the minutes
      minutes = (ts / 60) % 60
      #find the hours
      hours = (ts/3600)
      formatted_h = hours.to_i
      formatted_m = minutes.to_i
      formatted_s = seconds.to_i
      # milliseconds via strftime %3N on the fractional seconds
      formatted_ms = Time.at(seconds).utc.strftime("%3N")
      # http://apidock.com/ruby/DateTime/strftime
      if hours >= 1
        #result = Time.at(ts).utc.strftime("%Hh %Mm %S.%3Ns")
        result = "#{formatted_h}h #{formatted_m}m #{formatted_s}.#{formatted_ms}s"
      elsif minutes >= 1
        #result = Time.at(ts).utc.strftime("%Mm %S.%3Ns")
        result = "#{formatted_m}m #{formatted_s}.#{formatted_ms}s"
      elsif formatted_ms.to_i == 0 && formatted_s == 0 && formatted_h == 0
        result = "< #{formatted_s}.#{formatted_ms}s"
      else
        #result = Time.at(ts).utc.strftime("%S.%3Ns")
        result = "#{formatted_s}.#{formatted_ms}s"
      end
      result
    end
    # e.g. "09/03/2020 at 02:15PM"
    def format_date_time (time)
      time.strftime("%m/%d/%Y at %I:%M%p")
    end
  end
end
# Termux package recipe for GNU gperf (perfect hash function generator).
TERMUX_PKG_HOMEPAGE=https://www.gnu.org/software/gperf
TERMUX_PKG_DESCRIPTION="A perfect hash function generator"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=3.1
TERMUX_PKG_REVISION=4
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/gnu/gperf/gperf-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=588546b945bba4b70b6a3a616e80b4ab466e3f33024a352fc2198112cdbb3ae2
TERMUX_PKG_DEPENDS="libc++"
# Build in the source tree rather than a separate build directory.
TERMUX_PKG_BUILD_IN_SRC=true
/////////////////////////////////////////////////////////////
//PostRestV2Controller.java
//rest-v2-app
// Created by Gooru on 2014
// Copyright (c) 2014 Gooru. All rights reserved.
// http://www.goorulearning.org/
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/////////////////////////////////////////////////////////////
package org.ednovo.gooru.controllers.v2.api;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.ednovo.gooru.controllers.BaseController;
import org.ednovo.gooru.core.api.model.CustomTableValue;
import org.ednovo.gooru.core.api.model.Post;
import org.ednovo.gooru.core.api.model.SessionContextSupport;
import org.ednovo.gooru.core.api.model.User;
import org.ednovo.gooru.core.constant.ConstantProperties;
import org.ednovo.gooru.core.constant.Constants;
import org.ednovo.gooru.core.constant.GooruOperationConstants;
import org.ednovo.gooru.core.constant.ParameterProperties;
import org.ednovo.gooru.core.security.AuthorizeOperations;
import org.ednovo.gooru.domain.service.PostService;
import org.ednovo.gooru.domain.service.comment.CommentService;
import org.ednovo.goorucore.application.serializer.JsonDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
@Controller
@RequestMapping(value = { "/v2/post", "/v2/review", "/v2/response", "/v2/question-board", "/v2/note" })
public class PostRestV2Controller extends BaseController implements ParameterProperties, ConstantProperties {
    @Autowired
    private CommentService commentService;
    @Autowired
    private PostService postService;
    /**
     * Create a post. The post type (post/review/response/question-board/note)
     * is derived from the request path; the creating user is logged.
     */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_POST_ADD })
    @RequestMapping(method = RequestMethod.POST, value = "")
    public ModelAndView createPost(@RequestBody String data, HttpServletRequest request, HttpServletResponse response) throws Exception {
        User user = (User) request.getAttribute(Constants.USER);
        Post post = this.getPostService().createPost(this.buildPostFromInputParameters(data, request), user);
        SessionContextSupport.putLogParameter(EVENT_NAME, CREATE_POST);
        SessionContextSupport.putLogParameter(USER_ID, user.getUserId());
        SessionContextSupport.putLogParameter(GOORU_UID, user.getPartyUid());
        return toModelAndViewWithIoFilter(post, RESPONSE_FORMAT_JSON, EXCLUDE_ALL, true, POST_INCLUDE_FIELDS);
    }
    /** Update an existing post identified by its id path variable. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_POST_UPDATE })
    @RequestMapping(method = RequestMethod.PUT, value = "/{id}")
    public ModelAndView updatePost(@RequestBody String data, @PathVariable(value = ID) String postId, HttpServletRequest request, HttpServletResponse response) throws Exception {
        Post post = this.getPostService().updatePost(postId, this.buildPostFromInputParameters(data, request));
        SessionContextSupport.putLogParameter(EVENT_NAME,UPDATE_POST);
        SessionContextSupport.putLogParameter(POST_ID, postId);
        return toModelAndViewWithIoFilter(post, RESPONSE_FORMAT_JSON, EXCLUDE_ALL, true, POST_INCLUDE_FIELDS);
    }
    /** Fetch a single post by id. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_POST_READ })
    @RequestMapping(method = RequestMethod.GET, value = "/{id}")
    public ModelAndView getPost(HttpServletRequest request, @PathVariable(value = ID) String postId, HttpServletResponse response) throws Exception {
        return toModelAndViewWithIoFilter(this.getPostService().getPost(postId), RESPONSE_FORMAT_JSON, EXCLUDE_ALL, true, POST_INCLUDE_FIELDS);
    }
    /** List the current user's posts of the path-derived type, paginated. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_POST_READ })
    @RequestMapping(method = RequestMethod.GET, value = "")
    public ModelAndView getPosts(@RequestParam(value = LIMIT_FIELD, required = false, defaultValue = "10") Integer limit, @RequestParam(value = OFFSET_FIELD, required = false, defaultValue = "0") Integer offset, HttpServletRequest request, HttpServletResponse response) throws Exception {
        User user = (User) request.getAttribute(Constants.USER);
        return toModelAndViewWithIoFilter(this.getPostService().getPosts(user, getPostType(request), limit, offset), RESPONSE_FORMAT_JSON, EXCLUDE_ALL, true, POST_INCLUDE_FIELDS);
    }
    /** Delete a post by id. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_POST_DELETE })
    @RequestMapping(method = RequestMethod.DELETE, value = "/{id}")
    public void deletePost(@PathVariable(value = ID) String postId, HttpServletRequest request, HttpServletResponse response) throws Exception {
        this.getPostService().deletePost(postId);
    }
    /** List the non-deleted comments attached to a post, paginated. */
    @AuthorizeOperations(operations = { GooruOperationConstants.OPERATION_POST_READ })
    @RequestMapping(method = RequestMethod.GET, value = "/{id}/comment")
    public ModelAndView getPostComments(HttpServletRequest request, @PathVariable(value = ID) String gooruOid, @RequestParam(value = OFFSET_FIELD, required = false, defaultValue = "0") Integer offset, @RequestParam(value = LIMIT_FIELD, required = false, defaultValue = "20") Integer limit,
            HttpServletResponse response) throws Exception {
        return toModelAndViewWithIoFilter(this.getCommentService().getComments(null,gooruOid, null, limit, offset,NOT_DELETED), RESPONSE_FORMAT_JSON, EXCLUDE_ALL, true, POST_INCLUDE_FIELDS);
    }
    // Last path segment selects the post type (e.g. "/v2/review" -> "review").
    private String getPostType(HttpServletRequest request) {
        if (request.getPathInfo() != null) {
            String path = request.getPathInfo();
            return path.substring(path.lastIndexOf('/') + 1);
        }
        return null;
    }
    // Deserialize the JSON body and stamp it with the path-derived post type.
    private Post buildPostFromInputParameters(String data, HttpServletRequest request) {
        Post post = JsonDeserializer.deserialize(data, Post.class);
        CustomTableValue postType = new CustomTableValue();
        postType.setValue(getPostType(request));
        post.setType(postType);
        return post;
    }
    public PostService getPostService() {
        return postService;
    }
    public CommentService getCommentService() {
        return commentService;
    }
}
|
#!/bin/bash
#########################################################
# Fetch the trusted throw-away CA's CRL from the EJBCA API (JSON field
# "crl", PEM encoded) and convert it into the configured CRL file.
#########################################################
. "${K2V_APP_BASEDIR}"/bin/scripts_tls/_initVariables.sh
echo
echo "Retrieve CRL of trusted CA : ${certEjbcaApiUrl}/internal/api/v1/throw-away/ca/crl "
certCrl=$(curl -X GET "${certEjbcaApiUrl}/internal/api/v1/throw-away/ca/crl" \
  -H "Content-Type:application/json" \
  -H "Accept:application/json" | jq '.crl' -r)
echo "${certCrl}" > "${certDir}"/tempcrl.crl
# Re-encode the PEM CRL into the target file format via openssl.
openssl crl -inform pem -in "${certDir}"/tempcrl.crl -out "${certDir}"/"${certCrlFile}"
# NOTE(review): +x on a CRL data file looks unintended (chmod +r more likely)
# — confirm before changing.
chmod +x "${certDir}"/"${certCrlFile}"
rm "${certDir}"/tempcrl.crl
|
package main
import (
"fmt"
"github.com/google/go-cmp/cmp"
)
func main() {
fmt.Printf("hello, my name is edgar\n")
fmt.Println(cmp.Diff("Hello World", "Hello Go"))
}
|
<reponame>jokoframework/license-community<filename>src/main/java/py/com/sodep/mobileforms/license/MFApplicationLicense.java
package py.com.sodep.mobileforms.license;
/**
 * License scoped to a single application, bounding the number of devices and
 * users it may register. Extends the base {@code MFLicense} contract.
 */
public interface MFApplicationLicense extends MFLicense {

	/**
	 * @return the id of the application this license applies to
	 */
	Long getApplicationId();

	/**
	 * @return the maximum number of devices permitted under this license
	 */
	Long getMaxDevices();

	/**
	 * @return the maximum number of users permitted under this license
	 */
	Long getMaxUsers();

	/**
	 * Returns the email address of the owner.
	 *
	 * @return the license owner's email address
	 */
	String getOwner();
}
|
#!/bin/bash
# Fetches the sample app's OAuth credentials from Apigee and selects one of
# the three demo grant-type flows defined below.
echo Using org and environment configured in /setup/setenv.sh
echo Be sure to run scripts under ./setup/provisioning
source ../../setup/setenv.sh
echo Get app profile
echo "Enter your password for the Apigee Enterprise organization $org, followed by [ENTER]:"
# Keep prompting until a non-empty password is entered.
# Fix: "$password" is quoted — an unquoted empty expansion made `[ -z ]` fragile.
while [ -z "$password" ]; do
read -s password
done
#########################################################################################################
echo -e "Fetching Callback URL, ConsumerKey & Secret for developer application 'joe-app' \n"
appdata=`curl -k -u "$username:$password" "$url/v1/o/$org/developers/joe@weathersample.com/apps/joe-app" 2>/dev/null`;
# Scrape the app-profile JSON for the callback URL, key and secret.
callback=`echo "$appdata" | grep callbackUrl | awk -F '\"' '{ print $4 }'`;
consumerkey=`echo "$appdata" | grep -m 1 consumerKey | awk -F '\"' '{ print $4 }'`;
consumersecret=`echo "$appdata" | grep -m 1 consumerSecret | awk -F '\"' '{ print $4 }'`;
#########################################################################################################
echo -e "\nSelect the flow to continue: ([authorization_code], client_credentials, implicit):"
read grant_type
# Default to the authorization-code flow when the user just presses ENTER.
# Fix: quoted "$grant_type" so an empty reply cannot break the test.
if [ -z "$grant_type" ]; then
grant_type="authorization_code"
fi
#########################################################################################################
# GrantType_AuthCode walks through the three-legged authorization_code flow:
# the user authorizes in a browser, pastes the returned auth code back here,
# and the script exchanges it (with the app key/secret) for access+refresh tokens.
GrantType_AuthCode () {
echo -e "Performing WebServer Flow: grant_type:authorization_code";
sleep 5
authorization_request="https://$org-$env.$api_domain/oauth/authorize?response_type=code&client_id=$consumerkey&redirect_uri=$callback&scope=READ&state=foobar"
echo -e "This page simulates an API Provider authentication page"
echo -e "Enter anything. No AuthN actually takes place. An API provider would implement an AuthN mechanism to check credentials"
echo -e "After entering credentials, the login app returns an AuthCode in the URL."
echo -e "The callback URL is the redirect_uri defined in an app profile--in this case, for Joe's app"
echo -e "\nTo simulate the app end user experience, enter the URL below in a Web browser:\n"
echo -e "*********************************************************************************************************************"
echo -e "$authorization_request \n"
echo -e "*********************************************************************************************************************"
echo -e "Note the authorization code returned in the browser address bar"
echo -e "\nThe API Provider Login Page Redirection URI:"
echo -e "https://$org-$env.$api_domain/oauth/samplelogingpage?client_id={request.queryparam.client_id}&response_type={request.queryparam.response_type}&scope={request.queryparam.scope}"
echo -e "\nOn successful authentication, login application invokes this url and it returns the Authorization Code to the App"
echo -e "https://$org-$env.$api_domain/oauth/authorizationcode?client_id=$consumerkey&response_type=code&app_enduser={userId}"
sleep 5
echo -e "\n\nNow, The app exchanges the authorization code in return for an access token"
echo -e "\nEnter the authorization code returned in the browser address bar for the URL above"
read auth_code
# Quoted to avoid a test error when the user enters nothing.
while [ -z "$auth_code" ]; do
read auth_code
done
accesstoken_request="https://$org-$env.$api_domain/oauth/token"
echo -e "\n URL: POST $accesstoken_request
HTTP Headers:
* Authorization (Basic HTTP Authentication of client_id and client_secret)
* Content-Type : application/x-www-form-urlencoded
Payload: code=$auth_code&grant_type=authorization_code&response_type=code \n\n"
echo -e "curl -k -u $consumerkey:$consumersecret $accesstoken_request -X POST -d \"code=$auth_code&grant_type=authorization_code&response_type=code\" -H 'Content-Type : application/x-www-form-urlencoded'"
accesstoken_response=`curl -k -u $consumerkey:$consumersecret $accesstoken_request -X POST -d "code=$auth_code&grant_type=authorization_code&response_type=code" 2>/dev/null`
echo -e "\n\nAccessToken Response \n $accesstoken_response \n"
#Extracting AccessToken & RefreshToken
# (position-based JSON scraping: fields 10 and 9 of the comma-split response — fragile, depends on field order)
access_token=`echo $accesstoken_response | awk -F "," '{ print $10 }' | awk -F ":" '{print $2}' | sed -e 's/[^a-zA-Z0-9]//g'`
refresh_token=`echo $accesstoken_response | awk -F "," '{ print $9 }' | awk -F ":" '{print $2}' | sed -e 's/[^a-zA-Z0-9]//g'`
echo -e "AccessToken: $access_token"
echo -e "RefreshToken: $refresh_token \n"
}
#########################################################################################################
# GrantType_Implicit walks through the implicit flow: the access token is
# returned directly in the browser redirect, with no code-exchange step.
GrantType_Implicit () {
echo -e "\nPerforming Implicit Flow:";
sleep 5
authorization_request="https://$org-$env.$api_domain/oauth/authorize?response_type=token&client_id=$consumerkey&redirect_uri=$callback&scope=READ&state=foobar"
echo -e "This page simulates an API Provider authentication page"
echo -e "Enter anything. No AuthN actually takes place. An API provider would implement an AuthN mechanism to check credentials"
echo -e "After entering credentials, the login app returns an AuthCode in the URL."
echo -e "The callback URL is the redirect_uri defined in an app profile--in this case, for Joe's app"
echo -e "\nTo simulate the app end user experience, enter the URL below in a Web browser:\n"
echo -e "*********************************************************************************************************************"
echo -e "$authorization_request \n"
echo -e "*********************************************************************************************************************"
echo -e "Note the access token returned in the browser address bar"
echo -e "\nThe API Provider Login Page Redirection URL:"
echo -e "https://$org-$env.$api_domain/oauth/samplelogingpage?client_id={request.queryparam.client_id}&response_type={request.queryparam.response_type}&scope={request.queryparam.scope}"
echo -e "\nOn successful authentication, login application invokes this URL and returns the access token to the app"
echo -e "https://$org-$env.$api_domain/oauth/token?client_id=$consumerkey&response_type=token&app_enduser={userId}"
}
#########################################################################################################
# GrantType_ClientCredentials demonstrates the two-legged flow: the app's own
# key/secret are exchanged directly for an access token (no end user involved).
GrantType_ClientCredentials () {
echo -e "\nPerforming Client Credentials Flow:";
sleep 5
accesstoken_request="https://$org-$env.$api_domain/oauth/token"
echo -e "\n URL: POST $accesstoken_request
HTTP Headers:
* Authorization (Basic HTTP Authentication of client_id and client_secret)
* Content-Type : application/x-www-form-urlencoded
Payload: grant_type=client_credentials \n\n"
echo "Using the app key $consumerkey and secret $consumersecret to request an access token"
echo -e "curl -k -u $consumerkey:$consumersecret $accesstoken_request -X POST -d \"grant_type=client_credentials\" -H 'Content-Type : application/x-www-form-urlencoded'"
accesstoken_response=`curl -k -u $consumerkey:$consumersecret $accesstoken_request -X POST -d "grant_type=client_credentials" 2>/dev/null`
echo -e "\n\nAccessToken Response \n $accesstoken_response \n"
#Extracting AccessToken & RefreshToken
# (position-based JSON scraping: field 10 of the comma-split response — fragile, depends on field order)
access_token=`echo $accesstoken_response | awk -F "," '{ print $10 }' | awk -F ":" '{print $2}' | sed -e 's/[^a-zA-Z0-9]//g'`
echo -e "AccessToken: $access_token"
}
############################################### MAIN FLOW ###############################################
# Dispatch to the selected grant-type demo function.
if [ "$grant_type" == "authorization_code" ]; then
GrantType_AuthCode
elif [ "$grant_type" == "implicit" ]; then
GrantType_Implicit
elif [ "$grant_type" == "client_credentials" ]; then
GrantType_ClientCredentials
else
echo -e "\nPlease pass valid grant_type"
fi
#########################################################################################################
# Fix: "used ot make" -> "used to make".
echo "The access token above is used to make request to the protected resource."
#########################################################################################################
|
<gh_stars>100-1000
namespace uchan {
    /**
     * Makes an element draggable via a handle element, supporting both mouse
     * and single-touch input. The element is clamped to the viewport
     * (optionally offset by page scroll), and registered callbacks fire after
     * every reposition.
     */
    export class Draggable {
        element: HTMLElement;
        handleElement: HTMLElement;
        /** When true, dragging accounts for the page scroll offset. */
        scrollWithPage: boolean;

        /** Invoked after every successful reposition. */
        moveCallbacks: (() => void)[] = [];

        // Current position and last measured size, in px.
        x: number = 0;
        y: number = 0;
        // Pointer offset within the element at drag start.
        startDragX: number = 0;
        startDragY: number = 0;
        // Scroll offsets captured per move step.
        scrollX: number = 0;
        scrollY: number = 0;
        width: number = 0;
        height: number = 0;

        // Handlers pre-bound to `this`, so add/removeEventListener receive
        // identical references (typed instead of `any`).
        mouseDownBound: (event: MouseEvent) => void;
        mouseMoveBound: (event: MouseEvent) => void;
        mouseUpBound: (event: MouseEvent) => void;
        touchStartBound: (event: TouchEvent) => void;
        touchEndBound: (event: TouchEvent) => void;
        touchCancelBound: (event: TouchEvent) => void;
        touchMoveBound: (event: TouchEvent) => void;

        /** Identifier of the tracked touch, or -1 when none is tracked. */
        touchId = -1;

        // Fix: parameters were implicitly `any`; typed for strict-mode safety.
        constructor(element: HTMLElement, handleElement: HTMLElement, scrollWithPage: boolean) {
            this.element = element;
            this.handleElement = handleElement;
            this.scrollWithPage = scrollWithPage;

            this.mouseDownBound = this.mouseDown.bind(this);
            this.mouseMoveBound = this.mouseMove.bind(this);
            this.mouseUpBound = this.mouseUp.bind(this);
            this.touchStartBound = this.touchStart.bind(this);
            this.touchEndBound = this.touchEnd.bind(this);
            this.touchCancelBound = this.touchCancel.bind(this);
            this.touchMoveBound = this.touchMove.bind(this);
        }

        /**
         * Attaches drag listeners to the handle and optionally registers a
         * move callback. (Fix: default `null` on a non-nullable parameter
         * type only compiled with strictNullChecks off.)
         */
        bind(moveCallback: (() => void) | null = null) {
            this.handleElement.addEventListener('mousedown', this.mouseDownBound);
            this.handleElement.addEventListener('touchstart', this.touchStartBound);
            this.handleElement.addEventListener('touchend', this.touchEndBound);
            this.handleElement.addEventListener('touchcancel', this.touchCancelBound);
            this.handleElement.addEventListener('touchmove', this.touchMoveBound);

            if (moveCallback != null) {
                this.moveCallbacks.push(moveCallback);
            }
        }

        /** Detaches drag listeners and unregisters the given callback, if any. */
        unbind(moveCallback: (() => void) | null = null) {
            this.handleElement.removeEventListener('mousedown', this.mouseDownBound);
            this.handleElement.removeEventListener('touchstart', this.touchStartBound);
            this.handleElement.removeEventListener('touchend', this.touchEndBound);
            this.handleElement.removeEventListener('touchcancel', this.touchCancelBound);
            this.handleElement.removeEventListener('touchmove', this.touchMoveBound);

            // Fix: previously indexOf(null) was evaluated (always -1);
            // guard explicitly instead — same behavior, strict-null safe.
            if (moveCallback != null) {
                let i = this.moveCallbacks.indexOf(moveCallback);
                if (i >= 0) {
                    this.moveCallbacks.splice(i, 1);
                }
            }
        }

        /** Moves the element to (x, y), clamped inside the visible viewport. */
        setPosition(x: number, y: number) {
            let bb = this.element.getBoundingClientRect();
            this.width = bb.width;
            this.height = bb.height;

            let minX = this.scrollX;
            let minY = this.scrollY;
            let maxX = document.documentElement.clientWidth - this.width + this.scrollX;
            let maxY = document.documentElement.clientHeight - this.height + this.scrollY;

            x = Math.max(Math.min(x, maxX), minX);
            y = Math.max(Math.min(y, maxY), minY);

            this.element.style.left = (x) + 'px';
            this.element.style.top = (y) + 'px';

            this.x = x;
            this.y = y;
        }

        touchStart(event: TouchEvent) {
            this.handleTouch(event, 'start');
        }

        touchEnd(event: TouchEvent) {
            this.handleTouch(event, 'end');
        }

        touchCancel(event: TouchEvent) {
            this.handleTouch(event, 'cancel');
        }

        touchMove(event: TouchEvent) {
            this.handleTouch(event, 'move');
        }

        /**
         * Tracks a single touch: adopts the first touch when idle, drops the
         * tracked id once it disappears from the active touch list, and routes
         * start/move events of the tracked touch to the drag handlers.
         */
        handleTouch(event: TouchEvent, type: string) {
            let touches = event.touches;

            if (this.touchId >= 0) {
                let has = false;
                for (let i = 0; i < touches.length; i++) {
                    if (touches[i].identifier == this.touchId) {
                        has = true;
                    }
                }
                if (!has) {
                    // The tracked touch is gone; stop tracking it.
                    this.touchId = -1;
                }
            } else if (touches.length > 0) {
                this.touchId = touches[0].identifier;
            }

            for (let i = 0; i < touches.length; i++) {
                let touch = touches[i];
                if (touch.identifier == this.touchId) {
                    if (type == 'start') {
                        this.handleDownEvent(touch.clientX, touch.clientY);
                    } else if (type == 'move') {
                        // Prevent the page from scrolling while dragging.
                        event.preventDefault();
                        this.handleMoveEvent(touch.clientX, touch.clientY);
                    } else if (type == 'end' || type == 'cancel') {
                        // No-op: cleanup happens via the touch-list check above.
                    }
                    break;
                }
            }
        }

        mouseDown(event: MouseEvent) {
            this.handleDownEvent(event.clientX, event.clientY);
            // Listen on the document so the drag survives leaving the handle.
            document.addEventListener('mousemove', this.mouseMoveBound);
            document.addEventListener('mouseup', this.mouseUpBound);
        }

        mouseMove(event: MouseEvent) {
            this.handleMoveEvent(event.clientX, event.clientY);
        }

        mouseUp(event: MouseEvent) {
            document.removeEventListener('mousemove', this.mouseMoveBound);
            document.removeEventListener('mouseup', this.mouseUpBound);
        }

        /** Records the drag origin relative to the element's top-left corner. */
        handleDownEvent(clientX: number, clientY: number) {
            let bb = this.element.getBoundingClientRect();
            this.startDragX = clientX - bb.left;
            this.startDragY = clientY - bb.top;
            this.width = bb.width;
            this.height = bb.height;
        }

        /** Repositions the element for a pointer move and fires callbacks. */
        handleMoveEvent(clientX: number, clientY: number) {
            if (this.scrollWithPage) {
                this.scrollX = window.pageXOffset;
                this.scrollY = window.pageYOffset;
            } else {
                this.scrollX = this.scrollY = 0;
            }

            let x = clientX - this.startDragX + this.scrollX;
            let y = clientY - this.startDragY + this.scrollY;
            this.setPosition(x, y);

            for (let i = 0; i < this.moveCallbacks.length; i++) {
                this.moveCallbacks[i]();
            }
        }
    }
}
|
#!/bin/sh
# Configure-and-build helper: wipes the CMake cache, configures the build
# against locally installed Boost and AM++ with the Cray compiler wrappers
# (CC/cc) and a static Cray MPICH, then builds the performance_test target.
export BOOST_INSTALL=/home/users/p02119/software/install/boost
export AMPP_INSTALL=/home/users/p02119/software/install/ampp
export BOOST_ROOT=$BOOST_INSTALL
# Start clean so stale cache entries cannot leak into this configure run.
rm -rf CMakeCache.txt CMakeFiles
make clean
cmake .. -DAM++_LIBRARY:FILEPATH=$AMPP_INSTALL/lib/libampp.a \
-DAM++_INCLUDE_DIR:PATH=$AMPP_INSTALL/include \
-DLIBNBC_INCLUDE_DIR= \
-DCMAKE_CXX_FLAGS:STRING="-std=c++11 -ggdb -DAMPLUSPLUS_BUILTIN_ATOMICS -UDC_USE_PRIORITY -UDC_USE_EXPLICIT_POLLING -UDC_USE_HANDLERS_PENDING -UDC_USE_AGGRESSIVE_PRIORITY -UDC_USE_POLL_ONCE" \
-DBoost_INCLUDE_DIR:PATH=$BOOST_INSTALL/include \
-DBoost_LIBRARY_DIR:PATH=$BOOST_INSTALL/lib \
-DBoost_RANDOM_LIBRARY_DEBUG:FILEPATH=$BOOST_INSTALL/lib/libboost_random.a \
-DBoost_RANDOM_LIBRARY_RELEASE:FILEPATH=$BOOST_INSTALL/lib/libboost_random.a \
-DBoost_SYSTEM_LIBRARY_DEBUG:FILEPATH=$BOOST_INSTALL/lib/libboost_system.a \
-DBoost_SYSTEM_LIBRARY_RELEASE:FILEPATH=$BOOST_INSTALL/lib/libboost_system.a \
-DBoost_THREAD_LIBRARY_DEBUG:FILEPATH=$BOOST_INSTALL/lib/libboost_thread.a \
-DBoost_THREAD_LIBRARY_RELEASE:FILEPATH=$BOOST_INSTALL/lib/libboost_thread.a \
-DBoost_SYSTEM_LIBRARY:FILEPATH=$BOOST_INSTALL/lib/libboost_system.a \
-DBoost_THREAD_LIBRARY:FILEPATH=$BOOST_INSTALL/lib/libboost_thread.a \
-DCMAKE_CXX_COMPILER:FILEPATH=CC \
-DCMAKE_C_COMPILER:FILEPATH=cc \
-DMPI_LIBRARY:FILEPATH=/opt/cray/mpt/7.1.2/gni/mpich2-gnu/49/lib/libmpichcxx_gnu_49_mt.a \
-DMPI_INCLUDE_PATH:PATH=/opt/cray/mpt/7.1.2/gni/mpich2-gnu/49/include
make performance_test VERBOSE=1
|
def arr_permutations(arr):
    """Return a list of all permutations of ``arr``.

    Delegates to :func:`perm`, which fills the list via in-place
    backtracking over the whole index range.
    """
    collected = []
    perm(arr, 0, len(arr), collected)
    return collected
def perm(arr, s, e, result):
    """Append to ``result`` every permutation of ``arr[s:e]`` (with the prefix
    ``arr[:s]`` fixed), permuting ``arr`` in place via backtracking.

    Each candidate element is swapped into position ``s``, the suffix is
    permuted recursively, and the swap is then undone to restore ``arr``.
    """
    if s == e-1:
        # Fix: append a *copy*. Appending ``arr`` itself stored the same list
        # object repeatedly, and the backtracking swaps below later restored
        # it — so every entry in ``result`` ended up identical.
        result.append(arr[:])
    else:
        for i in range(s, e):
            arr[s], arr[i] = arr[i], arr[s]
            perm(arr, s+1, e, result)
            arr[s], arr[i] = arr[i], arr[s]
if __name__ == "__main__":
    # Fix: the original referenced an undefined ``arr`` (NameError on import);
    # demonstrate with a small sample input instead, and only when run as a script.
    arr = [1, 2, 3]
    permutations = arr_permutations(arr)
    print('All permutations of the array are: ', permutations)
|
// Test skeleton for getSharedPackageJson(): both cases are placeholders
// (test.todo) still to be implemented.
describe('getSharedPackageJson()', () => {
  // Case 1: no shared package.json config exists -> blank content expected.
  test.todo(
    'should get blank package.json content if there is no shared package.json config',
  );
  // Case 2: a shared config exists -> its content should be returned.
  test.todo(
    'should get shared package.json content from shared package.json config',
  );
});
|
<reponame>Alok255/hackerrank
import java.util.*;
/**
 * HackerRank "Compare the Triplets": reads Alice's three ratings then Bob's
 * three ratings from stdin and prints "&lt;aliceScore&gt; &lt;bobScore&gt;",
 * where each point goes to whoever has the strictly higher rating per category.
 */
public class CompareTheTriplets {
	public static void main(String[] args) {
		Scanner in = new Scanner(System.in);
		// Read the two triplets in input order: a0 a1 a2 b0 b1 b2.
		int[] alice = new int[3];
		for (int i = 0; i < 3; i++) {
			alice[i] = in.nextInt();
		}
		int[] bob = new int[3];
		for (int i = 0; i < 3; i++) {
			bob[i] = in.nextInt();
		}
		int aliceScore = 0;
		int bobScore = 0;
		// Award one point per category to the strictly greater rating; ties score nothing.
		for (int i = 0; i < 3; i++) {
			if (alice[i] > bob[i]) {
				aliceScore++;
			} else if (alice[i] < bob[i]) {
				bobScore++;
			}
		}
		System.out.println(aliceScore + " " + bobScore);
		in.close();
	}
}
|
package com.google.abs.payloadsdk;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import com.google.abs.payloadsdk.Arduino.Arduino;
import com.google.abs.payloadsdk.Measures.Attitude;
import com.google.abs.payloadsdk.Measures.Energy;
import com.google.abs.payloadsdk.Measures.OrbitalState;
import com.google.abs.payloadsdk.Measures.Power;
import com.google.abs.payloadsdk.SBD.SDB;
import com.google.abs.payloadsdk.SBD.SDBPacket;
/**
 * Android {@link Service} wiring the payload SDK together: on construction it
 * creates and starts an SDB link and attaches an Arduino driver to it.
 *
 * NOTE(review): Android instantiates Services via a no-argument constructor;
 * this class only defines a parameterized constructor, which suggests it is
 * constructed manually rather than through startService() — confirm usage.
 */
public class PayloadApp extends Service {
	// Link object from the SBD package (class is spelled SDB — confirm naming).
	private SDB sdb;
	// Arduino driver that communicates over the SDB link.
	private Arduino arduino;

	public PayloadApp(String appName)
	{
		sdb = new SDB();
		sdb.execute();
		arduino = new Arduino(sdb);
		//sdb.send(new SDBPacket(SDBPacket.CMD.HANDSHAKE, appName.getBytes()));
	}

	/**
	 * @return the Arduino driver attached to the SDB link
	 */
	public Arduino getArduino()
	{
		return arduino;
	}

	/**
	 * @return always null — not implemented
	 */
	public Attitude getAttitude()
	{
		return null;
	}

	/**
	 * @return always null — not implemented
	 */
	public Energy getEnergy()
	{
		return null;
	}

	/**
	 * @return always null — not implemented
	 */
	public Power getPower()
	{
		return null;
	}

	/**
	 * @return always null — not implemented
	 */
	public OrbitalState orbitalState()
	{
		return null;
	}

	/**
	 * Binding is not supported; always returns null.
	 */
	@Override
	public IBinder onBind(Intent intent)
	{
		return null;
	}
}
|
<reponame>quann169/MotownBlueCurrent
/**
* Copyright (C) 2013 <EMAIL> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.motown.identificationauthorization.app;
import io.motown.domain.api.chargingstation.AuthorizationRequestedEvent;
import io.motown.domain.api.chargingstation.CorrelationToken;
import io.motown.domain.api.chargingstation.DenyAuthorizationCommand;
import io.motown.domain.api.chargingstation.GrantAuthorizationCommand;
import io.motown.domain.api.security.AddOnIdentity;
import io.motown.domain.api.security.IdentityContext;
import io.motown.domain.api.security.NullUserIdentity;
import io.motown.domain.api.security.TypeBasedAddOnIdentity;
import org.axonframework.commandhandling.CommandMessage;
import org.axonframework.common.annotation.MetaData;
import org.axonframework.eventhandling.annotation.EventHandler;
import java.util.Collections;
import static org.axonframework.commandhandling.GenericCommandMessage.asCommandMessage;
/**
 * Listens for authorization-request events, validates the identifying token
 * through the {@link IdentificationAuthorizationService}, and dispatches the
 * resulting grant/deny command via the configured command gateway.
 */
public class AuthorizationEventListener {

    private IdentificationAuthorizationService identificationAuthorizationService;

    private AuthorizationCommandGateway commandGateway;

    private static final String ADD_ON_TYPE = "IDENTIFICATION-AUTHORIZATION";

    private AddOnIdentity addOnIdentity;

    /**
     * Listens for {@code AuthorizationRequestedEvent} and requests the {@code IdentificationAuthorizationService} to
     * execute the authorization. Sends a {@code GrantAuthorizationCommand} if identification is successful,
     * {@code DenyAuthorizationCommand} if not. The passed correlation id will be added to the outgoing command if
     * it's not null or empty.
     *
     * @param event the authorization request event.
     * @param correlationToken correlation token which will be added to outgoing command if it's not null or empty.
     */
    @EventHandler
    protected void onEvent(AuthorizationRequestedEvent event,
                           @MetaData(value = CorrelationToken.KEY, required = false) CorrelationToken correlationToken) {
        boolean valid = identificationAuthorizationService.isValid(event.getIdentifyingToken());

        // Commands are issued on behalf of this add-on, not a human user.
        IdentityContext identityContext = new IdentityContext(addOnIdentity, new NullUserIdentity());

        // Fix: use the parameterized CommandMessage<?> instead of the raw type.
        CommandMessage<?> commandMessage;
        if (valid) {
            commandMessage = asCommandMessage(new GrantAuthorizationCommand(event.getChargingStationId(), event.getIdentifyingToken(), identityContext));
        } else {
            commandMessage = asCommandMessage(new DenyAuthorizationCommand(event.getChargingStationId(), event.getIdentifyingToken(), identityContext));
        }

        // Propagate the correlation token so replies can be matched to requests.
        if (correlationToken != null) {
            commandMessage = commandMessage.andMetaData(Collections.singletonMap(CorrelationToken.KEY, correlationToken));
        }

        commandGateway.send(commandMessage);
    }

    /**
     * Sets the service used to validate identifying tokens.
     */
    public void setIdentificationAuthorizationService(IdentificationAuthorizationService identificationAuthorizationService) {
        this.identificationAuthorizationService = identificationAuthorizationService;
    }

    /**
     * Sets the gateway used to dispatch grant/deny commands.
     */
    public void setCommandGateway(AuthorizationCommandGateway commandGateway) {
        this.commandGateway = commandGateway;
    }

    /**
     * Wraps the given add-on id in a {@link TypeBasedAddOnIdentity} with this
     * listener's fixed add-on type.
     */
    public void setAddOnIdentity(String addOnIdentity) {
        this.addOnIdentity = new TypeBasedAddOnIdentity(ADD_ON_TYPE, addOnIdentity);
    }
}
|
#!/bin/bash
# Pins the git branch consumed by the surrounding build/release tooling.
GIT_BRANCH="release/5.0"
|
import boto3
import botocore
import os
from time import sleep, time
from collections import namedtuple
# Immutable record of an EBS volume as returned by describe_volumes.
AWSVolume = namedtuple('AWSVolume', ['id', 'vtype', 'size', 'attachments'])
# Target performance settings for a gp3 conversion.
GP3Config = namedtuple('GP3Config', ['iops', 'throughput'])


def newGP3Config(iops: int = 3000, throughput: int = 125) -> GP3Config:
    """Build a :class:`GP3Config`, defaulting to gp3 baseline performance
    (3000 IOPS, 125 MiB/s throughput)."""
    return GP3Config(iops, throughput)
class GPConverter:
    """Discovers gp2 EBS volumes in ``GP_REGION`` (optionally filtered by the
    VPC of the instance they are attached to) and converts them to gp3 using
    the supplied :class:`GP3Config`.

    At most one of ``ExcludeVpcs`` / ``OnlyVpcs`` may be non-empty; supplying
    both raises :class:`ID10TException`. A missing ``GP_REGION`` environment
    variable raises :class:`MissingVariableException`.
    """

    # NOTE: the parameter name ``GP3Config`` shadows the namedtuple class; it
    # is kept because callers pass it as a keyword argument.
    def __init__(self, GP3Config: GP3Config, ExcludeVpcs: list = None, OnlyVpcs: list = None):
        # EC2 ModifyVolume is rate limited; model the budget as a token bucket.
        self.tokens = 200
        self.converted = []   # volumes successfully modified
        self.failures = []    # volumes whose modification raised a ClientError
        self.start_time = time()
        # Fix: the previous literal-[] defaults were mutable objects shared
        # across every instance; default to fresh lists instead.
        self.excluded_vpcs = ExcludeVpcs if ExcludeVpcs is not None else []
        self.only_vpcs = OnlyVpcs if OnlyVpcs is not None else []
        self.GP3Config = GP3Config
        if len(self.only_vpcs) > 0 and len(self.excluded_vpcs) > 0:
            raise ID10TException(
                """
                You are using both 'only' and 'exclude' VPC filters.
                Perhaps rethink your approach.
                """
            )
        try:
            self.region = os.environ["GP_REGION"]
        except KeyError:
            raise MissingVariableException(
                """
                Missing required environment variable(s):
                GP_REGION
                """
            )
        self.client = boto3.client("ec2", self.region)
        self._get_volumes()._find_gp2()._filter_vpcs()

    def _get_volumes(self) -> object:
        """Load all volumes in the region into ``self._volumes``.

        NOTE(review): describe_volumes is not paginated here, so very large
        regions are only partially covered — confirm whether pagination is
        needed for your account sizes.
        """
        result = self.client.describe_volumes()
        self._volumes = [
            AWSVolume(
                volume["VolumeId"],
                volume["VolumeType"],
                volume["Size"],
                volume["Attachments"]
            )
            for volume in result["Volumes"]
        ]
        return self

    def _find_gp2(self) -> object:
        """Keep only gp2 volumes (the conversion candidates)."""
        self._volumes = [vol for vol in self._volumes if vol.vtype == "gp2"]
        return self

    def _instance_volume_map(self) -> tuple:
        """Group attached volumes by the instance they are attached to.

        Returns ``(ivmap, iids)``: instance-id -> [volumes], plus the
        de-duplicated id list. Unattached volumes are not represented, so the
        VPC filters drop them (matching the original behavior).
        """
        ivmap = dict()
        iids = list()
        for vol in self._volumes:
            if len(vol.attachments) > 0:
                iid = vol.attachments[0]['InstanceId']
                if iid not in ivmap:
                    iids.append(iid)
                ivmap.setdefault(iid, []).append(vol)
        return ivmap, iids

    def _described_instances(self, iids: list) -> list:
        """Describe the given instances, flattened across reservations.

        Fix: describe_instances groups instances per reservation; the previous
        code read only ``Reservations[0]`` and silently ignored the rest.
        """
        ir = self.client.describe_instances(InstanceIds=iids)
        return [inst for res in ir['Reservations'] for inst in res['Instances']]

    def _exclude_vpcs(self) -> object:
        """Drop volumes attached to instances inside an excluded VPC."""
        if len(self.excluded_vpcs) == 0 or len(self._volumes) == 0:
            return self
        ivmap, iids = self._instance_volume_map()
        if not iids:
            # Fix: calling describe_instances with an empty id list returns
            # ALL instances; with no attached volumes, nothing can survive.
            self._volumes = []
            return self
        r = list()
        for instance in self._described_instances(iids):
            if instance['VpcId'] not in self.excluded_vpcs:
                r.extend(ivmap[instance['InstanceId']])
        self._volumes = r
        return self

    def _only_vpcs(self) -> object:
        """Keep only volumes attached to instances inside a listed VPC."""
        if len(self.only_vpcs) == 0 or len(self._volumes) == 0:
            return self
        ivmap, iids = self._instance_volume_map()
        if not iids:
            self._volumes = []
            return self
        r = list()
        for instance in self._described_instances(iids):
            if instance['VpcId'] in self.only_vpcs:
                r.extend(ivmap[instance['InstanceId']])
        self._volumes = r
        return self

    def _filter_vpcs(self) -> object:
        """Apply whichever VPC filter (if any) was configured."""
        return self._exclude_vpcs()._only_vpcs()

    @property
    def volumes(self) -> list:
        """The volumes currently selected for conversion."""
        return self._volumes

    def _handle_out_of_tokens(self):
        """Refill the token bucket from elapsed run time, then wait a beat."""
        # capture duration script has been running;
        # add 5 tokens per second run with margin for error
        seconds_run = int(time() - self.start_time)
        self.tokens += int(seconds_run * 0.8) * 5
        # wait 1 second, add 5 more
        sleep(1)
        self.tokens += 5
        # reset start time
        self.start_time = time()

    def _handle_iteration(self, index: int) -> tuple:
        """Consume a token and advance the cursor.

        Returns ``(done, next_index)``. Fix: the previous check
        ``index + 1 == len(...)`` (evaluated after incrementing) declared
        completion one volume early — the last volume was never converted,
        and a single-volume list indexed past its end instead.
        """
        self.tokens -= 1
        index += 1
        return index >= len(self._volumes), index

    def convert_volumes(self):
        """Convert every selected volume to gp3, honoring the token budget."""
        if len(self._volumes) == 0:
            print("Nothing to do!")
            return
        done = False
        index = 0
        while self.tokens > 0:
            print("Modifying {}...".format(self._volumes[index]))
            try:
                self.client.modify_volume(
                    VolumeId=self._volumes[index].id,
                    VolumeType='gp3',
                    Iops=self.GP3Config.iops,
                    Throughput=self.GP3Config.throughput
                )
                # Fix: record successes — ``converted`` was declared but never
                # populated, even though ``failures`` was.
                self.converted.append(self._volumes[index])
                done, index = self._handle_iteration(index)
                if done:
                    break
                if self.tokens == 0:
                    self._handle_out_of_tokens()
            except botocore.exceptions.ClientError as error:
                code = error.response['Error']['Code']
                print("[ERROR] {}".format(code))
                self.failures.append(self._volumes[index])
                done, index = self._handle_iteration(index)
                if done:
                    break
                if self.tokens == 0:
                    self._handle_out_of_tokens()
                continue
        if done:
            return
class MissingVariableException(Exception):
    """Raised when a required environment variable (e.g. GP_REGION) is unset."""
    pass
class ID10TException(Exception):
    """Raised on contradictory configuration (both VPC filters supplied)."""
    pass
# gc = GPConverter(
# GP3Config = newGP3Config(),
# ExcludeVpcs = [
# "vpc-00dd9ef731f6a8c4a"
# ]
# )
# gc = GPConverter(
# GP3Config = newGP3Config(),
# OnlyVpcs = [
# "vpc-00dd9ef731f6a8c4a"
# ]
# )
# gc.convert_volumes()
|
<filename>MCMC/Model_selection_mND.hpp<gh_stars>0
// Include guard for the model-selection header: aggregates the mND model
// definition and its configuration loader.
#ifndef INCLUDED_MODEL_SELECTION
#define INCLUDED_MODEL_SELECTION
#include "Model_mND.hpp"
#include "loadConfig_mND.hpp"
#endif
|
# Environment template for the Fyle integration: fill in the blanks before
# sourcing. Keep real credential values out of version control.
export CLUSTER_DOMAIN=''
export FYLE_TOKEN_URI=''
export FYLE_CLIENT_ID=''
export FYLE_CLIENT_SECRET=''
export REFRESH_TOKEN=''
|
<reponame>zjtyxy/bimServer
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: edge.proto
package com.ciat.bim.server.edge.gen;
/**
 * Accessor contract for the {@code edge.ConnectRequestMsg} protobuf message.
 * Generated from edge.proto — regenerate rather than editing by hand.
 */
public interface ConnectRequestMsgOrBuilder extends
    // @@protoc_insertion_point(interface_extends:edge.ConnectRequestMsg)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <code>string edgeRoutingKey = 1;</code>
   * @return The edgeRoutingKey.
   */
  String getEdgeRoutingKey();
  /**
   * <code>string edgeRoutingKey = 1;</code>
   * @return The bytes for edgeRoutingKey.
   */
  com.google.protobuf.ByteString
      getEdgeRoutingKeyBytes();

  /**
   * <code>string edgeSecret = 2;</code>
   * @return The edgeSecret.
   */
  String getEdgeSecret();
  /**
   * <code>string edgeSecret = 2;</code>
   * @return The bytes for edgeSecret.
   */
  com.google.protobuf.ByteString
      getEdgeSecretBytes();
}
|
#!/usr/bin/env bash
# Container entrypoint: prepares the runtime environment (timezone, font
# cache), waits for the database, then either runs the Django/Gunicorn stack
# (when invoked with no arguments) or execs the supplied command.
cd /usr/src/app
export DJANGO_SETTINGS_MODULE=Translation.settings
export LANG=C.UTF-8
# added by Emil Abbasov (IOI2019) to fix the container timezone issue
# set noninteractive installation
export DEBIAN_FRONTEND=noninteractive
#install tzdata package
apt-get install -y tzdata
# set your timezone
ln -fs /usr/share/zoneinfo/Asia/Baku /etc/localtime
dpkg-reconfigure --frontend noninteractive tzdata
fc-cache
# Block until PostgreSQL accepts connections (bash /dev/tcp probe, 2s timeout
# per attempt), but only when DB_HOST is configured.
if [[ -n $DB_HOST ]]; then
while ! timeout 2 bash -c "cat < /dev/null > /dev/tcp/$DB_HOST/5432" 2> /dev/null; do
echo "Waiting for db..."
sleep 1
done
fi
# No arguments: run the standard Django startup sequence under Gunicorn.
if [[ $# -eq 0 ]]; then
echo "Collecting staticfiles"
python3 manage.py collectstatic --noinput
echo "Migrating Models"
python3 manage.py migrate
echo "Starting Gunicorn"
# For using docker in development settings, add `--reload` option below to the execution line of gunicorn
exec /usr/local/bin/gunicorn Translation.wsgi:application -w "${GUNICORN_WORKERS:-1}" -b :9000
fi
# Otherwise exec the supplied command (management commands, shells, etc.).
exec "$@"
|
#
# SIMPLE
#
# "Simple" text filter: renders plain text to HTML by converting each newline
# into a <br/> tag. No escaping is performed, so the input is presumably
# already HTML-safe — confirm against the callers.
TextFilters.define :simple, "Simple" do
  def render_text(text)
    text.gsub("\n", '<br/>')
  end
end
|
from ..base import AST
# Fully-qualified node-class identifier consumed by the AST base machinery.
CLASS = "statements.assignment"

class AssignmentStatement(AST):
    """
    Assignment statement node for the AST (example: ``x := 56``).

    Holds the target ``variable`` node and the expression ``aexp`` whose value
    is assigned to it. NOTE(review): the inherited ``interpret`` is not visible
    here; the previous docstring ("return variable by name from environment")
    described variable *lookup*, not assignment — presumably the evaluator
    binds the evaluated ``aexp`` to ``variable``; confirm against the base AST.
    """
    def __init__(self, variable, aexp):
        super().__init__(CLASS, "assign_statement")
        self.variable = variable  # target variable node
        self.aexp = aexp          # expression producing the assigned value
        self.children = [variable, aexp]
|
class RoboticFinger:
    """Toy state machine for a robotic finger that tries to return to the
    "main" location, giving up after ``max_try`` movement attempts."""

    def __init__(self):
        self.num_attempts = 0     # movement attempts made so far
        self.max_try = 10         # failsafe threshold on attempts
        self.location = "main"    # start at the main location

    def _in_main(self):
        """True when the finger is at the main location."""
        return self.location == "main"

    def _in_exam(self):
        """True when the finger is at the exam location."""
        return self.location == "exam"

    def _exit_exam(self):
        """Leave the exam location directly back to main."""
        self.location = "main"

    def move(self):
        """Step toward "main" until it is reached or the failsafe trips."""
        while not self._in_main():
            # The exam location has a direct exit; elsewhere, back up one step.
            if self._in_exam():
                self._exit_exam()
            else:
                self.move_back()
            self.num_attempts += 1
            if self.num_attempts >= self.max_try:
                print("Failsafe triggered: Maximum attempts reached")
                return

    def move_back(self):
        """Move one step back along the placeholder location chain; unknown
        locations are left unchanged (the failsafe in ``move`` handles them)."""
        transitions = {
            "main": "previous_location",
            "previous_location": "another_previous_location",
        }
        self.location = transitions.get(self.location, self.location)
|
<reponame>edge-fortress/OSS-13<gh_stars>10-100
#include "IHasRepeatableID.h"
// Pool of IDs released by destroyed objects, available for reuse.
static std::queue<uint32_t> freeIDs;

// Returns a recycled ID when one is available; otherwise hands out the next
// fresh ID from a monotonically increasing counter starting at 1.
uint32_t getNextID() {
    if (freeIDs.empty()) {
        static uint32_t nextID = 1;
        return nextID++;
    }
    uint32_t recycled = freeIDs.front();
    freeIDs.pop();
    return recycled;
}
// Acquires a unique (possibly recycled) ID on construction.
IHasRepeatableID::IHasRepeatableID() :
	id(getNextID())
{ }

// Returns this object's ID to the free pool for reuse by future instances.
IHasRepeatableID::~IHasRepeatableID() {
	freeIDs.push(id);
}

// The ID held for this object's lifetime.
uint32_t IHasRepeatableID::ID() const {
	return id;
}
|
#!/bin/bash
# This utility file contains functions that wrap commands to be tested. All wrapper functions run commands
# in a sub-shell and redirect all output. Tests in test-cmd *must* use these functions for testing.

# We assume ${OS_ROOT} is set
# (text.sh/util.sh presumably supply the os::cmd::internal::* helpers used below — confirm)
source "${OS_ROOT}/hack/text.sh"
source "${OS_ROOT}/hack/util.sh"
# expect_success runs the cmd and expects an exit code of 0
function os::cmd::expect_success() {
	# Guard: exactly one argument — the command string — is required.
	if [[ $# -ne 1 ]]; then
		echo "os::cmd::expect_success expects only one argument, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1"
}
# expect_failure runs the cmd and expects a non-zero exit code
function os::cmd::expect_failure() {
	# Guard: exactly one argument — the command string — is required.
	if [[ $# -ne 1 ]]; then
		echo "os::cmd::expect_failure expects only one argument, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::failure_func"
}
# expect_success_and_text runs the cmd and expects an exit code of 0
# as well as running a grep test to find the given string in the output
function os::cmd::expect_success_and_text() {
	# Guard: the command string and the expected text are both required.
	if [[ $# -ne 2 ]]; then
		echo "os::cmd::expect_success_and_text expects two arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::success_func" "$2"
}
# expect_failure_and_text runs the cmd and expects a non-zero exit code
# as well as running a grep test to find the given string in the output
function os::cmd::expect_failure_and_text() {
	# Guard: the command string and the expected text are both required.
	if [[ $# -ne 2 ]]; then
		echo "os::cmd::expect_failure_and_text expects two arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::failure_func" "$2"
}
# expect_success_and_not_text runs the cmd and expects an exit code of 0
# as well as running a grep test to ensure the given string is not in the output
function os::cmd::expect_success_and_not_text() {
	# Guard: the command string and the forbidden text are both required.
	if [[ $# -ne 2 ]]; then
		echo "os::cmd::expect_success_and_not_text expects two arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::success_func" "$2" "os::cmd::internal::failure_func"
}
# expect_failure_and_not_text runs the cmd and expects a non-zero exit code
# as well as running a grep test to ensure the given string is not in the output
function os::cmd::expect_failure_and_not_text() {
	# Guard: the command string and the forbidden text are both required.
	if [[ $# -ne 2 ]]; then
		echo "os::cmd::expect_failure_and_not_text expects two arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::failure_func" "$2" "os::cmd::internal::failure_func"
}
# expect_code runs the cmd and expects a given exit code
function os::cmd::expect_code() {
	# Guard: the command string and the expected exit code are both required.
	if [[ $# -ne 2 ]]; then
		echo "os::cmd::expect_code expects two arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::specific_code_func $2"
}
# expect_code_and_text runs the cmd and expects the given exit code
# as well as running a grep test to find the given string in the output
function os::cmd::expect_code_and_text() {
	# Guard: command, expected exit code, and expected text are all required.
	if [[ $# -ne 3 ]]; then
		echo "os::cmd::expect_code_and_text expects three arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::specific_code_func $2" "$3"
}
# expect_code_and_not_text runs the cmd and expects the given exit code
# as well as running a grep test to ensure the given string is not in the output
function os::cmd::expect_code_and_not_text() {
	# Guard: command, expected exit code, and forbidden text are all required.
	if [[ $# -ne 3 ]]; then
		echo "os::cmd::expect_code_and_not_text expects three arguments, got $#"
		exit 1
	fi
	os::cmd::internal::expect_exit_code_run_grep "$1" "os::cmd::internal::specific_code_func $2" "$3" "os::cmd::internal::failure_func"
}
# Duration units for the try_until_* family, all expressed in milliseconds
# (deadlines below are computed against `date +%s000`).
millisecond=1
second=$(( 1000 * millisecond ))
minute=$(( 60 * second ))
# os::cmd::try_until_success runs the cmd in a small interval until either the command succeeds or times out
# the default time-out for os::cmd::try_until_success is 60 seconds.
# the default interval for os::cmd::try_until_success is 200ms
function os::cmd::try_until_success() {
	if [[ $# -lt 1 ]]; then echo "os::cmd::try_until_success expects at least one argument, got $#"; exit 1; fi
	local cmd=$1
	# Default to the $minute constant (milliseconds). The previous default was
	# the literal string 'minute' (missing the '$'), which only worked because
	# bash arithmetic happens to resolve variable names; use the value itself,
	# consistent with os::cmd::try_until_failure.
	local duration=${2:-$minute}
	local interval=${3:-0.2}
	os::cmd::internal::run_until_exit_code "${cmd}" "os::cmd::internal::success_func" "${duration}" "${interval}"
}
# os::cmd::try_until_failure runs the cmd until either the command fails or times out
# the default time-out for os::cmd::try_until_failure is 60 seconds.
function os::cmd::try_until_failure() {
	# Error message previously named the wrong function (try_until_success).
	if [[ $# -lt 1 ]]; then echo "os::cmd::try_until_failure expects at least one argument, got $#"; exit 1; fi
	local cmd=$1
	# duration is in milliseconds; defaults to one minute
	local duration=${2:-$minute}
	local interval=${3:-0.2}
	os::cmd::internal::run_until_exit_code "${cmd}" "os::cmd::internal::failure_func" "${duration}" "${interval}"
}
# os::cmd::try_until_text runs the cmd until either the command outputs the desired text or times out
# the default time-out for os::cmd::try_until_text is 60 seconds.
function os::cmd::try_until_text() {
	# Error message previously named the wrong function (try_until_success).
	if [[ $# -lt 2 ]]; then echo "os::cmd::try_until_text expects at least two arguments, got $#"; exit 1; fi
	local cmd=$1
	local text=$2
	# Default to the $minute constant (milliseconds); the previous default was the
	# literal string 'minute' (missing '$'), inconsistent with try_until_failure.
	local duration=${3:-$minute}
	local interval=${4:-0.2}
	os::cmd::internal::run_until_text "${cmd}" "${text}" "${duration}" "${interval}"
}
# Functions in the os::cmd::internal namespace are discouraged from being used outside of os::cmd
# In order to harvest stderr and stdout at the same time into different buckets, we need to stick them into files
# in an intermediate step
# Honors $TMPDIR when set, falling back to /tmp.
BASETMPDIR="${TMPDIR:-"/tmp"}/openshift"
os_cmd_internal_tmpdir="${BASETMPDIR}/test-cmd"
# Captured stdout/stderr of the command under test; run_collecting_output
# appends to these, init_tempdir resets them before each top-level test.
os_cmd_internal_tmpout="${os_cmd_internal_tmpdir}/tmp_stdout.log"
os_cmd_internal_tmperr="${os_cmd_internal_tmpdir}/tmp_stderr.log"
# os::cmd::internal::expect_exit_code_run_grep runs the provided test command and expects a specific
# exit code from that command as well as the success of a specified `grep` invocation. Output from the
# command to be tested is suppressed unless either `VERBOSE=1` or the test fails. This function bypasses
# any error exiting settings or traps set by upstream callers by masking the return code of the command
# with the return code of setting the result variable on failure.
function os::cmd::internal::expect_exit_code_run_grep() {
	local cmd=$1
	# default expected cmd code to 0 for success
	local cmd_eval_func=${2:-os::cmd::internal::success_func}
	# default to nothing
	local grep_args=${3:-}
	# default expected test code to 0 for success
	local test_eval_func=${4:-os::cmd::internal::success_func}
	os::cmd::internal::init_tempdir
	local name=$(os::cmd::internal::describe_call "${cmd}" "${cmd_eval_func}" "${grep_args}" "${test_eval_func}")
	echo "Running ${name}..."
	local start_time=$(os::cmd::internal::seconds_since_epoch)
	# Capture the command's exit code via `echo $?` inside a command substitution
	# so any errexit/trap settings of the caller cannot abort this function.
	local cmd_result=$( os::cmd::internal::run_collecting_output "${cmd}"; echo $? )
	# The eval funcs return 1 for "expectation met" so (( )) below reads naturally.
	local cmd_succeeded=$( ${cmd_eval_func} "${cmd_result}"; echo $? )
	local test_result=0
	if [[ -n "${grep_args}" ]]; then
		# grep against the combined stdout+stderr captured from the command
		test_result=$( os::cmd::internal::run_collecting_output 'os::cmd::internal::get_results | grep -Eq "${grep_args}"'; echo $? )
	fi
	local test_succeeded=$( ${test_eval_func} "${test_result}"; echo $? )
	local end_time=$(os::cmd::internal::seconds_since_epoch)
	local time_elapsed=$(echo "scale=3; ${end_time} - ${start_time}" | bc | xargs printf '%5.3f') # in decimal seconds, we need leading zeroes for parsing later
	# some commands are multi-line, so we may need to clear more than just the previous line
	local cmd_length=$(echo "${cmd}" | wc -l)
	for (( i=0; i<${cmd_length}; i++ )); do
		os::text::clear_last_line
	done
	if (( cmd_succeeded && test_succeeded )); then
		os::text::print_green "SUCCESS after ${time_elapsed}s: ${name}"
		# only show captured output on success when the caller asked for it
		if [[ -n ${VERBOSE-} ]]; then
			os::cmd::internal::print_results
		fi
		return 0
	else
		local cause=$(os::cmd::internal::assemble_causes "${cmd_succeeded}" "${test_succeeded}")
		os::text::print_red_bold "FAILURE after ${time_elapsed}s: ${name}: ${cause}"
		os::text::print_red "$(os::cmd::internal::print_results)"
		return 1
	fi
}
# os::cmd::internal::init_tempdir ensures the capture directory exists and
# removes any stdout/stderr logs left over from a previous test invocation.
function os::cmd::internal::init_tempdir() {
	mkdir -p "${os_cmd_internal_tmpdir}"
	rm -f "${os_cmd_internal_tmpout}" "${os_cmd_internal_tmperr}"
}
# os::cmd::internal::describe_call determines the file:line of the latest function call made
# from outside of this file in the call stack, and the name of the function being called from
# that line, returning a string describing the call
function os::cmd::internal::describe_call() {
	local cmd=$1
	local cmd_eval_func=$2
	local grep_args=${3:-}
	local test_eval_func=${4:-}
	# e.g. "test/foo.sh:12"
	local caller_id=$(os::cmd::internal::determine_caller)
	local full_name="${caller_id}: executing '${cmd}'"
	# e.g. "success", "failure", "exit code 2", "any result"
	local cmd_expectation=$(os::cmd::internal::describe_expectation "${cmd_eval_func}")
	local full_name="${full_name} expecting ${cmd_expectation}"
	if [[ -n "${grep_args}" ]]; then
		# the grep expectation is phrased positively or negatively depending on
		# which evaluation function the caller chose for the text test
		local text_expecting=
		case "${test_eval_func}" in
		"os::cmd::internal::success_func")
			text_expecting="text" ;;
		"os::cmd::internal::failure_func")
			text_expecting="not text" ;;
		esac
		full_name="${full_name} and ${text_expecting} '${grep_args}'"
	fi
	echo "${full_name}"
}
# os::cmd::internal::determine_caller determines the file relative to the OpenShift Origin root directory
# and line number of the function call to the outer os::cmd wrapper function
function os::cmd::internal::determine_caller() {
	local call_depth=
	local len_sources="${#BASH_SOURCE[@]}"
	# walk the stack until we leave hack/cmd_util.sh — that frame is the caller
	for (( i=0; i<${len_sources}; i++ )); do
		if [ ! $(echo "${BASH_SOURCE[i]}" | grep "hack/cmd_util\.sh$") ]; then
			# NOTE(review): this assigns the literal string "i", not its value; it
			# only works because array subscripts below are arithmetic contexts
			# that resolve the name — consider `call_depth=$i`. TODO confirm.
			call_depth=i
			break
		fi
	done
	local caller_file="${BASH_SOURCE[${call_depth}]}"
	if which realpath >&/dev/null; then
		# if the caller has `realpath`, we can use it to make our file names cleaner by
		# trimming the absolute file path up to `...openshift/origin/` and showing only
		# the relative path from the Origin root directory
		caller_file="$( realpath "${caller_file}" )"
		caller_file="${caller_file//*openshift\/origin\/}"
	fi
	# BASH_LINENO[n-1] is the line in frame n that made the call
	local caller_line="${BASH_LINENO[${call_depth}-1]}"
	echo "${caller_file}:${caller_line}"
}
# os::cmd::internal::describe_expectation maps a return-code evaluation function
# to a human-readable phrase for test descriptions. Unrecognized inputs produce
# no output at all.
function os::cmd::internal::describe_expectation() {
	local eval_func="$1"
	case "${eval_func}" in
		"os::cmd::internal::success_func")
			echo "success" ;;
		"os::cmd::internal::failure_func")
			echo "failure" ;;
		"os::cmd::internal::specific_code_func"*[0-9])
			# the expected code is appended to the function name, e.g. "... 2"
			local wanted_code
			wanted_code="$(grep -Eo "[0-9]+$" <<< "${eval_func}")"
			echo "exit code ${wanted_code}" ;;
		"")
			echo "any result"
	esac
}
# os::cmd::internal::seconds_since_epoch returns the number of seconds elapsed since the epoch
# with milli-second precision
function os::cmd::internal::seconds_since_epoch() {
local ns=$(date +%s%N)
# if `date` doesn't support nanoseconds, return second precision
if [[ "$ns" == *N ]]; then
date "+%s.000"
return
fi
echo $(bc <<< "scale=3; ${ns}/1000000000")
}
# os::cmd::internal::run_collecting_output runs the command given, piping stdout and stderr into
# the given files, and returning the exit code of the command
function os::cmd::internal::run_collecting_output() {
	local cmd=$1
	local result=
	# The command substitution runs the eval in a subshell and `|| result=$?`
	# masks its exit code, so callers' errexit settings/traps cannot fire here.
	# stdout/stderr are appended to the capture files; the substitution itself
	# captures nothing because both streams are redirected.
	$( eval "${cmd}" 1>>"${os_cmd_internal_tmpout}" 2>>"${os_cmd_internal_tmperr}" ) || result=$?
	local result=${result:-0} # if we haven't set result yet, the command succeeded
	return "${result}"
}
# os::cmd::internal::success_func determines if the input exit code denotes success
# this function returns 0 for false and 1 for true to be compatible with arithmetic tests
function os::cmd::internal::success_func() {
	local exit_code="$1"
	# deliberately inverted: a zero exit code (success) yields return value 1 so
	# the caller's arithmetic check (( result )) reads as "did it succeed?"
	if [[ "${exit_code}" -eq "0" ]]; then
		return 1
	fi
	return 0
}
# os::cmd::internal::failure_func determines if the input exit code denotes failure
# this function returns 0 for false and 1 for true to be compatible with arithmetic tests
function os::cmd::internal::failure_func() {
	local exit_code="$1"
	# deliberately inverted: a non-zero exit code (failure) yields return value 1
	# so the caller's arithmetic check (( result )) reads as "did it fail?"
	if [[ "${exit_code}" -ne "0" ]]; then
		return 1
	fi
	return 0
}
# os::cmd::internal::specific_code_func determines if the input exit code matches the given code
# this function returns 0 for false and 1 for true to be compatible with arithmetic tests
function os::cmd::internal::specific_code_func() {
	local wanted_code="$1"
	local actual_code="$2"
	# deliberately inverted: a match yields return value 1 so that the caller's
	# arithmetic check (( result )) reads as "did we get the expected code?"
	if [[ "${actual_code}" -eq "${wanted_code}" ]]; then
		return 1
	fi
	return 0
}
# os::cmd::internal::get_results emits the captured stdout followed by the
# captured stderr of the command under test.
function os::cmd::internal::get_results() {
	cat "${os_cmd_internal_tmpout}"
	cat "${os_cmd_internal_tmperr}"
}
# os::cmd::internal::print_try_until_results returns a concise view of the stdout and stderr output files
# using a timeline format, where consecutive output lines that are the same are condensed into one line
# with a counter
function os::cmd::internal::print_try_until_results() {
	# `grep -vq $'\x1e'` succeeds when at least one line is NOT a record-separator
	# marker, i.e. the command produced some real output between attempts
	if grep -vq $'\x1e' "${os_cmd_internal_tmpout}"; then
		echo "Standard output from the command:"
		os::cmd::internal::compress_output "${os_cmd_internal_tmpout}"
	else
		echo "There was no output from the command."
	fi
	if grep -vq $'\x1e' "${os_cmd_internal_tmperr}"; then
		echo "Standard error from the command:"
		os::cmd::internal::compress_output "${os_cmd_internal_tmperr}"
	else
		echo "There was no error output from the command."
	fi
}
# os::cmd::internal::mark_attempt marks the end of an attempt in the stdout and stderr log files
# this is used to make the try_until_* output more concise
function os::cmd::internal::mark_attempt() {
	# Append the record-separator marker to *both* logs. The previous
	# `echo -e '\x1e' >> out | tee err` was broken: the `>>` redirection had
	# already consumed echo's stdout, so tee received no input — and, lacking
	# `-a`, it truncated the stderr log on every attempt.
	echo -e '\x1e' >> "${os_cmd_internal_tmpout}"
	echo -e '\x1e' >> "${os_cmd_internal_tmperr}"
}
# os::cmd::internal::compress_output compresses an output file into timeline representation
function os::cmd::internal::compress_output() {
	local logfile=$1
	# quote both expansions so paths containing spaces or glob characters work
	awk -f "${OS_ROOT}/hack/compress.awk" "${logfile}"
}
# os::cmd::internal::print_results pretty-prints the captured stdout and stderr
# files, emitting a placeholder line when a stream produced nothing.
function os::cmd::internal::print_results() {
	if [[ ! -s "${os_cmd_internal_tmpout}" ]]; then
		echo "There was no output from the command."
	else
		echo "Standard output from the command:"
		cat "${os_cmd_internal_tmpout}"
	fi
	if [[ ! -s "${os_cmd_internal_tmperr}" ]]; then
		echo "There was no error output from the command."
	else
		echo "Standard error from the command:"
		cat "${os_cmd_internal_tmperr}"
	fi
}
# os::cmd::internal::assemble_causes determines from the two input booleans which part of the test
# failed and generates a nice delimited list of failure causes
function os::cmd::internal::assemble_causes() {
	local cmd_ok="$1"
	local test_ok="$2"
	local causes=()
	(( cmd_ok )) || causes+=("the command returned the wrong error code")
	(( test_ok )) || causes+=("the output content test failed")
	# join with "; " by prefixing each entry, then stripping the leading "; "
	local joined
	joined="$(printf '; %s' "${causes[@]}")"
	echo "${joined:2}"
}
# os::cmd::internal::run_until_exit_code runs the provided command until the exit code test given
# succeeds or the timeout given runs out. Output from the command to be tested is suppressed unless
# either `VERBOSE=1` or the test fails. This function bypasses any error exiting settings or traps
# set by upstream callers by masking the return code of the command with the return code of setting
# the result variable on failure.
function os::cmd::internal::run_until_exit_code() {
	local cmd=$1
	local cmd_eval_func=$2
	# duration is in milliseconds; interval is in (possibly fractional) seconds
	local duration=$3
	local interval=$4
	os::cmd::internal::init_tempdir
	local description=$(os::cmd::internal::describe_call "${cmd}" "${cmd_eval_func}")
	local duration_seconds=$(echo "scale=3; $(( duration )) / 1000" | bc | xargs printf '%5.3f')
	local description="${description}; re-trying every ${interval}s until completion or ${duration_seconds}s"
	echo "Running ${description}..."
	local start_time=$(os::cmd::internal::seconds_since_epoch)
	# deadline in epoch milliseconds
	local deadline=$(( $(date +%s000) + $duration ))
	local cmd_succeeded=0
	while [ $(date +%s000) -lt $deadline ]; do
		# capture exit code via `echo $?` so caller errexit/traps cannot abort us
		local cmd_result=$( os::cmd::internal::run_collecting_output "${cmd}"; echo $? )
		# eval funcs return 1 for "expectation met", making (( )) below read naturally
		cmd_succeeded=$( ${cmd_eval_func} "${cmd_result}"; echo $? )
		if (( cmd_succeeded )); then
			break
		fi
		sleep "${interval}"
		os::cmd::internal::mark_attempt
	done
	local end_time=$(os::cmd::internal::seconds_since_epoch)
	local time_elapsed=$(echo "scale=9; ${end_time} - ${start_time}" | bc | xargs printf '%5.3f') # in decimal seconds, we need leading zeroes for parsing later
	# some commands are multi-line, so we may need to clear more than just the previous line
	local cmd_length=$(echo "${cmd}" | wc -l)
	for (( i=0; i<${cmd_length}; i++ )); do
		os::text::clear_last_line
	done
	if (( cmd_succeeded )); then
		os::text::print_green "SUCCESS after ${time_elapsed}s: ${description}"
		if [[ -n ${VERBOSE-} ]]; then
			os::cmd::internal::print_try_until_results
		fi
		return 0
	else
		os::text::print_red_bold "FAILURE after ${time_elapsed}s: ${description}: the command timed out"
		os::text::print_red "$(os::cmd::internal::print_try_until_results)"
		return 1
	fi
}
# os::cmd::internal::run_until_text runs the provided command until the command output contains the
# given text or the timeout given runs out. Output from the command to be tested is suppressed unless
# either `VERBOSE=1` or the test fails. This function bypasses any error exiting settings or traps
# set by upstream callers by masking the return code of the command with the return code of setting
# the result variable on failure.
function os::cmd::internal::run_until_text() {
	local cmd=$1
	local text=$2
	# duration is in milliseconds; interval is in (possibly fractional) seconds
	local duration=$3
	local interval=$4
	os::cmd::internal::init_tempdir
	local description=$(os::cmd::internal::describe_call "${cmd}" "" "${text}" "os::cmd::internal::success_func")
	local duration_seconds=$(echo "scale=3; $(( duration )) / 1000" | bc | xargs printf '%5.3f')
	local description="${description}; re-trying every ${interval}s until completion or ${duration_seconds}s"
	echo "Running ${description}..."
	local start_time=$(os::cmd::internal::seconds_since_epoch)
	# deadline in epoch milliseconds
	local deadline=$(( $(date +%s000) + $duration ))
	local test_succeeded=0
	while [ $(date +%s000) -lt $deadline ]; do
		local cmd_result=$( os::cmd::internal::run_collecting_output "${cmd}"; echo $? )
		# grep the accumulated stdout+stderr for the wanted text
		local test_result=$( os::cmd::internal::run_collecting_output 'os::cmd::internal::get_results | grep -Eq "${text}"'; echo $? )
		# success_func returns 1 for "matched", making (( )) below read naturally
		test_succeeded=$( os::cmd::internal::success_func "${test_result}"; echo $? )
		if (( test_succeeded )); then
			break
		fi
		sleep "${interval}"
		os::cmd::internal::mark_attempt
	done
	local end_time=$(os::cmd::internal::seconds_since_epoch)
	local time_elapsed=$(echo "scale=9; ${end_time} - ${start_time}" | bc | xargs printf '%5.3f') # in decimal seconds, we need leading zeroes for parsing later
	# some commands are multi-line, so we may need to clear more than just the previous line
	local cmd_length=$(echo "${cmd}" | wc -l)
	for (( i=0; i<${cmd_length}; i++ )); do
		os::text::clear_last_line
	done
	if (( test_succeeded )); then
		os::text::print_green "SUCCESS after ${time_elapsed}s: ${description}"
		if [[ -n ${VERBOSE-} ]]; then
			os::cmd::internal::print_try_until_results
		fi
		return 0
	else
		os::text::print_red_bold "FAILURE after ${time_elapsed}s: ${description}: the command timed out"
		os::text::print_red "$(os::cmd::internal::print_try_until_results)"
		return 1
	fi
}
|
// repository: jianguotian/swift-hive-metastore
/*
* Copyright (C) 2013 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.apache.hadoop.hive.metastore.api;
import com.facebook.swift.codec.ThriftConstructor;
import com.facebook.swift.codec.ThriftField;
import com.facebook.swift.codec.ThriftStruct;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Objects.toStringHelper;
/**
 * Swift (Thrift) codec binding for the Hive metastore {@code StorageDescriptor}
 * struct: describes the physical storage of a table or partition (columns,
 * location, I/O formats, bucketing, sorting, SerDe and skew information).
 *
 * Mutable Java bean: the no-arg constructor plus setters exist for codec
 * deserialization; the annotated constructor covers all thirteen Thrift fields.
 */
@ThriftStruct("StorageDescriptor")
public class StorageDescriptor
{
    @ThriftConstructor
    public StorageDescriptor(
        @ThriftField(value = 1, name = "cols") final List<FieldSchema> cols,
        @ThriftField(value = 2, name = "location") final String location,
        @ThriftField(value = 3, name = "inputFormat") final String inputFormat,
        @ThriftField(value = 4, name = "outputFormat") final String outputFormat,
        @ThriftField(value = 5, name = "compressed") final boolean compressed,
        @ThriftField(value = 6, name = "numBuckets") final int numBuckets,
        @ThriftField(value = 7, name = "serdeInfo") final SerDeInfo serdeInfo,
        @ThriftField(value = 8, name = "bucketCols") final List<String> bucketCols,
        @ThriftField(value = 9, name = "sortCols") final List<Order> sortCols,
        @ThriftField(value = 10, name = "parameters") final Map<String, String> parameters,
        @ThriftField(value = 11, name = "skewedInfo") final SkewedInfo skewedInfo,
        @ThriftField(value = 12, name = "storedAsSubDirectories") final boolean storedAsSubDirectories,
        @ThriftField(value = 13, name = "statsFresh") final boolean statsFresh)
    {
        this.cols = cols;
        this.location = location;
        this.inputFormat = inputFormat;
        this.outputFormat = outputFormat;
        this.compressed = compressed;
        this.numBuckets = numBuckets;
        this.serdeInfo = serdeInfo;
        this.bucketCols = bucketCols;
        this.sortCols = sortCols;
        this.parameters = parameters;
        this.skewedInfo = skewedInfo;
        this.storedAsSubDirectories = storedAsSubDirectories;
        this.statsFresh = statsFresh;
    }

    /** No-arg constructor for codec/bean instantiation; fields start null/0/false. */
    public StorageDescriptor()
    {
    }

    // Field 1: column schemas of the stored data.
    private List<FieldSchema> cols;

    @ThriftField(value = 1, name = "cols")
    public List<FieldSchema> getCols()
    {
        return cols;
    }

    public void setCols(final List<FieldSchema> cols)
    {
        this.cols = cols;
    }

    // Field 2: filesystem location of the data.
    private String location;

    @ThriftField(value = 2, name = "location")
    public String getLocation()
    {
        return location;
    }

    public void setLocation(final String location)
    {
        this.location = location;
    }

    // Field 3: fully-qualified input format class name.
    private String inputFormat;

    @ThriftField(value = 3, name = "inputFormat")
    public String getInputFormat()
    {
        return inputFormat;
    }

    public void setInputFormat(final String inputFormat)
    {
        this.inputFormat = inputFormat;
    }

    // Field 4: fully-qualified output format class name.
    private String outputFormat;

    @ThriftField(value = 4, name = "outputFormat")
    public String getOutputFormat()
    {
        return outputFormat;
    }

    public void setOutputFormat(final String outputFormat)
    {
        this.outputFormat = outputFormat;
    }

    // Field 5: whether the data is compressed.
    private boolean compressed;

    @ThriftField(value = 5, name = "compressed")
    public boolean isCompressed()
    {
        return compressed;
    }

    public void setCompressed(final boolean compressed)
    {
        this.compressed = compressed;
    }

    // Field 6: number of buckets for bucketed tables.
    private int numBuckets;

    @ThriftField(value = 6, name = "numBuckets")
    public int getNumBuckets()
    {
        return numBuckets;
    }

    public void setNumBuckets(final int numBuckets)
    {
        this.numBuckets = numBuckets;
    }

    // Field 7: serializer/deserializer information.
    private SerDeInfo serdeInfo;

    @ThriftField(value = 7, name = "serdeInfo")
    public SerDeInfo getSerdeInfo()
    {
        return serdeInfo;
    }

    public void setSerdeInfo(final SerDeInfo serdeInfo)
    {
        this.serdeInfo = serdeInfo;
    }

    // Field 8: columns used for bucketing.
    private List<String> bucketCols;

    @ThriftField(value = 8, name = "bucketCols")
    public List<String> getBucketCols()
    {
        return bucketCols;
    }

    public void setBucketCols(final List<String> bucketCols)
    {
        this.bucketCols = bucketCols;
    }

    // Field 9: sort order of the data within each bucket.
    private List<Order> sortCols;

    @ThriftField(value = 9, name = "sortCols")
    public List<Order> getSortCols()
    {
        return sortCols;
    }

    public void setSortCols(final List<Order> sortCols)
    {
        this.sortCols = sortCols;
    }

    // Field 10: free-form storage parameters.
    private Map<String, String> parameters;

    @ThriftField(value = 10, name = "parameters")
    public Map<String, String> getParameters()
    {
        return parameters;
    }

    public void setParameters(final Map<String, String> parameters)
    {
        this.parameters = parameters;
    }

    // Field 11: skewed-column information.
    private SkewedInfo skewedInfo;

    @ThriftField(value = 11, name = "skewedInfo")
    public SkewedInfo getSkewedInfo()
    {
        return skewedInfo;
    }

    public void setSkewedInfo(final SkewedInfo skewedInfo)
    {
        this.skewedInfo = skewedInfo;
    }

    // Field 12: whether data is stored in sub-directories.
    private boolean storedAsSubDirectories;

    @ThriftField(value = 12, name = "storedAsSubDirectories")
    public boolean isStoredAsSubDirectories()
    {
        return storedAsSubDirectories;
    }

    public void setStoredAsSubDirectories(final boolean storedAsSubDirectories)
    {
        this.storedAsSubDirectories = storedAsSubDirectories;
    }

    // Field 13: whether statistics for this descriptor are up to date.
    private boolean statsFresh;

    @ThriftField(value = 13, name = "statsFresh")
    public boolean isStatsFresh()
    {
        return statsFresh;
    }

    public void setStatsFresh(final boolean statsFresh)
    {
        this.statsFresh = statsFresh;
    }

    /** Debug representation listing every Thrift field. */
    @Override
    public String toString()
    {
        return toStringHelper(this)
            .add("cols", cols)
            .add("location", location)
            .add("inputFormat", inputFormat)
            .add("outputFormat", outputFormat)
            .add("compressed", compressed)
            .add("numBuckets", numBuckets)
            .add("serdeInfo", serdeInfo)
            .add("bucketCols", bucketCols)
            .add("sortCols", sortCols)
            .add("parameters", parameters)
            .add("skewedInfo", skewedInfo)
            .add("storedAsSubDirectories", storedAsSubDirectories)
            .add("statsFresh", statsFresh)
            .toString();
    }
}
|
// File: ExtLib/Aurora/External/libRocket/Source/Core/Python/ContextInterface.cpp
/*
* This source file is part of libRocket, the HTML/CSS Interface Middleware
*
* For the latest information, see http://www.librocket.com
*
* Copyright (c) 2008-2010 CodePoint Ltd, Shift Technology Ltd
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
#include "precompiled.h"
#include "ContextInterface.h"
#include <Rocket/Core/Python/Utilities.h>
#include <Rocket/Core/Python/Wrapper.h>
#include <Rocket/Core/Context.h>
#include <Rocket/Core/Factory.h>
#include "EventListener.h"
#include "ContextInstancer.h"
namespace Rocket {
namespace Core {
namespace Python {
static PyObject* py_context = NULL;
// Initialises the Python interface.
// Builds the boost::python class binding for Rocket::Core::Context and stores
// the resulting class object in py_context for use by the context instancer.
// Several methods/properties are bound to ContextInterface:: wrappers instead
// of the raw C++ members so reference counting can be handled on the Python side.
bool ContextInterface::InitialisePythonInterface()
{
    ContextDocumentProxy::InitialisePythonInterface();
    py_context = python::class_< Context, Rocket::Core::Python::Wrapper< Context, const Rocket::Core::String& >, boost::noncopyable>("Context", python::init< const Rocket::Core::String& >())
        .def("AddEventListener", &ContextInterface::AddEventListener)
        .def("AddMouseCursor", &Context::AddMouseCursor, python::return_value_policy< python::return_by_value >())
        .def("CreateDocument", &ContextInterface::CreateDocument)
        .def("LoadDocument", &ContextInterface::LoadDocument)
        .def("LoadDocumentFromMemory", &ContextInterface::LoadDocumentFromMemory)
        .def("LoadMouseCursor", &ContextInterface::LoadMouseCursor)
        .def("Render", &Context::Render)
        .def("ShowMouseCursor", &Context::ShowMouseCursor)
        .def("UnloadAllDocuments", &Context::UnloadAllDocuments)
        .def("UnloadAllMouseCursors", &Context::UnloadAllMouseCursors)
        .def("UnloadDocument", &Context::UnloadDocument)
        .def("UnloadMouseCursor", &Context::UnloadMouseCursor)
        .def("Update", &Context::Update)
        .add_property("dimensions", python::make_function(&Context::GetDimensions, python::return_value_policy< python::return_by_value >()), &Context::SetDimensions)
        .add_property("documents", &ContextInterface::GetDocuments)
        .add_property("focus_element", python::make_function(&Context::GetFocusElement, python::return_value_policy< python::return_by_value >()))
        .add_property("hover_element", python::make_function(&Context::GetHoverElement, python::return_value_policy< python::return_by_value >()))
        .add_property("root_element", python::make_function(&Context::GetRootElement, python::return_value_policy< python::return_by_value >()))
        .add_property("name", python::make_function(&Context::GetName, python::return_value_policy< python::return_by_value >()))
        .ptr();
    return true;
}
// Initialise the Rocket element interface.
// Registers a context instancer that creates Python-backed contexts; the
// factory takes its own reference, so the local one is released immediately.
void ContextInterface::InitialiseRocketInterface()
{
    Factory::RegisterContextInstancer(new ContextInstancer(py_context))->RemoveReference();
}
// The "AddEventListener" function bound into Python context objects instead of the C++ function.
// Wraps the inline script source in an EventListener bound to the context's
// root element before registering it on the context.
void ContextInterface::AddEventListener(Context* self, const char* event, const char* script, bool in_capture_phase)
{
    self->AddEventListener(event, new EventListener(script, self->GetRootElement()), in_capture_phase);
}
// The "CreateDocument" function bound into Python context objects instead of the C++ function.
// Transfers the C++ caller reference on the new document to its Python wrapper.
python::object ContextInterface::CreateDocument(Context* self, const char* tag)
{
    Rocket::Core::ElementDocument* new_document = self->CreateDocument(tag);
    if (new_document != NULL)
    {
        // Wrap for Python, then drop the C++ caller reference it replaces.
        python::object wrapper = Rocket::Core::Python::Utilities::MakeObject(new_document);
        new_document->RemoveReference();
        return wrapper;
    }
    return python::object();
}
// The "LoadDocument" function bound into Python context objects instead of the C++ function.
// Transfers the C++ caller reference on the loaded document to its Python wrapper.
python::object ContextInterface::LoadDocument(Context* self, const char* document_path)
{
    Rocket::Core::ElementDocument* loaded = self->LoadDocument(document_path);
    if (loaded != NULL)
    {
        // Wrap for Python, then drop the C++ caller reference it replaces.
        python::object wrapper = Rocket::Core::Python::Utilities::MakeObject(loaded);
        loaded->RemoveReference();
        return wrapper;
    }
    return python::object();
}
// The "LoadDocumentFromMemory" function bound into Python context objects instead of the C++ function.
// Transfers the C++ caller reference on the loaded document to its Python wrapper.
python::object ContextInterface::LoadDocumentFromMemory(Context* self, const char* stream)
{
    Rocket::Core::ElementDocument* loaded = self->LoadDocumentFromMemory(stream);
    if (loaded != NULL)
    {
        // Wrap for Python, then drop the C++ caller reference it replaces.
        python::object wrapper = Rocket::Core::Python::Utilities::MakeObject(loaded);
        loaded->RemoveReference();
        return wrapper;
    }
    return python::object();
}
// The "LoadMouseCursor" function bound into Python context objects instead of the C++ function.
// Transfers the C++ caller reference on the cursor document to its Python wrapper.
python::object ContextInterface::LoadMouseCursor(Context* self, const char* document_path)
{
    Rocket::Core::ElementDocument* cursor_document = self->LoadMouseCursor(document_path);
    if (cursor_document != NULL)
    {
        // Wrap for Python, then drop the C++ caller reference it replaces.
        python::object wrapper = Rocket::Core::Python::Utilities::MakeObject(cursor_document);
        cursor_document->RemoveReference();
        return wrapper;
    }
    return python::object();
}
// Returns the document proxy object backing the Python 'documents' property.
ContextDocumentProxy ContextInterface::GetDocuments(Context* self)
{
    ContextDocumentProxy proxy(self);
    return proxy;
}
}
}
}
|
// File: src/ios/sdk/IDMMobileSDKv2/OMAuthenticationChallenge.h
/**
* Copyright (c) 2017, Oracle and/or its affiliates.
* The Universal Permissive License (UPL), Version 1.0
*/
#import <Foundation/Foundation.h>

// Kinds of authentication challenge the SDK can raise toward the app.
enum
{
    OMChallengeUsernamePassword,
    OMChallengeClientCert,
    OMChallengeServerTrust,
    OMChallengeExternalBrowser,
    OMChallengeEmbeddedBrowser,
    OMChallengeEmbeddedSafari,
    OMChallengeInvalidRedirect
};
typedef NSUInteger OMChallengeType;

// App's verdict when answering a challenge.
enum
{
    OMProceed,
    OMCancel
};
typedef NSUInteger OMChallengeResponse;

// Describes a pending authentication challenge. The app inspects authData and
// challengeType, then invokes authChallengeHandler with the (possibly updated)
// auth data and an OMProceed/OMCancel response to resume the flow.
@interface OMAuthenticationChallenge : NSObject

// Challenge payload; keys/values are SDK-defined — see SDK docs for the schema.
@property (nonatomic, strong) NSDictionary *authData;
// Which kind of challenge this instance represents.
@property (nonatomic) OMChallengeType challengeType;
// Completion callback the app must invoke to answer the challenge.
@property (nonatomic, copy) __block void(^authChallengeHandler)
(NSDictionary *authData,OMChallengeResponse response );
@end
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package DAO;
import Models.Alimento;
import Models.Turno;
import conexao.ConnectionFactory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
*
* @author Marcelo
*/
public class TurnoDAO {
private ConnectionFactory dao = ConnectionFactory.getInstancia();
private static TurnoDAO instancia;
public static TurnoDAO getInstancia() {
if (instancia == null) {
instancia = new TurnoDAO();
}
return instancia;
}
public void save(Turno turno) throws SQLException, ClassNotFoundException {
Connection conexao = dao.getConnection();
PreparedStatement stmt = null;
try {
stmt = conexao.prepareStatement("INSERT INTO `turno` VALUES (0, ?, ?)");
stmt.setString(1, turno.getNomeTurno());
stmt.setDate(2, turno.getDate());
stmt.executeUpdate();
turno.setId(this.find());
} finally {
ConnectionFactory.closeConnection(conexao, stmt);
}
}
public void save(Turno turno, Alimento alimento) throws SQLException, ClassNotFoundException {
Connection conexao = dao.getConnection();
PreparedStatement stmt = null;
try {
stmt = conexao.prepareStatement("INSERT INTO `itemalimento` VALUES (0, ?, ?)");
stmt.setInt(1, turno.getId());
stmt.setInt(2, alimento.getId());
stmt.executeUpdate();
} finally {
ConnectionFactory.closeConnection(conexao, stmt);
}
}
public void update(Turno turno) throws SQLException, ClassNotFoundException {
Connection conexao = dao.getConnection();
PreparedStatement stmt = null;
try {
stmt = conexao.prepareStatement("UPDATE `turno` SET `turno` = ?,`data` = ? WHERE `id` = ?");
stmt.setString(1, turno.getNomeTurno());
stmt.setDate(2, turno.getDate());
stmt.setInt(3, turno.getId());
stmt.executeUpdate();
} finally {
ConnectionFactory.closeConnection(conexao, stmt);
}
}
public void find(Turno turno) throws SQLException, ClassNotFoundException {
Connection conexao = dao.getConnection();
PreparedStatement stmt = null;
ResultSet result = null;
try {
stmt = conexao.prepareStatement("SELECT `turno`, `data` FROM `turno` WHERE `id` = ?");
stmt.setInt(1, turno.getId());
result = stmt.executeQuery();
while (result.next()) {
turno.setNomeTurno(result.getString("turno"));
turno.setDate(result.getDate("data"));
}
stmt = conexao.prepareStatement("SELECT `idTurno`, `idAlimento` FROM `itemalimento` WHERE `idTurno` = ?");
stmt.setInt(1, turno.getId());
result = stmt.executeQuery();
ArrayList<Alimento> alimentos = new ArrayList<Alimento>();
while (result.next()) {
Alimento alimento = new Alimento();
alimento.find(result.getInt("idAlimento"));
alimentos.add(alimento);
}
turno.setAlimentos(alimentos);
} finally {
ConnectionFactory.closeConnection(conexao, stmt);
}
}
public void delete(Turno turno) throws SQLException, ClassNotFoundException {
Connection conexao = dao.getConnection();
PreparedStatement stmt = null;
try {
stmt = conexao.prepareStatement("DELETE FROM `itemalimento` WHERE `idTurno` = ?");
stmt.setInt(1, turno.getId());
stmt.executeUpdate();
stmt = conexao.prepareStatement("DELETE FROM `turno` WHERE `id` = ?");
stmt.setInt(1, turno.getId());
stmt.executeUpdate();
} finally {
ConnectionFactory.closeConnection(conexao, stmt);
}
}
/**
 * Removes a single Turno/Alimento association from the join table.
 */
public void delete(Turno turno, Alimento alimento) throws SQLException, ClassNotFoundException {
    Connection connection = dao.getConnection();
    PreparedStatement statement = null;
    try {
        statement = connection.prepareStatement(
                "DELETE FROM `itemalimento` WHERE `idTurno` = ? AND `idAlimento` = ?");
        statement.setInt(1, turno.getId());
        statement.setInt(2, alimento.getId());
        statement.executeUpdate();
    } finally {
        ConnectionFactory.closeConnection(connection, statement);
    }
}
/**
 * Returns the id of the most recently inserted `turno` row, computed as the
 * table's next AUTO_INCREMENT value minus one.
 * NOTE(review): hard-codes schema 'bancoweb' and is racy under concurrent
 * inserts — confirm this is acceptable for the callers.
 */
private int find() throws SQLException, ClassNotFoundException {
    Connection conexao = dao.getConnection();
    PreparedStatement stmt = null;
    ResultSet result = null;
    int resultado = 0;
    try {
        stmt = conexao.prepareStatement("SELECT AUTO_INCREMENT as id FROM information_schema.tables WHERE table_name = 'turno' AND table_schema = 'bancoweb'");
        result = stmt.executeQuery();
        while (result.next()) {
            resultado = result.getInt("id");
        }
    } finally {
        ConnectionFactory.closeConnection(conexao, stmt);
    }
    // FIX: the return used to sit inside the finally block, which silently
    // discarded any SQLException thrown by the query (JLS 14.20.2).
    return resultado - 1;
}
}
|
// Fisher-Yates shuffle: permutes `array` in place and returns the same array.
function shuffle(array) {
    var remaining = array.length;
    var swap, picked;
    while (remaining !== 0) {
        // Pick one of the not-yet-shuffled slots at random...
        picked = Math.floor(Math.random() * remaining);
        remaining -= 1;
        // ...and swap it into the current tail position.
        swap = array[remaining];
        array[remaining] = array[picked];
        array[picked] = swap;
    }
    return array;
}
function convertToArray(string) {
var numSplit = string.split(",");
var ticketUserMap = [];
for (var key in numSplit) {
var rNum = numSplit[key].split("-");
if (rNum.length > 1) {
var rNumL, rNumR;
rNum[0] = Number(rNum[0]);
rNum[1] = Number(rNum[1]);
if (rNum[0] < rNum[1]) {
rNumL = rNum[0];
rNumR = rNum[1];
} else {
rNumL = rNum[1];
rNumR = rNum[0];
}
//console.log(rNumL+"--"+rNumR);
for (var i = rNum[0]; i < rNumR + 1; i++) {
ticketUserMap.push(i);
}
} else {
rNum = Number(rNum[0]);
if (!isNaN(rNum))
ticketUserMap.push(rNum);
}
}
return ticketUserMap;
}
|
module RAutomation
  # Waiting with timeout
  module WaitHelper
    extend self

    class TimeoutError < StandardError
    end

    # @private
    # Wait until the block evaluates to true or times out.
    # Returns the block's (truthy) value; polls every 0.5 s.
    def wait_until(timeout = Window.wait_timeout, &block)
      deadline = ::Time.now + timeout
      while ::Time.now <= deadline
        value = yield(self)
        return value if value
        sleep 0.5
      end
      raise TimeoutError, "timed out after #{timeout} seconds"
    end
  end
end
|
package cn.celess.blog.util;
import cn.celess.blog.BaseTest;
import cn.celess.blog.entity.User;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.time.Instant;
import java.util.Date;
import static org.junit.Assert.*;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class JwtUtilTest extends BaseTest {

    @Autowired
    JwtUtil jwtUtil;

    // Raw signing secret, needed to hand-craft a short-lived token below.
    @Value("${jwt.secret}")
    private String secret;

    @Test
    public void testGenerateToken() {
        User user = new User();
        user.setEmail("<EMAIL>");
        String s = jwtUtil.generateToken(user, false);
        assertNotNull(s);
        String str = null;
        try {
            str = jwtUtil.generateToken(null, false);
        } catch (Exception e) {
            // ignore: a null user is expected to fail; str stays null either way
        }
        assertNull(str);
    }

    @Test
    public void testIsTokenExpired() throws InterruptedException {
        // Build a token that expires in 1 second, then wait past the deadline.
        String s = Jwts.builder()
                .setClaims(null)
                .setExpiration(new Date(Instant.now().toEpochMilli() + 1000))
                .signWith(SignatureAlgorithm.HS512, secret)
                .compact();
        Thread.sleep(1010);
        assertTrue(jwtUtil.isTokenExpired(s));
        assertFalse(jwtUtil.isTokenExpired(jwtUtil.generateToken(new User(), false)));
    }

    @Test
    public void testGetUsernameFromToken() {
        User user = new User();
        user.setEmail("<EMAIL>");
        String s = jwtUtil.generateToken(user, false);
        assertEquals(user.getEmail(), jwtUtil.getUsernameFromToken(s));
        // Mutating the entity after token creation must not change the claim.
        user.setEmail("<EMAIL>");
        assertNotEquals(user.getEmail(), jwtUtil.getUsernameFromToken(s));
    }

    @Test
    public void testGetExpirationDateFromToken() {
        User user = new User();
        user.setEmail("<EMAIL>");
        String s = jwtUtil.generateToken(user, false);
        assertNotNull(jwtUtil.getExpirationDateFromToken(s));
    }

    @Test
    public void updateTokenDate() {
        User user = new User();
        user.setEmail("<EMAIL>");
        String s = jwtUtil.generateToken(user, false);
        Date before = jwtUtil.getExpirationDateFromToken(s);
        String s1 = jwtUtil.updateTokenDate(s);
        // FIX: compare against the captured 'before' instead of re-parsing the
        // original token; 'before' was previously computed but never used.
        assertTrue(jwtUtil.getExpirationDateFromToken(s1).getTime() - before.getTime() > 0);
    }
}
|
package ee.ituk.api.user;
import ee.ituk.api.user.domain.UserStatus;
import org.springframework.data.jpa.repository.JpaRepository;
public interface UserStatusRepository extends JpaRepository<UserStatus, Long> {
    /**
     * Looks up a UserStatus by its status name (Spring Data derived query).
     * NOTE(review): presumably returns null when no row matches — confirm
     * callers handle that.
     */
    UserStatus getByStatusName(String statusName);
}
|
% Apply per-element weights to the vector A and display the result.
A = [1 2 3 4];
weights = [1 2 3 4];
B = A .* weights;   % element-wise product (same result as weights .* A)
disp(B);
|
def print_reverse_complement_fasta(s, h):
    """Print the reverse complement of DNA sequence `s` in FASTA format.

    :param s: DNA sequence (case-insensitive; must contain only A/C/G/T,
              otherwise a KeyError is raised — same as the original contract)
    :param h: FASTA header written after the '>' marker
    """
    pairs = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
    # Reverse first, then complement each base.
    reverse_complement = ''.join(pairs[b] for b in s.upper()[::-1])
    print('>%s' % h)
    print(reverse_complement)
|
#! /bin/sh
# Grant a user rwx ACLs (and default ACLs for new files) on the app's
# writable directories. Usage: ./script [user] — defaults to the current user.
if [ "$1" = "" ]; then
    user=$(whoami)
else
    user=$1
fi
# Quote all $user expansions so usernames with unusual characters cannot
# split into multiple words.
if [ -n "$(getent passwd "$user")" ]; then
    echo "Setting permissions for $user ..."
    setfacl -R -m u:"$user":rwx app/cache app/logs tmp web/uploads
    setfacl -dR -m u:"$user":rwx app/cache app/logs tmp web/uploads
    echo "[OK] Permissions set for user $user"
    exit 0
else
    # FIX: exit non-zero so callers can detect the failure (previously
    # exited 0, masking the error); the old trailing 'exit' was unreachable.
    echo "User $user not found - permissions not set" >&2
    exit 1
fi
|
package org.museautomation.ui.steptask;
import org.museautomation.settings.*;
import org.museautomation.ui.extend.components.*;
import java.util.*;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
/**
 * Persisted UI settings for the task editor, stored in TaskEditor.json.
 * Instances are cached per settings-file name in a static map.
 */
public class TaskEditorSettings extends BaseSettingsFile
{
    @SuppressWarnings("unused") // required for serialization
    public double getSplitterPos()
    {
        return _splitter_pos;
    }
    @SuppressWarnings("unused") // required for serialization
    public void setSplitterPos(double splitter_pos)
    {
        _splitter_pos = splitter_pos;
    }
    // Default splitter position used when no saved value exists.
    private double _splitter_pos = 0.80;
    /** Returns the settings backed by the default file (TaskEditor.json). */
    public static TaskEditorSettings get()
    {
        return get(FILENAME);
    }
    /**
     * Returns the cached settings for the given file name, loading (and
     * registering with Closer for shutdown persistence) on first access.
     */
    public static TaskEditorSettings get(String name)
    {
        TaskEditorSettings settings = SETTINGS.get(name);
        if (settings == null)
        {
            settings = (TaskEditorSettings) load(TaskEditorSettings.class, name, null);
            Closer.get().add(settings);
            SETTINGS.put(name, settings);
        }
        return settings;
    }
    // Cache of loaded settings, keyed by file name.
    private static final Map<String, TaskEditorSettings> SETTINGS = new HashMap<>();
    private final static String FILENAME = "TaskEditor.json";
}
|
class BankAccount:
    """Plain data holder for a bank account."""

    def __init__(self, accountNumber, balance, interestRate):
        """Create an account.

        :param accountNumber: identifier of the account
        :param balance: current balance
        :param interestRate: interest rate applied to the balance
        """
        # Identifier of the account
        self.accountNumber = accountNumber
        # Current balance
        self.balance = balance
        # Interest rate applied to the balance
        self.interestRate = interestRate
|
package ua.kata;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for StringCalc.compute: each case mirrors its expected value with the
 * equivalent Java expression so the expectation is self-evident.
 */
class StringCalcTest {
    private StringCalc calc;
    @BeforeEach
    void setUp() {
        // Fresh calculator per test to avoid state bleed.
        calc = new StringCalc();
    }
    @Test
    void computeNegativeNumber() throws Exception {
        assertThat(calc.compute("-14")).isEqualTo(-14);
    }
    @Test
    void computeAddition() throws Exception {
        assertThat(calc.compute("5+4")).isEqualTo(5 + 4);
    }
    @Test
    void computeSubtraction() throws Exception {
        assertThat(calc.compute("10-4")).isEqualTo(10 - 4);
    }
    @Test
    void computeMultiplication() throws Exception {
        assertThat(calc.compute("4*5")).isEqualTo(4 * 5);
    }
    @Test
    void computeDivision() throws Exception {
        assertThat(calc.compute("12/3")).isEqualTo(12 / 3);
    }
    @Test
    void computeMultipleOperations() throws Exception {
        // Verifies operator precedence (* and / bind tighter than + and -).
        assertThat(calc.compute("4+2*10-42/7-1")).isEqualTo(4 + 2 * 10 - 42 / 7 - 1);
    }
    @Test
    void computeOperationsWithParenthesis() throws Exception {
        // Verifies parentheses override the default precedence.
        assertThat(calc.compute("(4+2)*10-42/(7-1)")).isEqualTo((4 + 2) * 10 - 42 / (7 - 1));
    }
}
|
package batch
import (
"github.com/muniere/glean/internal/app/server/batch/download"
"github.com/muniere/glean/internal/app/server/batch/index"
"github.com/muniere/glean/internal/app/server/batch/walk"
)
// Type aliases re-exported so callers can depend on the batch package
// without importing each sub-package directly.
type (
	SiteInfo        = index.SiteInfo
	WalkOptions     = walk.Options
	IndexOptions    = index.Options
	DownloadOptions = download.Options
)

// Entry points of the batch operations, aliased from their sub-packages.
var (
	Walk     = walk.Perform
	Index    = index.Perform
	Download = download.Perform
)
|
-- Average age of members in the IT department.
SELECT AVG(age) AS Avg_age
FROM members
WHERE department = 'IT';
|
<filename>src/app/resources/events/events.resource.ts
import {Injectable} from '@angular/core';
import {Observable, of} from 'rxjs';
import {switchMap} from 'rxjs/operators';
import {HttpClient} from '@angular/common/http';
import {environment} from '@src/environments/environment';
import {deserialize as deserializeEvents, ResponseInterface} from '@src/app/resources/events/events-get.mapper';
import {Events} from '@src/app/resources/events/events.model';
@Injectable({
  providedIn: 'root',
})
export class EventsResource {
  /**
   * Wraps the synchronous events mapper in an Observable so it can be used
   * inside an RxJS pipe.
   */
  static deserialize(response: ResponseInterface): Observable<Events> {
    return of(deserializeEvents(response));
  }

  constructor(
    private http: HttpClient,
  ) {
  }

  /**
   * Fetches the events feed from the API root and maps the raw response to
   * the Events domain model.
   */
  // FIX: annotate the exported method's return type so callers get a
  // checked contract instead of an inferred one.
  public getEvents(): Observable<Events> {
    const href = `${environment.api}/`;
    return this.http.get<ResponseInterface>(href)
      .pipe(
        switchMap(response => EventsResource.deserialize(response))
      );
  }
}
|
class Client:
    """Simulated client for talking to a server (no real networking)."""

    @staticmethod
    def connect(ip):
        """Pretend to establish a connection to the server at `ip`."""
        print(f"Connected to server at {ip}")

    @staticmethod
    def waitTick(milliseconds):
        """Block for `milliseconds` milliseconds (simulated tick wait)."""
        import time
        time.sleep(milliseconds / 1000)
def performTask(ip):
    """Connect to the server at `ip`, wait one 100 ms tick, report success."""
    Client.connect(ip)
    Client.waitTick(100)
    return "Task completed successfully"


# Example usage:
result = performTask("192.168.1.100")
print(result)  # Output: Task completed successfully
|
<reponame>mcanlas/doodle<filename>shared/src/main/scala/doodle/syntax/NormalizedSyntax.scala<gh_stars>0
package doodle.syntax
import doodle.core.Normalized
trait NormalizedSyntax {
implicit class ToNormalizedOps(val value: Double) {
def normalized: Normalized =
Normalized.clip(value)
}
}
|
#!/usr/bin/env bash
export ROOT='erichang@bwrcrdsl-6.eecs.berkeley.edu:~/projects/bag_gen/central/BAG_framework/pybag'
rsync -zv CMakeLists.txt ${ROOT}/CMakeLists.txt
rsync -zv build.sh ${ROOT}/build.sh
rsync -zv run_test.sh ${ROOT}/run_test.sh
rsync -zv setup.py ${ROOT}/setup.py
rsync -zarv --delete src/ ${ROOT}/src
rsync -zarv --delete tests/ ${ROOT}/tests
rsync -zarv --delete --exclude '_build' --exclude '.git' cbag/ ${ROOT}/cbag
rsync -zarv --delete --exclude '_build' --exclude '.git' pybind11_generics/ ${ROOT}/pybind11_generics
|
<filename>stopify-continuations-compiler/src/compiler/sourceMaps.ts
import { SourceMapConsumer, RawSourceMap } from 'source-map';
import { LineMapping } from '../types';
import * as convertSourceMap from 'convert-source-map';
/** Trivial LineMapping backed by a caller-supplied mapping function. */
class LineMappingImpl implements LineMapping {
  constructor(public getLine: (line: number, column: number) => number | null) {}
}
/**
 * Extracts the (inline or referenced) source map embedded in `jsCode`, or
 * undefined when none is present.
 */
export function getSourceMap(jsCode: string): RawSourceMap | undefined {
  const converter = convertSourceMap.fromSource(jsCode);
  return converter === null ? undefined : (converter.toObject() as RawSourceMap);
}
/**
 * Returns a custom line mapper which maps `node_modules` sources to `null`.
 * Without a source map, lines map to themselves.
 */
export function generateLineMapping(map: RawSourceMap | undefined): LineMapping {
  if (!map) {
    // No source map: identity mapping (column is ignored).
    return new LineMappingImpl((line: number, column: number) => line);
  }
  const consumer = new SourceMapConsumer(map);
  return new LineMappingImpl((line: number, column: number) => {
    const pos = consumer.originalPositionFor({ line, column });
    // NOTE(arjun): Ignoring these directories is a bit of a hack
    if (pos.source === null ||
        pos.source.includes('node_modules/') ||
        pos.source.includes('https://') ||
        pos.source.includes('goog/') ||
        pos.source.includes('cljs/') ||
        pos.source.includes('opt/') ||
        pos.source.includes('user_code/') ||
        pos.line === null) {
      return null;
    }
    return pos.line;
  });
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.