text stringlengths 1 1.05M |
|---|
def calculate_sum(*args):
    """Return the sum of the given numbers.

    :param args: Zero or more numbers to be summed.
    :return: 0 when called with no arguments, the single value when called
        with one, otherwise the total of all arguments.
    """
    # The builtin handles the empty, single, and multi-argument cases
    # uniformly, and avoids the original's local variable named ``sum``
    # that shadowed the builtin of the same name.
    return sum(args)
#! /bin/bash
#AvNav raspberry startup checks
# Reads /boot/avnav.conf at boot, applies a configured password and optional
# MCS setup, and records what was applied in /etc/avnav-startup-checks so
# unchanged settings are not re-applied on subsequent boots.
#set -x
CONFIG=/boot/avnav.conf
LAST=/etc/avnav-startup-checks
MCS_INSTALL=`dirname $0`/setup-mcs.sh
# Log to syslog under a fixed tag.
log(){
  logger -t 'avnav-startup-check' "$*"
}
log "started"
LAST_PASSWD=""
LAST_MCS=""
# Load the values applied on a previous boot, if any.
if [ -f $LAST ] ; then
  source $LAST
fi
if [ ! -f $CONFIG ]; then
  log "no $CONFIG found"
  exit 0
fi
# Strip CR characters before sourcing: the config often comes from Windows.
source <(tr -d '\015' < $CONFIG)
hasChanges=0
if [ "$AVNAV_PASSWD" != "" ] ; then
  if [ "$AVNAV_PASSWD" = "$LAST_PASSWD" ] ; then
    log "AVNAV_PASSWD is set but unchanged"
  else
    log "setting AVNAV_PASSWD from $CONFIG"
    # usermod -p expects an already-encrypted password value.
    usermod -p "$AVNAV_PASSWD" pi
    if [ $? = 0 ] ; then
      LAST_PASSWD="$AVNAV_PASSWD"
      hasChanges=1
    else
      log "ERROR: unable to set password, trying on next start"
    fi
  fi
else
  log "AVNAV_PASSWD not set"
fi
runMcs=0
if [ "$AVNAV_MCS" = "yes" ] ; then
  if [ "$LAST_MCS" = "yes" ] ; then
    log "AVNAV_MCS is set but unchanged"
  else
    log "AVNAV_MCS is set to $AVNAV_MCS"
    if [ -f "$MCS_INSTALL" ] ; then
      LAST_MCS="$AVNAV_MCS"
      runMcs=1
      hasChanges=1
    else
      log "ERROR: $MCS_INSTALL not found, cannot set up MCS"
    fi
  fi
else
  log "AVNAV_MCS not enabled"
fi
needsReboot=0
if [ "$runMcs" = 1 ];then
  log "running $MCS_INSTALL"
  $MCS_INSTALL -r -p -c
  rt=$?
  log "mcs install returned $rt"
  # Exit code 1 from setup-mcs.sh triggers a reboot; any other non-zero
  # code clears LAST_MCS so the install is retried on the next boot.
  if [ $rt = 1 ] ; then
    log "reboot requested by MCS install"
    needsReboot=1
  else
    [ $rt != 0 ] && LAST_MCS='' #retry next time
  fi
else
  log "startup check done"
fi
# Persist applied state; mode 600 because LAST_PASSWD holds a password hash.
if [ "$hasChanges" = 1 ]; then
  log "writing back $LAST"
  echo "LAST_MCS=$LAST_MCS" > $LAST
  echo "LAST_PASSWD='$LAST_PASSWD'" >> $LAST
  chmod 600 $LAST
fi
if [ $needsReboot = 1 ] ; then
  log "****rebooting now****"
  reboot
fi
exit 0
|
<reponame>zavodil/skyward-ui
import { singletonHook } from "react-singleton-hook";
import { useEffect, useState } from "react";
import Big from "big.js";
import { useAccount } from "./account";
import { keysToCamel } from "./utils";
const defaultSales = {
loading: true,
sales: [],
};
const OneWeek = 7 * 24 * 60 * 60 * 1000;
// Convert a raw subscription record into one whose token amounts and share
// counts are Big.js instances; referralId passes through untouched.
const mapSubscription = (subscription) => {
  const {
    claimedOutBalance,
    spentInBalance,
    remainingInBalance,
    unclaimedOutBalances,
    shares,
    referralId,
  } = subscription;
  return {
    claimedOutBalance: claimedOutBalance.map(Big),
    spentInBalance: Big(spentInBalance),
    remainingInBalance: Big(remainingInBalance),
    unclaimedOutBalances: unclaimedOutBalances.map(Big),
    shares: Big(shares),
    referralId,
  };
};
const saleRefreshTimers = {};
// Attach convenience predicates to a mapped sale object and return it.
export const addSaleMethods = (sale) => {
  // A sale has started once any of its duration has elapsed.
  sale.started = () => sale.remainingDuration < sale.duration;
  // A sale has ended when no duration remains.
  sale.ended = () => sale.remainingDuration === 0;
  // "Far ahead" means not started yet and more than one week away.
  sale.farAhead = () =>
    !sale.started() && sale.startTime - sale.currentTime > OneWeek;
  return sale;
};
// Normalize a raw sale record from the contract into UI-friendly values:
// token amounts become Big.js instances and stringified nanosecond
// timestamps/durations become millisecond numbers with companion Dates.
// Mutates (and returns) the camel-cased copy of the input.
export const mapSale = (s) => {
  s = keysToCamel(s);
  s.outTokens.forEach((o) => {
    o.remaining = Big(o.remaining);
    o.distributed = Big(o.distributed);
    // treasuryUnclaimed is optional on out tokens.
    if (o.treasuryUnclaimed) {
      o.treasuryUnclaimed = Big(o.treasuryUnclaimed);
    }
  });
  s.inTokenRemaining = Big(s.inTokenRemaining);
  s.inTokenPaidUnclaimed = Big(s.inTokenPaidUnclaimed);
  s.inTokenPaid = Big(s.inTokenPaid);
  s.totalShares = Big(s.totalShares);
  // /1e6 converts nanoseconds to milliseconds for use with Date.
  s.startTime = parseFloat(s.startTime) / 1e6;
  s.startDate = new Date(s.startTime);
  s.duration = parseFloat(s.duration) / 1e6;
  s.endTime = s.startTime + s.duration;
  s.endDate = new Date(s.endTime);
  s.remainingDuration = parseFloat(s.remainingDuration) / 1e6;
  if (s.currentTime) {
    s.currentTime = parseFloat(s.currentTime) / 1e6;
    s.currentDate = new Date(s.currentTime);
  } else {
    // No explicit current time: derive "now" from the elapsed duration.
    s.currentDate = new Date(s.startTime + s.duration - s.remainingDuration);
    s.currentTime = s.currentDate.getTime();
  }
  if (s.subscription) {
    s.subscription = mapSubscription(s.subscription);
  }
  return addSaleMethods(s);
};
// Singleton hook exposing the sale list plus per-sale fetch/refresh helpers.
// All consumers share one subscription (react-singleton-hook).
export const useSales = singletonHook(defaultSales, () => {
  const [sales, setSales] = useState(defaultSales);
  const account = useAccount();
  useEffect(() => {
    // Wait until the NEAR connection is available.
    if (!account.near) {
      return;
    }
    // Forward declaration: localMapSale closes over scheduleRefresh,
    // which is assigned further below.
    let scheduleRefresh = null;
    const localMapSale = (sale) => {
      sale = mapSale(sale);
      sale.scheduleRefresh = (fast) => scheduleRefresh(sale, fast);
      return sale;
    };
    // Fetch a single sale, including the viewer's subscription when signed in.
    const fetchSale = async (saleId) => {
      return localMapSale(
        await account.near.contract.get_sale({
          sale_id: saleId,
          account_id: account.accountId || undefined,
        })
      );
    };
    // Re-fetch one sale and splice it into state at its saleId index,
    // leaving the other entries untouched.
    const refreshSale = async (saleId) => {
      const sale = await fetchSale(saleId);
      setSales((prev) =>
        Object.assign({}, prev, {
          sales: Object.assign([], prev.sales, { [saleId]: sale }),
        })
      );
    };
    // Per-sale polling: 1s when "fast", 5s while the sale is live, 30s
    // before it starts. Refresh is deferred while the tab is hidden, and
    // ended sales stop polling entirely.
    scheduleRefresh = (sale, fast) => {
      clearTimeout(saleRefreshTimers[sale.saleId]);
      saleRefreshTimers[sale.saleId] = null;
      if (!sale.ended()) {
        saleRefreshTimers[sale.saleId] = setTimeout(
          async () => {
            if (!document.hidden) {
              await refreshSale(sale.saleId);
            } else {
              scheduleRefresh(sale, fast);
            }
          },
          fast ? 1000 : sale.started() ? 5000 : 30000
        );
      }
    };
    // Initial load of all sales.
    const fetchSales = async () => {
      const rawSales = await account.near.contract.get_sales({
        account_id: account.accountId || undefined,
      });
      const sales = rawSales.map(localMapSale);
      return sales;
    };
    fetchSales()
      .then((sales) => {
        setSales({
          loading: false,
          sales,
          fetchSale,
          refreshSale,
        });
      })
      .catch((e) => {
        console.log(e);
      });
  }, [account]);
  return sales;
});
|
<reponame>premss79/zignaly-webapp
import React, { useState } from "react";
import { Box, Slider, Typography } from "@material-ui/core";
import { FormattedMessage } from "react-intl";
import "./LeverageForm.scss";
import useStoreSettingsSelector from "../../../hooks/useStoreSettingsSelector";
import CustomButton from "../../CustomButton";
/**
* @typedef {Object} LeverageFormProps
* @property {number} min Minimum leverage limit.
* @property {number} max Maximum leverage limit.
* @property {number} leverage Current leverage.
* @property {function} setValue Hook form setValue callback.
* @property {function} onClose Hook form setValue callback.
*/
/**
* Leverage form control component.
*
* @param {LeverageFormProps} props Component props.
* @returns {JSX.Element} Leverage form element.
*/
const LeverageForm = (props) => {
const { min, max, leverage, setValue, onClose } = props;
const [val, setVal] = useState(leverage);
const marks = [
{ value: 1, label: "1" },
{ value: 25, label: "25" },
{ value: 50, label: "50" },
{ value: 75, label: "75" },
{ value: 100, label: "100" },
{ value: 125, label: "125" },
];
const { darkStyle } = useStoreSettingsSelector();
const handleCancel = () => {
onClose();
};
const handleConfirm = () => {
setValue("leverage", val);
onClose();
};
/**
* Leverage slided change handler.
*
* @param {React.ChangeEvent<{}>} event Change event.
* @param {number} newValue Selected slider value.
* @returns {Void} None.
*/
const handleSliderChange = (event, newValue) => {
setVal(newValue);
};
/**
* Leverage input value change handler.
*
* @param {React.ChangeEvent<HTMLInputElement>} event Change event.
* @returns {Void} None.
*/
const handleInputChange = (event) => {
const targetElement = event.currentTarget;
setVal(Number(targetElement.value));
};
const protectLimits = () => {
if (val < min) {
setVal(min);
} else if (val > max) {
setVal(max);
}
};
const increaseValue = () => {
const newValue = val + 1;
if (newValue <= max) {
setVal(newValue);
}
};
const decreaseValue = () => {
const newValue = val - 1;
if (newValue >= min) {
setVal(newValue);
}
};
return (
<Box
className="leverageForm"
display="flex"
flexDirection="column"
justifyContent="space-between"
>
<Typography className="title" id="range-slider" variant="h3">
<FormattedMessage id="terminal.leverage.adjust" />
</Typography>
<Box className="inputValue" display="flex" flexDirection="row" justifyContent="space-between">
<button
className={darkStyle ? "dark" : "light"}
onClick={() => decreaseValue()}
type="button"
>
−
</button>
<input
className={darkStyle ? "dark" : "light"}
onBlur={protectLimits}
onChange={handleInputChange}
value={val}
/>
<button
className={darkStyle ? "dark" : "light"}
onClick={() => increaseValue()}
type="button"
>
+
</button>
</Box>
<Slider
aria-labelledby="range-slider"
className="slider"
classes={{ mark: "mark", thumb: "thumb", track: "track", markLabel: "markLabel" }}
marks={marks}
max={max}
min={min}
onChange={handleSliderChange}
step={1}
value={val}
/>
{max > 25 && val >= 25 && (
<span className="errorText">
<FormattedMessage id="terminal.leverage.alert" />
</span>
)}
<Box
alignItems="center"
className="formActions"
display="flex"
flexDirection="row"
justifyContent="space-between"
>
<CustomButton className="textDefault" onClick={handleCancel}>
<FormattedMessage id="terminal.leverage.cancel" />
</CustomButton>
<CustomButton className="submitButton" onClick={handleConfirm}>
<FormattedMessage id="terminal.leverage.confirm" />
</CustomButton>
</Box>
</Box>
);
};
export default React.memo(LeverageForm);
|
<filename>tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/pages/SelectModelFromObjectsDemo.java
package org.apache.tapestry5.integration.app1.pages;
import org.apache.tapestry5.SelectModel;
import org.apache.tapestry5.annotations.Persist;
import org.apache.tapestry5.annotations.Property;
import org.apache.tapestry5.integration.app1.data.Track;
import org.apache.tapestry5.integration.app1.services.MusicLibrary;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.services.SelectModelFactory;
/**
 * Demo page showing SelectModelFactory building a SelectModel directly from
 * the list of Track objects supplied by the MusicLibrary service.
 */
public class SelectModelFromObjectsDemo
{
    @Inject
    private MusicLibrary library;

    @Inject
    private SelectModelFactory modelFactory;

    // Rebuilt on every request in onPrepare(), so not persisted.
    @Property
    private SelectModel model;

    // The selected track survives across requests via @Persist.
    @Property
    @Persist
    private Track track;

    // Tapestry "prepare" lifecycle callback: build the select model before
    // the form renders or its submission is processed.
    void onPrepare()
    {
        model = modelFactory.create(library.getTracks());
    }
}
|
/* Return the largest of the three integer arguments. */
int max_of_three(int x, int y, int z)
{
    int result = (x > y) ? x : y;
    if (z > result) {
        result = z;
    }
    return result;
}
class FileSystem:
    """Minimal in-memory file system.

    Every node is a dict with a ``name`` and ``type`` ('file' or
    'directory'); directories carry a ``contents`` list, files a
    ``content`` string.
    """

    def __init__(self):
        self.root = {'name': '/', 'type': 'directory', 'contents': []}
        self.current_directory = self.root
        # Stack of ancestors of current_directory (root excluded), pushed by
        # change_directory so navigate_parent_directory can return to the
        # true parent even when directory names repeat at different depths.
        self._parents = []

    def create_file(self, name, content):
        """Create a file with the given content in the current directory."""
        file = {'name': name, 'type': 'file', 'content': content}
        self.current_directory['contents'].append(file)

    def create_directory(self, name):
        """Create an empty subdirectory of the current directory."""
        directory = {'name': name, 'type': 'directory', 'contents': []}
        self.current_directory['contents'].append(directory)

    def list_contents(self):
        """Return entry names in the current directory; directories get a
        trailing '/'."""
        return [
            item['name'] + '/' if item['type'] == 'directory' else item['name']
            for item in self.current_directory['contents']
        ]

    def change_directory(self, directory_name):
        """Descend into the named subdirectory.

        Raises:
            ValueError: if no directory of that name exists here.
        """
        for item in self.current_directory['contents']:
            if item['name'] == directory_name and item['type'] == 'directory':
                self._parents.append(self.current_directory)
                self.current_directory = item
                return
        raise ValueError(f"Directory '{directory_name}' not found")

    def navigate_parent_directory(self):
        """Move to the parent of the current directory (no-op at root).

        Fix: the original rebuilt the path by splitting the node's bare
        ``name`` on '/', but names never contain '/', so it always jumped
        straight back to root regardless of depth. A parent stack maintained
        by change_directory restores the actual parent instead.
        """
        if self.current_directory is self.root:
            return
        if self._parents:
            self.current_directory = self._parents.pop()
        else:
            # Fallback for callers that set current_directory directly.
            self.current_directory = self.root
import React from "react";
import { IFilterField, IFilterObject } from "./utils/models";
/** Props accepted by the ReactFilterBar component. */
interface IContainerProps {
  /** Disable all filter interactions when true. */
  disabled?: boolean;
  /** Field definitions the user may filter on. */
  fields: IFilterField[];
  /** Currently applied filters (controlled). */
  filters: IFilterObject[];
  /** Called with the full filter list whenever it changes. */
  onFilterChange?: (filters: IFilterObject[]) => void;
  /** Accent color override. */
  primaryColor?: string;
  /** Color used for negated/exclusion filters. */
  negativeColor?: string;
}
/** Filter bar component rendering and editing a list of filters. */
export declare const ReactFilterBar: React.FC<IContainerProps>;
export {};
|
<filename>test/unit/index.js
import {
expect
} from "chai";
import nock from "nock";
import {
apiRequests
} from "services/api-requests";
import {
WRITE_API_ENDPOINT
} from "config";
// Tests apiRequests against a nock-intercepted write API: the interceptor
// only matches POST bodies shaped like a valid reading, so a malformed
// payload is expected to reject.
describe("Translate readings", () => {
  // Structurally valid reading used as the happy-path fixture.
  const reading = {
    "sensorId": "IT001E00030554",
    "date": "2015-10-14T15:08:16.652Z",
    "source": "reading",
    "measurements": [{
      "type": "activeEnergy",
      "value": 18,
      "unitOfMeasurement": "kWh"
    }]
  };
  // Body matcher: accept only bodies carrying all required reading fields.
  const matchPost = (body) => {
    return !!(body.sensorId && body.date && body.source && body.measurements);
  };
  // NOTE(review): registered once at describe time; the second test
  // presumably relies on the matcher rejecting its body rather than on a
  // fresh interceptor — confirm nock interceptor consumption semantics.
  nock(WRITE_API_ENDPOINT)
    .post("/readings", matchPost)
    .reply(201, "Element created");
  after(() => {
    nock.cleanAll();
  });
  it("send translated readings", async () => {
    const payload = [{
      ...reading
    }];
    await expect(apiRequests(payload)).to.be.not.rejected;
  });
  it("invalid reading object", async () => {
    const payload = [{
      someRandom: true
    }];
    await expect(apiRequests(payload)).to.be.rejected;
  });
});
|
//go:generate mockgen -source=time.go -destination=time_mock.go -package=$GOPACKAGE -self_package=github.com/42milez/ProtocolStack/src/$GOPACKAGE
package time

import "time"

// Time is the package-level clock used by production code; tests swap in a
// mock generated by the go:generate mockgen directive above.
var Time ITime

// ITime abstracts the system clock so time-dependent code can be tested
// deterministically.
type ITime interface {
	Now() time.Time
}

// timeProvider is the real implementation backed by the standard library.
type timeProvider struct{}

func (timeProvider) Now() time.Time {
	return time.Now()
}

// init installs the real clock by default.
func init() {
	Time = &timeProvider{}
}
|
<gh_stars>1-10
const mongoose = require('mongoose');
const { Schema } = mongoose;
// class PurchaseClass {}
// Purchase of a book by a user, including the Stripe charge snapshot and the
// reader's in-book bookmarks.
const mongoSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    required: true,
  },
  bookId: {
    type: Schema.Types.ObjectId,
    required: true,
  },
  // Amount paid, in the smallest currency unit as recorded from the charge.
  amount: {
    type: Number,
    required: true,
  },
  bookmarks: [
    {
      chapterId: {
        type: Schema.Types.ObjectId,
        required: true,
      },
      chapterSlug: {
        type: String,
        required: true,
      },
      chapterOrder: {
        type: Number,
        required: true,
      },
      // Bookmarked text excerpt.
      text: {
        type: String,
        required: true,
      },
      // In-page anchor hash for jumping back to the bookmark.
      hash: {
        type: String,
        required: true,
      },
    },
  ],
  createdAt: {
    type: Date,
    required: true,
  },
  // Snapshot of the relevant fields from the Stripe charge object.
  stripeCharge: {
    id: String,
    amount: Number,
    created: Number,
    livemode: Boolean,
    paid: Boolean,
    status: String,
  },
  isFree: {
    type: Boolean,
    // Fix: mongoose's option is `default`, not `defaultValue`; the original
    // key was silently ignored, leaving isFree undefined on new documents.
    default: false,
  },
});
// mongoSchema.loadClass(PurchaseClass);
// A user can purchase a given book at most once.
mongoSchema.index({ bookId: 1, userId: 1 }, { unique: true });
const Purchase = mongoose.model('Purchase', mongoSchema);
module.exports = Purchase;
|
#!/bin/bash
# Fail fast unless at least one iOS device and one Android device (over USB)
# is attached.

# Verify iOS devices
# Fix: `wc -l | grep -q '0'` also matched counts like 10 or 20; compare the
# count numerically instead.
if [ "$(idevice_id -l | wc -l)" -eq 0 ]; then
  echo "iOS device is not connected!"
  idevice_id -l
  echo "Exit.."
  exit 1
else
  echo "iOS device is connected!"
fi

# Verify Android devices
# Fix: in the original, the echo string literal ran across two lines, so the
# adb listing was concatenated into the echo arguments and adb never ran.
if [ "$("$ANDROID_HOME/platform-tools/adb" devices -l | grep usb | wc -l)" -eq 0 ]; then
  echo "Android device is not connected!"
  "$ANDROID_HOME/platform-tools/adb" devices -l
  echo "Exit.."
  exit 1
else
  echo "Android device is connected!"
fi
|
#!/bin/bash
set -e
# This script is to be run directly after a fresh install of the OS
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Update anything on the fresh install
sudo apt-get -y upgrade
# 1. Install all apps
bash "$SCRIPT_DIR/install_apps.bash" "$@"
# 2. Configure and customize the system
bash "$SCRIPT_DIR/configure_machine.bash" "$@"
|
package utils
import (
resources "github.com/pikami/tiktok-dl/resources"
fileio "github.com/pikami/tiktok-dl/utils/fileio"
)
// GetScraper - Retrieve scraper.
// Resolution order: a script on disk at resources.ScraperPath wins, then the
// embedded resources.ScraperScript; panics when neither is available.
func GetScraper() string {
	if fileio.CheckIfExists(resources.ScraperPath) {
		return ReadFileAsString(resources.ScraperPath)
	}
	if resources.ScraperScript != "" {
		return resources.ScraperScript
	}
	panic(resources.FailedToLoadScraper)
}
|
<reponame>MrinAK/Spring-Boot
package eu.itdc.internetprovider.service;
import eu.itdc.internetprovider.persistence.entity.Role;
import eu.itdc.internetprovider.persistence.entity.User;
import eu.itdc.internetprovider.persistence.repository.RoleRepository;
import eu.itdc.internetprovider.persistence.repository.UserRepository;
import eu.itdc.internetprovider.service.dto.JwtResponseDTO;
import eu.itdc.internetprovider.service.dto.LoginRequestDTO;
import eu.itdc.internetprovider.service.dto.SignupRequestDTO;
import eu.itdc.internetprovider.service.util.JwtUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@Service
public class AuthenticationService {
private final UserRepository userRepository;
private final RoleRepository roleRepository;
private final PasswordEncoder passwordEncoder;
private final AuthenticationManager authenticationManager;
private final JwtUtils jwtUtils;
@Value("${app.login.maxNumberOfAttempt}")
private Integer maxNumberOfAttempt;
public AuthenticationService(UserRepository userRepository,
RoleRepository roleRepository,
PasswordEncoder passwordEncoder,
AuthenticationManager authenticationManager,
JwtUtils jwtUtils) {
this.userRepository = userRepository;
this.roleRepository = roleRepository;
this.passwordEncoder = passwordEncoder;
this.authenticationManager = authenticationManager;
this.jwtUtils = jwtUtils;
}
private static final Map<Role.RoleType, Role> roles = new HashMap<>();
@PostConstruct
protected void postConstruct() {
roleRepository.findAll().forEach(role -> roles.put(role.getName(), role));
}
public void signup(SignupRequestDTO createUserDTO) {
Role role = roles.get(Role.RoleType.ROLE_CUSTOMER);
if (userRepository.count() == 0) {
role = roles.get(Role.RoleType.ROLE_ADMIN);
}
if (userRepository.findByUsername(createUserDTO.getUsername()).isPresent()) {
throw new RuntimeException(String.format("Username %s already exist", createUserDTO.getUsername()));
}
User user = new User(createUserDTO.getUsername(), passwordEncoder.encode(createUserDTO.getPassword()),
createUserDTO.getEmail(),
Set.of(role));
userRepository.save(user);
}
public JwtResponseDTO signin(LoginRequestDTO loginRequestDTO) {
Authentication authentication;
try {
authentication = authenticationManager.authenticate(
new UsernamePasswordAuthenticationToken(loginRequestDTO.getUsername(),
loginRequestDTO.getPassword()));
} catch (BadCredentialsException exception) {
userRepository.findByUsername(loginRequestDTO.getUsername()).ifPresent(user -> {
user.failLoginAttempt(maxNumberOfAttempt);
userRepository.save(user);
});
throw exception;
}
SecurityContextHolder.getContext().setAuthentication(authentication);
String jwt = jwtUtils.generatedJwtToken(authentication);
return new JwtResponseDTO(jwt);
}
} |
const express = require('express')
const nunjucks = require('nunjucks')
const server = express()
const videos = require("./data")
server.use(express.static('public'))
server.set("view engine", "njk")
nunjucks.configure("views", {
express: server,
autoescape: false,
noCache: true
})
server.get("/", function (req, res) {
const about = {
avatar_url: "https://media-exp1.licdn.com/dms/image/C4E03AQFpnxn6QJ3MDw/profile-displayphoto-shrink_200_200/0/1599800937695?e=1617840000&v=beta&t=8iHBjixg2RHkcr9MpINHS6GXf0zwjzZpuK9sj4nRE4g",
name: "<NAME>",
role: "Developer Front-End",
description: 'Graduated in Systems Analysis and Development at Unopar. My goal is to transform layouts into code, using HTMl, CSS, JavaScript. Student of Rocketseat: GoStack Bootcamp. Training around techs, such as: Node.js, React.js, React Native. Usage of libraries like Express, Adonis, Mongoose, Sequelize, Json Web Token, Multer, Bee Queue (Redis). Usage of databases, such as PostgreSQL, MongoDB and Redis. I currently work as a sales assistant at <a href="https://casashopdecor.com.br" target="_blank">CasaShopDecor</a> and front end developer at studio <a href="https://instagram.com/pina.criacao_" target="_blank">Piná</a>.',
link: [
{ name: "Github", url: "https://github.com/FernandaDsilva" },
{ name: "Linkedin", url: "https://www.linkedin.com/in/fernanda-dias-silva/" },
{ name: "Whatsapp", url: "https://api.whatsapp.com/send?phone=5543996870552&text=Ol%C3%A1%2C%20Ferna!%20Visitei%20seu%20site%20e%20tenho%20uma%20d%C3%BAvida%2C%20pode%20me%20ajudar%3F" },
{ name: "Instagram", url: "https://instagram.com/falaprafeer" }
]
}
return res.render("about", { about })
})
server.get("/projetos", function (req, res) {
return res.render("projetos", { items: videos })
})
server.get("/video", function (req, res) {
const id = req.query.id
const video = videos.find(function (video) {
return video.id == id
})
if (!video) {
return res.send("Video not found!")
}
return res.render("video", { item: video })
})
server.listen(5000, function () {
console.log("http://127.0.0.1:5000/ Is Running")
})
|
#!/bin/sh
# sync-rdme.sh — copy README.md and LICENSE into the target directory and
# push an extra commit when they have drifted out of sync.

DIR='mark-viewport'
FNa='README.md'
FNb='LICENSE'
FN_log='.generated.log'

# Truncate the log and keep it open on FD 5 for all status output below.
: > "${FN_log}"
exec 5<> "${FN_log}"
echo 'sync-rdme.sh starting...' >&5
echo -n 'pwd=' >&5 ; pwd >&5

# Proceed only when the target directory exists directly under CWD.
DIROK=$(find . -maxdepth 1 -type d -name "${DIR}" | wc -l)
if [ x"${DIROK}" = x"1" ]
then
    # Fix: the original wrapped the cmp pipeline in $(...), which executes
    # the substitution's (empty) output as a command and relies on $?
    # falling through. Run the comparison directly in the condition instead.
    if cmp -s "${FNa}" "${DIR}/${FNa}" && cmp -s "${FNb}" "${DIR}/${FNb}"
    then
        echo 'no extra commit, files match' >&5
    else
        cp "${FNa}" "${FNb}" "./${DIR}/" >&5
        git config user.name meerkut-C >&5
        git add . >&5
        git commit -m "an extra commit from sync-rdme.sh" >&5
        git push >&5
        echo '+ extra commit produced' >&5
    fi
else
    echo 'there will be no additional commit, missing target dir' >&5
fi
echo ' done.' >&5
package org.hiro;
import org.hiro.character.Human;
import org.hiro.map.AbstractCoordinate;
import org.hiro.things.Food;
import org.hiro.things.ObjectType;
import org.hiro.things.Potion;
import org.hiro.things.RingEnum;
import org.hiro.things.Scroll;
import org.hiro.things.Thing;
import java.util.Random;
/**
 * Assorted helpers: randomness, flattened map-grid lookups, and item/ring
 * predicates used throughout the game.
 */
public class Util {

    // Shared RNG. Fix: the original constructed a new Random inside every
    // rnd() call, which wastes allocations and can repeat sequences when
    // called in quick succession due to identical time-based seeds.
    private static final Random RANDOM = new Random();

    /*
     * rnd:
     * Pick a very random number.
     */
    public static int rnd(int range) {
        return range == 0 ? 0 : RANDOM.nextInt(range);
    }

    // Shifting left by 5 is *32, matching the MAXLINES * MAXCOLS (32*80)
    // layout: Global.places is a flattened 2-D grid indexed as (x * 32) + y.
    public static Place getPlace(AbstractCoordinate c) {
        return Global.places.get((c.getX() << 5) + c.getY());
    }

    public static Place INDEX(int y, int x) {
        return Global.places.get((x << 5) + y);
    }

    // Flags of the place at coordinate c.
    public static int flat(AbstractCoordinate c) {
        return getPlace(c).p_flags;
    }

    // Gold amount scaled by the player's level.
    static int GOLDCALC() {
        return rnd(50 + 10 * Human.instance.getLevel()) + 2;
    }

    static int CCHAR(int x) {
        return x; // A_CHARTEXT is the bitmask used to extract the character
    }

    // True when the ring on the given hand is the requested ring type.
    private static boolean ISRING(int h, RingEnum r) {
        return Global.cur_ring[h] != null && Global.cur_ring[h]._o_which == r.getValue();
    }

    // True when the requested ring type is worn on either hand.
    static boolean ISWEARING(RingEnum r) {
        return ISRING(Const.LEFT, r) || ISRING(Const.RIGHT, r);
    }

    /**
     * Replaced by thing.containsState(StateEnum.flg).
     */
    @Deprecated
    static boolean on(Thing thing, int flag) {
        // return (thing.containsState(flag);
        throw new UnsupportedOperationException("使わないで");
    }

    /**
     * Replaced by Coordinate's equals().
     */
    @Deprecated
    static boolean ce(AbstractCoordinate a, AbstractCoordinate b) {
        return false;
    }

    // #define moat(y,x) Global.places.get((x << 5) + y).p_monst
    // INDEX.p_ch
    // static int chat(int y, int x) {
    //     return Global.places.get((x << 5) + y).p_ch
    // }

    // What occupies the coordinate: the monster's display tile if present,
    // otherwise the underlying terrain/object character.
    public static ObjectType winat(AbstractCoordinate coordinate) {
        Place place = getPlace(coordinate);
        if (place.p_monst != null) {
            return ObjectType.get((char) place.p_monst.getDisplayTile());
        } else {
            return place.p_ch;
        }
    }

    // ASCII control-character of c (mask with octal 037).
    public static char CTRL(char c) {
        return (char) (c & 037);
    }

    // NOTE(review): a different check might be better here.
    static boolean ISMULT(Thing type) {
        return (type instanceof Potion || type instanceof Scroll || type instanceof Food);
    }
}
|
/**
*
*/
package jframe.pay.domain.util;
import java.util.Map;
/**
* @author dzh
* @date Aug 15, 2014 5:26:56 PM
* @since 1.0
*/
public class ToString {

    /**
     * Render a map as "\nbegin\n" followed by one "key->value," entry per
     * pair and "\nend\n". Null keys/values print as "null".
     *
     * @param map map to render; may be null
     * @return the rendered string, or "" for a null or empty map
     */
    public static final String toString(Map<?, ?> map) {
        if (map == null || map.isEmpty())
            return "";
        StringBuilder buf = new StringBuilder();
        buf.append("\nbegin\n");
        // Iterate entries directly instead of keySet()+get(): one lookup per
        // pair and no intermediate string concatenation inside append().
        for (Map.Entry<?, ?> e : map.entrySet()) {
            buf.append(e.getKey()).append("->").append(e.getValue()).append(',');
        }
        buf.append("\nend\n");
        return buf.toString();
    }
}
|
/*
* Copyright 2020 Google LLC
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "include/gpu/GrYUVABackendTextures.h"
// Number of color channels implied by a backend format's channel mask;
// returns 0 for unknown/unsupported masks.
static int num_channels(const GrBackendFormat& format) {
    switch (format.channelMask()) {
        case kRed_SkColorChannelFlag  : return 1;
        case kAlpha_SkColorChannelFlag: return 1;
        case kGray_SkColorChannelFlag : return 1;
        case kRG_SkColorChannelFlags  : return 2;
        case kRGB_SkColorChannelFlags : return 3;
        case kRGBA_SkColorChannelFlags: return 4;
        default                       : return 0;
    }
}

// Validates and stores the per-plane textures for a YUVA image. On any
// inconsistency the whole object is reset to the default (invalid) state.
GrYUVABackendTextures::GrYUVABackendTextures(
        const SkYUVAInfo& yuvaInfo,
        const GrBackendTexture textures[SkYUVAInfo::kMaxPlanes],
        GrSurfaceOrigin textureOrigin)
        : fYUVAInfo(yuvaInfo), fTextureOrigin(textureOrigin) {
    if (!fYUVAInfo.isValid()) {
        return;
    }
    SkISize planeDimensions[SkYUVAInfo::kMaxPlanes];
    int numPlanes = yuvaInfo.planeDimensions(planeDimensions);
    for (int i = 0; i < numPlanes; ++i) {
        // Each plane's texture must be valid, match the expected dimensions,
        // share the backend of plane 0, and carry at least as many channels
        // as the plane requires.
        if (!textures[i].isValid() ||
            textures[i].dimensions() != planeDimensions[i] ||
            textures[i].backend() != textures[0].backend() ||
            num_channels(textures[i].getBackendFormat()) < numRequiredChannels) {
            *this = {};
            return;
        }
        fTextures[i] = textures[i];
    }
}

// Translate the planes' channel masks into legacy SkYUVAIndex mappings.
bool GrYUVABackendTextures::toYUVAIndices(SkYUVAIndex indices[SkYUVAIndex::kIndexCount]) const {
    SkASSERT(indices);
    uint32_t channelFlags[] = {fTextures[0].getBackendFormat().channelMask(),
                               fTextures[1].getBackendFormat().channelMask(),
                               fTextures[2].getBackendFormat().channelMask(),
                               fTextures[3].getBackendFormat().channelMask()};
    return fYUVAInfo.toYUVAIndices(channelFlags, indices);
}
|
# /etc/fstab: static file system information.
#
# Use 'blkid' to print the universally unique identifier for a
# device; this may be used with UUID= as a more robust way to name devices
# that works even if disks are added and removed. See fstab(5).
#
# / was on /dev/sda1 during installation
# swap was on /dev/sda5 during installation
#
# <file system> <mount point> <type> <options> <dump> <pass>
UUID=28a0fd25-2311-4e94-90e6-f4033e778aee / ext4 errors=remount-ro 0 1
UUID=57d9f548-a496-4c1b-95f1-098967e9ad26 none swap sw 0 0
/dev/sr0 /media/cdrom0 udf,iso9660 user,noauto 0 0
VirtualBoxShared /home/jonas/VBoxShare vboxsf defaults 0 0
|
#!/usr/bin/env bash
set -Eeuo pipefail
### IMPORTANT DEVELOPER NOTE ###
# You should ALWAYS test any changes you make to this script by running:
# ./install.sh -vv --fake-home "test"
# The error flags set above are very, very picky about bad coding practices, and
# you'd be amazed at what they preclude. Never assume your changes are safe
# before you test!
# Variable Definitions {{{
# The name of the current executable
declare THIS_EXEC="$(basename "${BASH_SOURCE[0]}")"
# Where dotfiles will be installed (useful for testing the installation on a
# fake home directory).
declare TARGET_HOME="${HOME}"
# This repository's root directory.
declare DOTFILES_REPO="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Dotfiles directories (will be set after we've established TARGET_HOME)
declare DOTFILES_HOME
declare DOTFILES_SHELL
declare COMMON_SHELL
declare COMMON_SOURCE
declare DOTFILES_LINK
declare DOTFILES_COPY
declare DOTFILES_ZDOTDIR
# Filesystem directories to create
# Main projects directory
declare WS="${HOME}/workspace"
# Practice projects directory
declare PRAC="${HOME}/practice"
# Directory for installed third-party applications
declare APPS="${HOME}/applications"
# Third party archives directory
declare ARCHIVES="${APPS}/archives"
# Program flags
# Force all actions that would otherwise involve answering a prompt
declare FORCE_INSTALL=false
# Logging control variables
declare LOG_LEVEL=1
declare LOG_TO_FILE=""
# }}}
# OS Preparation {{{
# Perform other preparation steps depending on the current operating system.
# This function runs BEFORE the `setup()` function.
prepare_for_os() {
set_dotfiles_variables
source_common_defs
log_info "Checking for additional preparation steps for OS..."
local osName="$(uname -s)"
case "${osName}" in
"Darwin")
prepare_for_macos
;;
*)
log_info "No preparation function found for OS type: ${osName}"
;;
esac
}
# macOS specific preparation.
prepare_for_macos() {
log_info "Preparing for macOS installation"
source "${COMMON_SOURCE}/mac/functions/functions_mac.sh"
# Make sure developer tools are installed
install_mac_developer_tools
# Make sure homebrew is installed.
install_homebrew
# Install all the regular GNU CLI tools.
install_gnu_cli_tools_for_mac
# Make sure the CLI tools we reference throughout this install script are the
# GNU versions, not the BSD versions which come standard on macOS.
create_gnu_cli_tool_aliases_for_mac
}
# }}}
# Setup/Cleanup {{{
# Set all dotfiles-related variables after all arguments have been parsed and
# key variables have been set.
set_dotfiles_variables() {
DOTFILES_HOME="${TARGET_HOME}/.dotfiles"
DOTFILES_SHELL="${DOTFILES_REPO}/shell"
COMMON_SHELL="${DOTFILES_SHELL}/common"
COMMON_SOURCE="${COMMON_SHELL}/source"
DOTFILES_LINK="${DOTFILES_REPO}/link"
DOTFILES_COPY="${DOTFILES_REPO}/copy"
DOTFILES_ZDOTDIR="${DOTFILES_SHELL}/zsh/zdotdir"
}
# Take care of backing up existing ~/.dotfiles directory
backup_existing_installation() {
local oldDotfilesDir
# Safe name for backup directory
oldDotfilesDir="$(mktemp -u "${DOTFILES_HOME}.bak.XXXXXXXXXX")"
if [ -d "${DOTFILES_HOME}" ]; then
log_info "Backing up existing dotfiles installation to ${oldDotfilesDir}"
mv "${DOTFILES_HOME}" "${oldDotfilesDir}"
fi
}
# Check for an existing dotfiles installation at $DOTFILES_HOME.
check_existing_installation() {
log_info "Checking for existing dotfiles installation"
test -h "${DOTFILES_HOME}" || test -d "${DOTFILES_HOME}"
}
# Figure out what to do if an existing dotfiles installation is found.
remove_existing_installation() {
local response=""
command cat <<EOF
An existing dotfiles installation was found at ${DOTFILES_HOME}.
It must be removed before this installation can progress.
EOF
while ! echo "${response}" | grep -q '^[YyNn]$'; do
echoe "Remove it and continue with the installation? [y/n]"
IFS="" read -r response
done
if echo "${response}" | grep -q '^[Nn]$'; then
echoe "Exiting dotfiles installation."
exit 1
elif [ -h "${DOTFILES_HOME}" ]; then
log_info "Removing old dotfiles symlink"
rm -f "${DOTFILES_HOME}"
else
log_info "Removing old dotfiles installation"
rm -rf "${DOTFILES_HOME}"
fi
}
# Performs initial setup.
setup() {
log_info "Setting up..."
if ! ${FORCE_INSTALL}; then
check_existing_installation && remove_existing_installation
fi
backup_existing_installation
ensure_dirs_present
}
# }}}
# Help {{{
# Print usage information for this installer.
# Fix: corrected user-facing typos ("wihtout" -> "without",
# "This only real use" -> "The only real use").
_help() {
  command cat <<EOF
${THIS_EXEC}

Install tjtrabue's dotfiles on the current system. For the most part, this
script just creates a bunch of symlinks, so it is highly non-destructive. As
opposed to overwriting the user's existing dotfiles, this script backs up all
of the existing files before creating any symlinks. Nothing should be lost in
the process. Check the 'backup' directory created by this script if you wish to
restore your old dotfiles.

USAGE:
  ${THIS_EXEC} [OPTIONS]

OPTIONS:
  -h | --help
    Print the help message (this message) and exit.

  -v | --verbose
    Increase the logging output level. This command may be specified multiple
    times to further increase verbosity of output.

  -f | --force
    Force dotfiles to install, assuming "yes" for all prompts.
    This option should be used with caution, as it may overwrite some of your
    files, even though this script tries hard not to do that.

  -k | --fake-home <target_directory>
    Install dotfiles to a different directory. The default is to install
    dotfiles to the user's \$HOME directory. This option changes the default
    behavior, telling the install script to instead install everything to a
    different directory. The only real use for this option is to test the
    install script.

EXAMPLES:
  Install dotfiles with INFO logging output:
    ./install.sh -vv

  Force a new dotfiles installation, without prompting for confirmation:
    ./install.sh --force

  Test the install script by installing dotfiles to a fake home directory:
    ./install.sh -vv --fake-home ./test
EOF
}
# }}}
# Primary Functions {{{
# Link files/directories to the ~/.config directory.
link_config() {
local dotfilesConfig="${DOTFILES_LINK}/config"
local homeConfig="${TARGET_HOME}/.config"
local homeConfigBackup="${homeConfig}.bak"
log_info "Linking ${dotfilesConfig} to ${homeConfig}"
if [ -d "${homeConfig}" ]; then
log_info "Backing up files in ${homeConfig} to ${homeConfigBackup}"
mv -f "${homeConfig}" "${homeConfigBackup}"
fi
ln -sf "${dotfilesConfig}" "${homeConfig}"
if [ -d "${homeConfigBackup}" ]; then
log_info "Restoring files from ${homeConfigBackup} to ${homeConfig}"
rsync -ah "${homeConfigBackup}/" "${homeConfig}"
rm -rf "${homeConfigBackup}"
fi
log_info "Done."
}
# Link the LSP configuration dir to ~/.lsp
# Link the LSP configuration dir into the target home as ~/.lsp.
link_lsp_config() {
  local lspConfigDir="${DOTFILES_LINK}/lsp"
  # Fix: use TARGET_HOME (honors --fake-home) instead of HOME, consistent
  # with the other link_* functions.
  local lspConfigTarget="${TARGET_HOME}/.lsp"

  # Replace any existing directory so the symlink can be created cleanly.
  if [ -d "${lspConfigTarget}" ]; then
    rm -rf "${lspConfigTarget}"
  fi
  ln -sf "${lspConfigDir}" "${lspConfigTarget}"
}
# Link the gpg.conf file to ~/.gnupg/gpg.conf
# Link the gpg.conf file into the target home's ~/.gnupg/gpg.conf.
link_gpg_config() {
  local gpgConfFile="${DOTFILES_LINK}/gnupg/gpg.conf"
  # Fix: use TARGET_HOME (honors --fake-home) instead of HOME, consistent
  # with the other link_* functions.
  local gnupgHome="${TARGET_HOME}/.gnupg"
  local gpgConfTarget="${gnupgHome}/gpg.conf"

  if [ ! -f "${gpgConfFile}" ]; then
    err "No GPG config file found at: ${BLUE}${gpgConfFile}${NC}"
    return 1
  fi

  log_info "Linking ${BLUE}${gpgConfFile}${NC} to ${BLUE}${gpgConfTarget}${NC}"
  mkdir -p "${gnupgHome}"
  ln -sf "${gpgConfFile}" "${gpgConfTarget}"
}
# Link the repository itself, if necessary.
link_repo() {
log_info "Linking dotfiles repository to: ${DOTFILES_HOME}"
if [ "${DOTFILES_REPO}" != "${DOTFILES_HOME}" ]; then
ln -sf "${DOTFILES_REPO}" "${DOTFILES_HOME}"
fi
log_info "Done."
}
# Create dotfile symlinks from user's home dir to those managed by the dotfiles
# repository. This keeps the user's dotfiles in sync with the repository.
# Use the `dotsync` function to keep dotfiles up to date after the initial
# installation.
link_dotfiles() {
log_info "Linking dotfiles"
find "${DOTFILES_LINK}/home" -type f -exec ln -sfb -t "${TARGET_HOME}" '{}' \;
log_info "Done"
}
# Link the Zsh dotfiles directory to ~/.zsh
link_zdotdir() {
local targetZdotdir="${TARGET_HOME}/.zsh"
log_info "Linking Zsh directory ${DOTFILES_ZDOTDIR} to ${targetZdotdir}"
if [ -h "${targetZdotdir}" ]; then
rm -f "${targetZdotdir}"
elif [ -d "${targetZdotdir}" ]; then
rm -rf "${targetZdotdir}"
fi
ln -sf "${DOTFILES_ZDOTDIR}" "${targetZdotdir}"
}
# Copy one-time transfer files.
copy_dotfiles() {
local oneTimeTransfersDir="${DOTFILES_COPY}/dotfiles_to_copy"
log_info "Copying dotfiles from: ${BLUE}${oneTimeTransfersDir}${NC}"
find "${oneTimeTransfersDir}" -maxdepth 1 -mindepth 1 -type f \
-exec cp -f -t "${TARGET_HOME}/" '{}' \;
log_info "Copying complete"
}
# Create important directories.
ensure_dirs_present() {
log_info "Creating important directories"
local dirs=(
"${TARGET_HOME}"
"${WS}"
"${PRAC}"
"${APPS}"
"${ARCHIVES}"
)
local dir
for dir in "${dirs[@]}"; do
mkdir -p "${dir}" &>/dev/null
done
}
# Import common aliases and functions for use within this script.
source_common_defs() {
local f
for f in "${COMMON_SOURCE}"/{aliases,functions,other}/*; do
. "${f}"
done
}
# Main that calls all subroutines
main() {
setup
copy_dotfiles
link_repo
link_dotfiles
link_zdotdir
link_config
link_lsp_config
link_gpg_config
add_extra_os_vars
add_extra_paths_to_path_file
}
# }}}
# Need to prepare OS before CLI option parsing because we may not even have
# access to GNU getopt yet.
prepare_for_os
# Parse CLI Options {{{
args=$(getopt -o hvfk: \
--long help,verbose,force,fake-home: \
-n 'install.sh' \
-- "$@")
eval set -- "$args"
# extract options and their arguments into variables.
while true; do
case "$1" in
-h | --help)
_help
shift
exit 0
;;
-v | --verbose)
((LOG_LEVEL += 1))
shift
;;
-f | --force)
FORCE_INSTALL=true
shift
;;
-k | --fake-home)
case "$2" in
"")
shift 2
;;
*)
TARGET_HOME="${2}"
shift 2
;;
esac
;;
--)
shift
break
;;
*)
err "Unknown option $1 to ${THIS_EXEC}"
exit 2
;;
esac
done
# }}}
# Main execution
main
# Modeline for this file (KEEP IT COMMENTED!)
# vim:foldenable:foldmethod=marker:foldlevel=0
|
#!/usr/bin/python3
"""
Rectangle module

Defines ``Rectangle``, a subclass of ``BaseGeometry`` that validates its
dimensions at construction time.
"""
BaseGeometry = __import__('7-base_geometry').BaseGeometry


class Rectangle(BaseGeometry):
    """
    class that inherits from BaseGeometry

    Both dimensions are run through ``integer_validator`` before being
    stored in name-mangled private attributes.
    """
    def __init__(self, width, height):
        """
        Initialize rectangle from BaseGeometry

        :param width: the rectangle's width; presumably must be a positive
            integer (enforced by ``integer_validator`` — confirm in
            7-base_geometry).
        :param height: the rectangle's height; same validation applies.
        """
        self.integer_validator('width', width)
        self.integer_validator('height', height)
        self.__width = width
        self.__height = height
|
#!/bin/sh
# Run the QEMU image build as root while preserving the invoking user's
# environment. PATH is quoted: unquoted, a PATH entry containing spaces
# would be word-split into separate sudo arguments and break the command.
sudo -E PATH="$PATH" ./build-image-qemu.sh
|
<gh_stars>100-1000
/**
* Created by Wonderchief on 2017/1/6.
* Legle co,.ltd.
* This is a auto-generated code file.
* 版权所有:广州聆歌信息科技有限公司
*/
var models = require('../models');
var Promise = require('bluebird');
var Moment = require('moment');
module.exports = {
//创建新的记录
create:function (note) {
//判断是否为空
if (note != null) {
return models.note.create(note);
} else {
return Promise.reject({message:"note对象不能为空"});
}
}
//根据主键查找一条记录
,findOne: function (note,attributes) {
if (typeof(note) === 'string') {
return models.note.findOne({where: {listid: note},attributes:attributes});
}
else {
if (note != null) {
if (note.listid != null) {
return models.note.findOne({where: {listid: note.listid},attributes:attributes});
}
}
}
return Promise.reject({message:"note对象不能为空"});
}
//根据对象查找一条记录
,findOne_obj:function(_obj,attributes){
if (_obj){
return models.note.findOne({where:_obj,attributes:attributes});
}
return Promise.reject({message:"note对象不能为空"});
}
//根据主键更新记录
,update: function (note) {
if (note != null) {
if (note.listid != null) {
return models.note.update(note, {where: {listid: note.listid}});
}
}
return Promise.reject({message:"note对象不能为空"});
}
//根据对象更新记录
,update_obj: function (note,obj) {
if (note != null) {
return models.note.update(note, {where: obj});
}
return Promise.reject({message:"note对象不能为空"});
}
//根据主键删除记录
,delete: function (note) {
if (typeof(note) === 'string') {
return models.note.destroy({where: {listid: note}});
}
else {
if (note != null) {
if (note.listid != null) {
return models.note.destroy({where: {listid: note.listid}});
}
}
}
return Promise.reject({message:"note对象不能为空"});
}
//根据对象删除记录
,delete_obj: function (note) {
if (note != null) {
return models.note.destroy({where: note});
}
return Promise.reject({message:"note对象不能为空"});
}
//列出前1000条记录
,list: function (limit) {
if(typeof(limit)==='number')
{
if(limit>1000)
{
limit=1000;
}
return models.note.findAndCountAll({limit:limit});
}else {
if(limit==null)
return models.note.findAndCountAll({limit:1000});
}
return Promise.reject({message:"list 参数类型有误"});
}
//根据日期范围内列出指定字段和,从n到n+size,size小于50,默认为20,没有日期范围则列出最新
,listPage: function (offset,limit,st,et,attributes) {
var where = {};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
if (typeof(offset) === 'number') {
if (limit > 50) {
limit = 50;
}
}
else {
limit = 20;
}
if (limit == null)limit = 20;
if (typeof(offset) === 'number') {
return models['note'].findAndCountAll({where: where,attributes:attributes, limit: limit, offset: offset,order: 'createdAt DESC'});
} else {
if (offset == null)
return models.note.findAndCountAll({where: where, limit: limit,order: 'createdAt DESC'});
}
return Promise.reject({message:"listPage 参数类型有误"});
}
//按时间范围统计created数
,count: function(st,et)
{
var where={};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
return models.note.count({where: where});
}
//按照listid索引列出从n到n+size,size小于50,默认为20
,listPage_listid: function (offset,limit,listid,st,et,attributes) {
var where = {listid:listid};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
if (typeof(offset) === 'number') {
if (limit > 50) {
limit = 50;
}
}
else {
limit = 20;
}
if (limit == null)limit = 20;
if (typeof(offset) === 'number') {
return models['note'].findAndCountAll({where: where,attributes:attributes, limit: limit, offset: offset,order: 'createdAt DESC'});
} else {
if (offset == null)
return models.note.findAndCountAll({where: where,attributes:attributes, limit: limit,order: 'createdAt DESC'});
}
return Promise.reject({message:"listPage_listid 参数类型有误"});
}
//按listid索引和时间范围统计created数
,count_listid: function(listid,st,et)
{
var where = {listid:listid};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
return models.note.count({where: where});
}
//按照userid索引列出从n到n+size,size小于50,默认为20
,listPage_userid: function (offset,limit,userid,st,et,attributes) {
var where = {userid:userid};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
if (typeof(offset) === 'number') {
if (limit > 50) {
limit = 50;
}
}
else {
limit = 20;
}
if (limit == null)limit = 20;
if (typeof(offset) === 'number') {
return models['note'].findAndCountAll({where: where,attributes:attributes, limit: limit, offset: offset,order: 'createdAt DESC'});
} else {
if (offset == null)
return models.note.findAndCountAll({where: where,attributes:attributes, limit: limit,order: 'createdAt DESC'});
}
return Promise.reject({message:"listPage_userid 参数类型有误"});
}
//按userid索引和时间范围统计created数
,count_userid: function(userid,st,et)
{
var where = {userid:userid};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
return models.note.count({where: where});
}
//按照userid索引列出从n到n+size,size小于50,默认为20
,listPage_userid: function (offset,limit,userid,st,et,attributes) {
var where = {userid:userid};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
if (typeof(offset) === 'number') {
if (limit > 50) {
limit = 50;
}
}
else {
limit = 20;
}
if (limit == null)limit = 20;
if (typeof(offset) === 'number') {
return models['note'].findAndCountAll({where: where,attributes:attributes, limit: limit, offset: offset,order: 'createdAt DESC'});
} else {
if (offset == null)
return models.note.findAndCountAll({where: where,attributes:attributes, limit: limit,order: 'createdAt DESC'});
}
return Promise.reject({message:"listPage_userid 参数类型有误"});
}
//按userid索引和时间范围统计created数
,count_userid: function(userid,st,et)
{
var where = {userid:userid};
if (st != null || et != null) {
where.createdAt = {
$between: [st, et]
}
}
return models.note.count({where: where});
}
};
|
package KataRubenToni.CookingSocialNetwork.model;
import java.util.ArrayList;
/**
* Created by Ruben on 06/12/2015.
*/
/**
 * A cooking-social-network user: credentials, profile data, follow
 * relationships, and published recipes.
 */
public class User {
    private int id;
    private String name;
    private String userName;
    private String password;
    private Picture picture;
    private String description;
    private ArrayList<User> followed;
    private ArrayList<User> followers;
    private ArrayList<Recipe> recipes;

    public User(String name, String password, String userName) {
        this.name = name;
        this.userName = userName;
        this.password = password;
        this.followed = new ArrayList<User>();
        this.followers = new ArrayList<User>();
        this.recipes = new ArrayList<Recipe>();
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public Picture getPicture() {
        return picture;
    }

    public void setPicture(Picture picture) {
        this.picture = picture;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /** Returns a defensive copy; mutating it does not affect this user. */
    public ArrayList<User> getFollowed() {
        return new ArrayList<User>(followed);
    }

    // NOTE(review): the list getters copy, but the setters store the caller's
    // reference directly — external mutation of that list leaks in. Confirm
    // whether callers rely on this before tightening.
    public void setFollowed(ArrayList<User> followed) {
        this.followed = followed;
    }

    public ArrayList<User> getFollowers() {
        return new ArrayList<User>(followers);
    }

    public void setFollowers(ArrayList<User> followers) { this.followers = followers; }

    public ArrayList<Recipe> getRecipes() { return new ArrayList<Recipe>(recipes); }

    public void setRecipes(ArrayList<Recipe> recipes) { this.recipes = recipes; }

    public void addRecipe(Recipe r) {
        this.recipes.add(r);
    }

    public void removeRecipe(Recipe r) { this.recipes.remove(r); }

    /** Users are identified solely by their unique userName. */
    @Override
    public boolean equals(Object o) {
        if (o instanceof User) {
            User other = (User) o;
            return this.userName.equals(other.userName);
        } else return false;
    }

    /**
     * Added to honor the equals/hashCode contract: the original class
     * overrode equals() without hashCode(), so equal users could land in
     * different buckets of HashSet/HashMap. Hash derives from the same
     * field equals() compares.
     */
    @Override
    public int hashCode() {
        return userName == null ? 0 : userName.hashCode();
    }
}
|
# Optimize a linear regression model via an exhaustive grid search over
# its constructor options, scored by negative mean squared error.
from sklearn.linear_model import LinearRegression
import numpy as np

X = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]])
y = np.array([3, 5, 7, 9])

# Create a linear regression model
model = LinearRegression().fit(X, y)

# Optimize the model
from sklearn.model_selection import GridSearchCV

# Fix: the original grid included 'normalize', which was deprecated in
# scikit-learn 1.0 and removed in 1.2 — GridSearchCV raises on modern
# versions. Only currently supported constructor parameters remain.
parameters = {'fit_intercept': [True, False], 'copy_X': [True, False]}
grid = GridSearchCV(model, parameters, scoring='neg_mean_squared_error')
grid.fit(X, y)

# Print the best model
print(grid.best_estimator_)
#!/bin/sh
# Build and install the Jim Tcl interpreter from source with the oo/tree/
# binary/sqlite3 extensions, UTF-8 and IPv6 support, and no docs.
apt-get update
apt-get -y install git build-essential libsqlite3-dev
git clone https://github.com/msteveb/jimtcl.git
cd jimtcl || exit 1
# Pin to the 0.77 release for a reproducible build.
git checkout --detach 0.77
./configure --with-ext="oo tree binary sqlite3" --enable-utf8 --ipv6 --disable-docs
make
make install
cd .. || exit 1
|
/*
* Copyright 2011-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.servlet;
import java.util.List;
import java.util.Map;
import org.glowroot.instrumentation.api.ThreadContext;
import org.glowroot.instrumentation.api.ThreadContext.Priority;
import org.glowroot.instrumentation.api.checker.Nullable;
import org.glowroot.instrumentation.api.util.Beans;
import org.glowroot.instrumentation.api.weaving.Advice;
import org.glowroot.instrumentation.api.weaving.Bind;
import org.glowroot.instrumentation.servlet.boot.ServletInstrumentationProperties;
import org.glowroot.instrumentation.servlet.boot.ServletInstrumentationProperties.SessionAttributePath;
import org.glowroot.instrumentation.servlet.boot.ServletMessageSupplier;
import org.glowroot.instrumentation.servlet.boot.Strings;
/**
 * Instruments {@code javax.servlet.http.HttpSession} attribute mutation
 * (setAttribute/putValue/removeAttribute) to mirror configured session
 * attributes — and optionally the transaction user — into the active
 * {@link ServletMessageSupplier} for the current transaction.
 */
public class SessionInstrumentation {

    @Advice.Pointcut(className = "javax.servlet.http.HttpSession",
            methodName = "setAttribute|putValue",
            methodParameterTypes = {"java.lang.String", "java.lang.Object"},
            nestingGroup = "servlet-inner-call")
    public static class SetAttributeAdvice {

        @Advice.OnMethodAfter
        public static void onAfter(
                @Bind.Argument(0) @Nullable String name,
                @Bind.Argument(1) @Nullable Object value,
                ThreadContext context) {
            onAfterCommon(name, value, context);
        }
    }

    @Advice.Pointcut(className = "javax.servlet.http.HttpSession",
            methodName = "removeAttribute",
            methodParameterTypes = {"java.lang.String"},
            nestingGroup = "servlet-inner-call")
    public static class RemoveAttributeAdvice {

        @Advice.OnMethodAfter
        public static void onAfter(
                @Bind.Argument(0) @Nullable String name,
                ThreadContext context) {
            // calling HttpSession.setAttribute() with null value is the same as calling
            // removeAttribute(), per the setAttribute() javadoc
            onAfterCommon(name, null, context);
        }
    }

    // Shared post-advice: updates user / session-attribute capture when a
    // servlet request is in flight (messageSupplier non-null).
    private static void onAfterCommon(@Nullable String name, @Nullable Object value,
            ThreadContext context) {
        if (name == null) {
            // theoretically possible, so just ignore
            return;
        }
        // name is non-null per HttpSession.setAttribute() javadoc, but value may be null
        // (which per the javadoc is the same as calling removeAttribute())
        ServletMessageSupplier messageSupplier =
                (ServletMessageSupplier) context.getServletRequestInfo();
        if (messageSupplier != null) {
            updateUserIfApplicable(name, value, context);
            updateSessionAttributesIfApplicable(messageSupplier, name, value);
        }
    }

    // If the configured user attribute path matches this attribute, capture
    // the transaction user immediately (no lazy evaluation).
    private static void updateUserIfApplicable(String attributeName,
            @Nullable Object attributeValue,
            ThreadContext context) {
        if (attributeValue == null) {
            // if user value is set to null, don't clear it
            return;
        }
        SessionAttributePath userAttributePath =
                ServletInstrumentationProperties.userAttributePath();
        if (userAttributePath == null) {
            return;
        }
        if (!userAttributePath.getAttributeName().equals(attributeName)) {
            return;
        }
        // capture user now, don't use a lazy supplier
        List<String> nestedPath = userAttributePath.getNestedPath();
        if (nestedPath.isEmpty()) {
            context.setTransactionUser(attributeValue.toString(),
                    Priority.CORE_INSTRUMENTATION);
        } else {
            Object user;
            try {
                user = Beans.value(attributeValue, nestedPath);
            } catch (Exception e) {
                // surface the access failure as the user value rather than
                // aborting the advice
                user = "<could not access: " + e + ">";
            }
            if (user != null) {
                // if user is null, don't clear it
                context.setTransactionUser(user.toString(), Priority.CORE_INSTRUMENTATION);
            }
        }
    }

    // Re-captures all configured session attribute paths rooted at the
    // changed attribute (exact name match or wildcard).
    private static void updateSessionAttributesIfApplicable(
            ServletMessageSupplier messageSupplier, String attributeName,
            @Nullable Object attributeValue) {
        if (ServletInstrumentationProperties.captureSessionAttributeNames()
                .contains(attributeName)
                || ServletInstrumentationProperties.captureSessionAttributeNames()
                        .contains("*")) {
            // update all session attributes (possibly nested) at or under the set attribute
            for (SessionAttributePath attributePath : ServletInstrumentationProperties
                    .captureSessionAttributePaths()) {
                if (attributePath.getAttributeName().equals(attributeName)
                        || attributePath.isAttributeNameWildcard()) {
                    if (attributePath.getNestedPath().isEmpty()
                            && !attributePath.isWildcard()) {
                        updateSessionAttribute(messageSupplier, attributeName, attributeValue);
                    } else {
                        updateNestedSessionAttributes(messageSupplier, attributePath,
                                attributeValue);
                    }
                }
            }
        }
    }

    // Records the new (possibly null) value of a simple, non-nested attribute.
    private static void updateSessionAttribute(ServletMessageSupplier messageSupplier,
            String attributeName, @Nullable Object attributeValue) {
        if (attributeValue == null) {
            messageSupplier.putSessionAttributeChangedValue(attributeName, null);
        } else {
            messageSupplier.putSessionAttributeChangedValue(attributeName,
                    Strings.nullToEmpty(attributeValue.toString()));
        }
    }

    // Navigates a nested/wildcard attribute path; wildcard paths expand Map
    // entries or bean properties into one changed-value entry per key.
    private static void updateNestedSessionAttributes(ServletMessageSupplier messageSupplier,
            SessionAttributePath attributePath, @Nullable Object attributeValue) {
        String fullPath = attributePath.getFullPath();
        if (attributePath.isWildcard()) {
            Object val = HttpSessions.getSessionAttribute(attributeValue, attributePath);
            if (val == null) {
                messageSupplier.putSessionAttributeChangedValue(fullPath, null);
            } else if (val instanceof Map<?, ?>) {
                for (Map.Entry<?, ?> entry : ((Map<?, ?>) val).entrySet()) {
                    Object v = entry.getValue();
                    messageSupplier.putSessionAttributeChangedValue(
                            fullPath + "." + entry.getKey(),
                            v == null ? null : Strings.nullToEmpty(v.toString()));
                }
            } else {
                for (Map.Entry<String, String> entry : Beans.propertiesAsText(val).entrySet()) {
                    messageSupplier.putSessionAttributeChangedValue(
                            fullPath + "." + entry.getKey(), entry.getValue());
                }
            }
        } else if (attributeValue == null) {
            // no need to navigate path since it will always be null
            messageSupplier.putSessionAttributeChangedValue(fullPath, null);
        } else {
            Object val = HttpSessions.getSessionAttribute(attributeValue, attributePath);
            messageSupplier.putSessionAttributeChangedValue(fullPath,
                    val == null ? null : Strings.nullToEmpty(val.toString()));
        }
    }
}
|
/*
* SimpleModal Basic Modal Dialog
* http://simplemodal.com
*
* Copyright (c) 2013 <NAME> - http://ericmmartin.com
*
* Licensed under the MIT license:
* http://www.opensource.org/licenses/mit-license.php
*/
jQuery(function ($) {
  // Uncomment to open the dialog as soon as the page loads:
  // $('#basic-modal-content').modal();

  // Open the basic modal dialog when its trigger element is clicked;
  // returning false suppresses the default action and stops propagation.
  $('#basic-modal .basic').on('click', function () {
    $('#basic-modal-content').modal();
    return false;
  });
});
#!/bin/bash
# Prompt until the user answers y or n.
# Returns status 0 for "y" and 1 for "n" so callers can branch on $?.
# Fix: the original used `return true` / `return false`, but bash `return`
# only accepts a numeric status — both failed with "numeric argument
# required" and yielded status 2, making yes/no indistinguishable.
function GETAPPROVAL {
    while true ; do
        echo "Do you want to run the command (y/n)?"
        read -n 1 WISH
        if [ "$WISH" == "y" ]; then
            return 0
        elif [ "$WISH" == "n" ]; then
            return 1
        fi
    done
}
# Re-quote arguments that contain shell metacharacters (<, >, |) or spaces
# so they survive the later `eval` in CHECKPOINT as single words.
# FIXME: in the pipe branch, \' inside double quotes keeps the backslash, so
# the output contains literal \'...\' rather than plain single quotes.
# FIXME: the final `echo $out` is unquoted, so the result is re-word-split —
# acceptable here only because CHECKPOINT evals the output anyway.
function ESCAPE_PARAMS {
    local out=""
    for v in "$@"; do
        if [[ "$v" == *"<"* ]]; then
            out="$out \"$v\""
        elif [[ "$v" == *">"* ]] ; then
            out="$out \"$v\""
        elif [[ "$v" == *"|"* ]] ; then
            out="$out \'$v\'"
        elif [[ "$v" == *" "* ]] ; then
            out="$out \"$v\""
        else
            out="$out $v"
        fi
    done
    echo $out
}
# Convenience wrapper: run CHECKPOINT under the shared "DEFAULT" checkpoint
# ID, forwarding the command and its arguments unchanged.
function CHK {
    local ID=DEFAULT
    CHECKPOINT $ID "$@"
}
# Run a command under a named checkpoint: a per-ID counter tracks progress,
# and if LAST_GOOD[_ID] is set (via -c on a rerun) commands with a lower
# counter are skipped, resuming the script after a previous failure.
function CHECKPOINT {
    COLOR_GREEN='\e[00;32m'
    COLOR_RED='\e[00;31m'
    COLOR_BLUE='\e[00;34m'
    COLOR_DEFAULT='\e[00m'
    local ID=$1; shift
    #We retrieve the counter variable we use for checkpointing
    #Because the name of the variable is govern by the checkpoint ID
    #we must use indirect approach
    local COUNTER_NAME="CHECKPOINT_${ID}_COUNTER"
    local COUNTER
    eval COUNTER=\$$COUNTER_NAME
    if [ -z $COUNTER ]; then
        COUNTER=0
    fi
    echo -e ${COLOR_GREEN}CHECKPOINT:$ID, COUNTER=$COUNTER $COLOR_DEFAULT >&2
    #Now the same for "LAST GOOD STATE"
    if [ "$ID" == "DEFAULT" ]; then
        local LAST_GOOD_NAME="LAST_GOOD"
    else
        local LAST_GOOD_NAME="LAST_GOOD_$ID"
    fi
    local LAST_GOOD_VALUE
    eval LAST_GOOD_VALUE=\$$LAST_GOOD_NAME
    echo -e ${COLOR_GREEN}"CHECKPOINT: $LAST_GOOD_NAME=$LAST_GOOD_VALUE"${COLOR_DEFAULT} >&2
    #The command has to be run, if no-checkpoint tracking is in progress
    #or we are already gone through the last problematic part
    if [ -z $LAST_GOOD_VALUE ] || [ $COUNTER -ge $LAST_GOOD_VALUE ]; then
        #bash print_args.sh `ESCAPE_PARAMS $CMD`
        # FIXME: [ !$INTERACTIVE_CHECKPOINT ] tests a non-empty string
        # ("!true" or "!"), so it is ALWAYS true and the interactive branch
        # below is unreachable; should be [ -z "$INTERACTIVE_CHECKPOINT" ].
        if [ !$INTERACTIVE_CHECKPOINT ] ; then
            eval `ESCAPE_PARAMS "$@"`
        else
            # NOTE: stores the function NAME; `if $APPROVAL` then invokes it
            # and branches on its exit status.
            APPROVAL=GETAPPROVAL
            if $APPROVAL ; then
                eval `ESCAPE_PARAMS $@`
            fi
        fi
        if [ $? -ne 0 ] ; then
            echo -e ${COLOR_RED}"CHECKPOINT FAILURE: The command returned non-zero status" >&2
            echo -e "  rerun the script with the parameter -c $LAST_GOOD_NAME=$COUNTER" >&2
            echo -e "COMMAND">&2
            echo -e "   " "$@" ${COLOR_RED} >&2
            exit 1
        fi
    else
        #Else, we just skip the command....
        echo -e ${COLOR_GREEN}"CHECKPOINT: SKIPPING, $LAST_GOOD_NAME=$COUNTER" >&2
        echo -e "$@"${COLOR_DEFAULT} >&2
    fi
    COUNTER=$(( $COUNTER + 1 ))
    eval export $COUNTER_NAME=$COUNTER
}
# Kill every background job of this shell that `jobs` reports as Running,
# by extracting the job number and signalling it with `kill %N`.
function KILLBG_JOBS {
    jobs \
        | perl -ne 'print "$1\n" if m/^\[(\d+)\][+-]? +Running/;' \
        | while read -r ; do kill %"$REPLY" ; done
}
# Trap handler: on failure, report every CHECKPOINT_*_COUNTER variable,
# rewritten as the matching LAST_GOOD[_ID]=N assignment the user can pass
# back via -c to resume from the failing step.
function ONEXIT_HANDLER {
    COLOR_GREEN='\e[00;32m'
    COLOR_RED='\e[00;31m'
    COLOR_BLUE='\e[00;34m'
    COLOR_DEFAULT='\e[00m'
    # Translate CHECKPOINT_<ID>_COUNTER=N into LAST_GOOD_<ID>=N
    # (the DEFAULT ID maps to the plain LAST_GOOD variable).
    counters=`set | egrep "^CHECKPOINT_[_A-Z]+_COUNTER=" | sed 's/^CHECKPOINT\(_[_A-Z][_A-Z]*\)_COUNTER=/LAST_GOOD\1=/g' | sed "s/^LAST_GOOD_DEFAULT=/LAST_GOOD=/g"`
    if [[ ! -z "$counters" ]]; then
        echo -e ${COLOR_RED}"CHECKPOINT FAILURE: The last command returned non-zero status"${COLOR_DEFAULT} >&2
        echo -e ${COLOR_RED}"look at the counters and try to rerun this script (after figuring the issue)"${COLOR_DEFAULT} >&2
        echo -e ${COLOR_RED}"using the -c COUNTER_NAME=COUNTER_VALUE parameters;"${COLOR_DEFAULT} >&2
        echo -e ${COLOR_RED}"You can use -c \"COUNTER_NAME1=COUNTER_VALUE1;COUNTER_NAME2=COUNTER_VALUE2\" as well"${COLOR_DEFAULT} >&2
        echo -e ${COLOR_RED}"The counters: \n $counters"${COLOR_DEFAULT} >&2
    else
        echo -e ${COLOR_RED}"CHECKPOINT FAILURE: The last command returned non-zero status"${COLOR_DEFAULT} >&2
        echo -e ${COLOR_RED}"No checkpoint was found. Try to figure out the problem and "${COLOR_DEFAULT} >&2
        echo -e ${COLOR_RED}"run the script again"${COLOR_DEFAULT} >&2
    fi
}
# Report checkpoint state whenever the script is interrupted/terminated or a
# command fails. NOTE: SIGKILL cannot be trapped — listing it is a no-op.
trap "ONEXIT_HANDLER; exit; " SIGINT SIGKILL SIGTERM ERR
# -c VAR=N  seed LAST_GOOD counters (evaluated verbatim) to resume a run
# -i        ask for confirmation before each checkpointed command
while getopts ":c:i" opt; do
    case $opt in
    c)
        eval $OPTARG
        ;;
    i)
        INTERACTIVE_CHECKPOINT=true
    esac
done
|
<filename>mbhd-swing/src/main/java/org/multibit/hd/ui/events/view/WalletDetailChangedEvent.java<gh_stars>10-100
package org.multibit.hd.ui.events.view;

import org.multibit.hd.ui.views.components.wallet_detail.WalletDetail;

/**
 * <p>Event to provide the following to View Event API:</p>
 * <ul>
 * <li>Indicates the wallet detail has changed</li></li>
 * </ul>
 *
 * @since 0.0.1
 *
 */
public class WalletDetailChangedEvent implements ViewEvent {

  // Immutable payload carried by this event.
  private final WalletDetail walletDetail;

  /**
   * @param walletDetail the wallet detail
   */
  public WalletDetailChangedEvent(WalletDetail walletDetail) {
    this.walletDetail = walletDetail;
  }

  /**
   * @return The wallet detail to populate the view with
   */
  public WalletDetail getWalletDetail() {
    return walletDetail;
  }
}
|
#!/bin/bash
# script to run the necessary preprocessing steps before starting the wrf run
# setting -e to abort on error
set -e
# define terminal colors
. "${COLOR_PATH}"
# cleaning up in wps preprocessing folder
# Guard: required environment (log/build paths) must be set by the caller.
if [ -z "${LOG_PATH}" ]; then
    printf " Log path is not set, exiting with error."
    exit 1
fi
printf "Cleaning up wps data from last time at %s\\n" "$(date +"%T")" >> "${INFO_LOG}"
if [ -z "${BUILD_PATH}" ]; then
    printf " Build path is not set, exiting with error."
    exit 1
fi
# remove met_em files from the last run
find "${WPS_DIR}" -name 'met_em.d01.*' -exec rm {} \;
# remove grib files
find "${WPS_DIR}" -name 'GRIB*' -exec rm {} \;
# remove FILE objects of the time stamps
find "${WPS_DIR}" -name 'FILE*' -exec rm {} \;
find "${WPS_DIR}" -name 'PFILE*' -exec rm {} \;
# cleaning up in wrf
printf "Cleaning up wrf data from last time at %s\\n" "$(date +"%T")" >> "${INFO_LOG}"
# remove met_em files from the last run
find "${WRF_DIR}/test/em_real/" -name 'met_em.d01.*' -exec rm {} \;
|
import feign.RequestInterceptor;
import feign.RequestTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
@Configuration
public class ExtendFeignTenantInterceptor implements RequestInterceptor {
@Autowired
private TenantService tenantService; // Assuming a TenantService to retrieve tenant-specific information
@Override
public void apply(RequestTemplate template) {
// Retrieve tenant-specific information from the current user's session context
String tenantId = getTenantIdFromSessionContext();
// Add the tenant-specific information to the request headers
template.header("X-Tenant-Id", tenantId);
}
private String getTenantIdFromSessionContext() {
// Assuming the tenant-specific information is stored in the user's session context
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
if (attributes != null) {
// Retrieve tenant-specific information from the session context
return tenantService.getTenantId(attributes.getRequest().getSession());
}
return null;
}
} |
<gh_stars>0
package net.anatolich.subscriptions;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot entry point for the subscriptions service.
 */
@SpringBootApplication
public class SubscriptionsServiceApplication {

    /** Bootstraps the Spring application context and starts the service. */
    public static void main(String[] args) {
        SpringApplication.run(SubscriptionsServiceApplication.class, args);
    }
}
|
import os
import weakref
from collections import defaultdict
from numbers import Number as numeric_type
import numpy as np
from yt.data_objects.index_subobjects.grid_patch import AMRGridPatch
from yt.data_objects.particle_unions import ParticleUnion
from yt.data_objects.profiles import (
Profile1DFromDataset,
Profile2DFromDataset,
Profile3DFromDataset,
)
from yt.data_objects.static_output import Dataset, ParticleFile, validate_index_order
from yt.fields.field_exceptions import NeedsGridType
from yt.funcs import is_root, parse_h5_attr
from yt.geometry.grid_geometry_handler import GridIndex
from yt.geometry.particle_geometry_handler import ParticleIndex
from yt.units import dimensions
from yt.units.unit_registry import UnitRegistry
from yt.units.yt_array import YTQuantity, uconcatenate
from yt.utilities.exceptions import GenerationInProgress, YTFieldTypeNotFound
from yt.utilities.logger import ytLogger as mylog
from yt.utilities.on_demand_imports import _h5py as h5py
from yt.utilities.parallel_tools.parallel_analysis_interface import parallel_root_only
from yt.utilities.tree_container import TreeContainer
from .fields import YTDataContainerFieldInfo, YTGridFieldInfo
_grid_data_containers = ["arbitrary_grid", "covering_grid", "smoothed_covering_grid"]
class SavedDataset(Dataset):
    """
    Base dataset class for products of calling save_as_dataset.

    Restores parameters, the unit registry/system, and code units from the
    HDF5 attributes written by save_as_dataset.
    """

    # Attribute names subclasses want copied from parameters onto self.
    _con_attrs = ()

    def _parse_parameter_file(self):
        self.refine_by = 2
        with h5py.File(self.parameter_filename, mode="r") as f:
            for key in f.attrs.keys():
                v = parse_h5_attr(f, key)
                if key == "con_args":
                    # NOTE(review): eval on file contents — acceptable only
                    # for trusted ytdata outputs; do not load untrusted files.
                    try:
                        v = eval(v)
                    except ValueError:
                        # support older ytdata outputs
                        v = v.astype("str")
                    except NameError:
                        # This is the most common error we expect, and it
                        # results from having the eval do a concatenated decoded
                        # set of the values.
                        v = [_.decode("utf8") for _ in v]
                self.parameters[key] = v
            self._with_parameter_file_open(f)
        # if saved, restore unit registry from the json string
        if "unit_registry_json" in self.parameters:
            self.unit_registry = UnitRegistry.from_json(
                self.parameters["unit_registry_json"]
            )
            # reset self.arr and self.quan to use new unit_registry
            self._arr = None
            self._quan = None
        # Ensure every code_* unit exists in the registry, defaulting to 1.0
        # of the corresponding base dimension.
        for dim in [
            "length",
            "mass",
            "pressure",
            "temperature",
            "time",
            "velocity",
        ]:
            cu = "code_" + dim
            if cu not in self.unit_registry:
                self.unit_registry.add(cu, 1.0, getattr(dimensions, dim))
        if "code_magnetic" not in self.unit_registry:
            self.unit_registry.add("code_magnetic", 1.0, dimensions.magnetic_field)
        # if saved, set unit system
        if "unit_system_name" in self.parameters:
            unit_system = self.parameters["unit_system_name"]
            del self.parameters["unit_system_name"]
        else:
            unit_system = "cgs"
        # reset unit system since we may have a new unit registry
        self._assign_unit_system(unit_system)
        # assign units to parameters that have associated unit string
        # (a "<par>_units" sibling key); the unit keys are removed afterward.
        del_pars = []
        for par in self.parameters:
            ustr = f"{par}_units"
            if ustr in self.parameters:
                if isinstance(self.parameters[par], np.ndarray):
                    to_u = self.arr
                else:
                    to_u = self.quan
                self.parameters[par] = to_u(self.parameters[par], self.parameters[ustr])
                del_pars.append(ustr)
        for par in del_pars:
            del self.parameters[par]
        # Mirror the subclass-declared connection attributes onto self.
        for attr in self._con_attrs:
            setattr(self, attr, self.parameters.get(attr))
        if self.geometry is None:
            self.geometry = "cartesian"

    def _with_parameter_file_open(self, f):
        # This allows subclasses to access the parameter file
        # while it's still open to get additional information.
        pass

    def set_units(self):
        # When a unit registry was restored, code units come straight from
        # the saved attributes instead of the usual Dataset machinery.
        if "unit_registry_json" in self.parameters:
            self._set_code_unit_attributes()
            del self.parameters["unit_registry_json"]
        else:
            super(SavedDataset, self).set_units()

    def _set_code_unit_attributes(self):
        # Resolve each code unit from (in order): a saved YTQuantity, a saved
        # value + "<attr>_units" pair, or a 1.0 default in cgs.
        attrs = (
            "length_unit",
            "mass_unit",
            "time_unit",
            "velocity_unit",
            "magnetic_unit",
        )
        cgs_units = ("cm", "g", "s", "cm/s", "gauss")
        base_units = np.ones(len(attrs))
        for unit, attr, cgs_unit in zip(base_units, attrs, cgs_units):
            if attr in self.parameters and isinstance(
                self.parameters[attr], YTQuantity
            ):
                uq = self.parameters[attr]
            elif attr in self.parameters and f"{attr}_units" in self.parameters:
                uq = self.quan(self.parameters[attr], self.parameters[f"{attr}_units"])
                del self.parameters[attr]
                del self.parameters[f"{attr}_units"]
            elif isinstance(unit, str):
                uq = self.quan(1.0, unit)
            elif isinstance(unit, numeric_type):
                uq = self.quan(unit, cgs_unit)
            elif isinstance(unit, YTQuantity):
                uq = unit
            elif isinstance(unit, tuple):
                uq = self.quan(unit[0], unit[1])
            else:
                raise RuntimeError(f"{attr} ({unit}) is invalid.")
            setattr(self, attr, uq)
class YTDataset(SavedDataset):
    """Base dataset class for all ytdata datasets.

    Adds particle counting from the open parameter file and full field-info
    construction (fluid fields, particle fields, unions, plugins).
    """

    _con_attrs = (
        "cosmological_simulation",
        "current_time",
        "current_redshift",
        "hubble_constant",
        "omega_matter",
        "omega_lambda",
        "dimensionality",
        "domain_dimensions",
        "geometry",
        "periodicity",
        "domain_left_edge",
        "domain_right_edge",
        "container_type",
        "data_type",
    )

    def _with_parameter_file_open(self, f):
        # Record element counts per particle group while the file is open;
        # the default fluid type group is not a particle type.
        self.num_particles = dict(
            [
                (group, parse_h5_attr(f[group], "num_elements"))
                for group in f
                if group != self.default_fluid_type
            ]
        )

    def create_field_info(self):
        # Build the field info container, then layer on coordinate, fluid,
        # per-particle-type, alias, union, and plugin-derived fields.
        self.field_dependencies = {}
        self.derived_field_list = []
        self.filtered_particle_types = []
        self.field_info = self._field_info_class(self, self.field_list)
        self.coordinates.setup_fields(self.field_info)
        self.field_info.setup_fluid_fields()
        for ptype in self.particle_types:
            self.field_info.setup_particle_fields(ptype)
        self._setup_gas_alias()
        self.field_info.setup_fluid_index_fields()
        if "all" not in self.particle_types:
            mylog.debug("Creating Particle Union 'all'")
            pu = ParticleUnion("all", list(self.particle_types_raw))
            self.add_particle_union(pu)
        self.field_info.setup_extra_union_fields()
        mylog.debug("Loading field plugins.")
        self.field_info.load_all_plugins()
        deps, unloaded = self.field_info.check_derived_fields()
        self.field_dependencies.update(deps)

    def _setup_gas_alias(self):
        # Hook for subclasses that alias a fluid type to "gas".
        pass

    def _setup_override_fields(self):
        pass
class YTDataHDF5File(ParticleFile):
    """Particle file wrapper that snapshots the HDF5 root attributes."""

    def __init__(self, ds, io, filename, file_id, range):
        # Cache all top-level attributes as the file header before handing
        # off to the generic ParticleFile constructor.
        with h5py.File(filename, mode="r") as f:
            self.header = dict(
                (field, parse_h5_attr(f, field)) for field in f.attrs.keys()
            )
        super(YTDataHDF5File, self).__init__(ds, io, filename, file_id, range)
class YTDataContainerDataset(YTDataset):
    """Dataset for saved geometric data containers.

    Saved container data is treated as particle-like; the original data
    container can be reconstructed through the ``data`` property.
    """

    _index_class = ParticleIndex
    _file_class = YTDataHDF5File
    _field_info_class = YTDataContainerFieldInfo
    _suffix = ".h5"
    fluid_types = ("grid", "gas", "deposit", "index")

    def __init__(
        self,
        filename,
        dataset_type="ytdatacontainer_hdf5",
        index_order=None,
        index_filename=None,
        units_override=None,
        unit_system="cgs",
    ):
        self.index_order = validate_index_order(index_order)
        self.index_filename = index_filename
        super(YTDataContainerDataset, self).__init__(
            filename,
            dataset_type,
            units_override=units_override,
            unit_system=unit_system,
        )

    def _parse_parameter_file(self):
        # All saved groups become particle types; the dataset is a single
        # file with a trivial 1x1x1 domain.
        super(YTDataContainerDataset, self)._parse_parameter_file()
        self.particle_types_raw = tuple(self.num_particles.keys())
        self.particle_types = self.particle_types_raw
        self.filename_template = self.parameter_filename
        self.file_count = 1
        self.domain_dimensions = np.ones(3, "int32")

    def _setup_gas_alias(self):
        "Alias the grid type to gas by making a particle union."
        if "grid" in self.particle_types and "gas" not in self.particle_types:
            pu = ParticleUnion("gas", ["grid"])
            self.add_particle_union(pu)
            # We have to alias this because particle unions only
            # cover the field_list.
            self.field_info.alias(("gas", "cell_volume"), ("grid", "cell_volume"))

    # Lazily-built cache for the reconstructed data container.
    _data_obj = None

    @property
    def data(self):
        """
        Return a data container configured like the original used to
        create this dataset.
        """
        if self._data_obj is None:
            # Some data containers can't be reconstructed in the same way
            # since this is now particle-like data.
            data_type = self.parameters.get("data_type")
            container_type = self.parameters.get("container_type")
            ex_container_type = ["cutting", "quad_proj", "ray", "slice", "cut_region"]
            if data_type == "yt_light_ray" or container_type in ex_container_type:
                mylog.info("Returning an all_data data container.")
                return self.all_data()
            # Rebuild the container from its saved type and con_args.
            my_obj = getattr(self, self.parameters["container_type"])
            my_args = [
                self.parameters[con_arg] for con_arg in self.parameters["con_args"]
            ]
            self._data_obj = my_obj(*my_args)
        return self._data_obj

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        # Accept .h5 files tagged yt_data_container whose container type is
        # not one of the grid-like containers (those load as grid datasets).
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            cont_type = parse_h5_attr(f, "container_type")
            if data_type is None:
                return False
            if (
                data_type == "yt_data_container"
                and cont_type not in _grid_data_containers
            ):
                return True
        return False
class YTDataLightRayDataset(YTDataContainerDataset):
    """Dataset for saved LightRay objects."""

    def _parse_parameter_file(self):
        super(YTDataLightRayDataset, self)._parse_parameter_file()
        self._restore_light_ray_solution()

    def _restore_light_ray_solution(self):
        """
        Restore all information associated with the light ray solution
        to its original form.

        The saved parameters are flat "light_ray_solution_<field>" arrays;
        this rebuilds them as one dict per ray segment.
        """
        key = "light_ray_solution"
        self.light_ray_solution = []
        lrs_fields = [
            par for par in self.parameters if key in par and not par.endswith("_units")
        ]
        if len(lrs_fields) == 0:
            return
        self.light_ray_solution = [{} for val in self.parameters[lrs_fields[0]]]
        # These two fields were saved as bytes; decode them to str arrays.
        for sp3 in ["unique_identifier", "filename"]:
            ksp3 = f"{key}_{sp3}"
            if ksp3 not in lrs_fields:
                continue
            self.parameters[ksp3] = self.parameters[ksp3].astype(str)
        for field in lrs_fields:
            field_name = field[len(key) + 1 :]
            for i in range(self.parameters[field].shape[0]):
                self.light_ray_solution[i][field_name] = self.parameters[field][i]

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        # Accept .h5 files tagged with the yt_light_ray data type.
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            if data_type in ["yt_light_ray"]:
                return True
        return False
class YTSpatialPlotDataset(YTDataContainerDataset):
    """Dataset for saved slices and projections."""

    _field_info_class = YTGridFieldInfo

    def __init__(self, *args, **kwargs):
        # NOTE: modernized to zero-argument super(); the file already
        # uses Python-3-only syntax (f-strings).
        super().__init__(*args, dataset_type="ytspatialplot_hdf5", **kwargs)

    def _parse_parameter_file(self):
        super()._parse_parameter_file()
        if self.parameters["container_type"] == "proj":
            # The string "None" is how an absent weight field round-trips
            # through HDF5 attributes.
            if (
                isinstance(self.parameters["weight_field"], str)
                and self.parameters["weight_field"] == "None"
            ):
                self.parameters["weight_field"] = None
            elif isinstance(self.parameters["weight_field"], np.ndarray):
                # Field tuples are saved as arrays; restore the tuple form.
                self.parameters["weight_field"] = tuple(self.parameters["weight_field"])

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        """Return True if ``filename`` is a saved slice/projection container."""
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            cont_type = parse_h5_attr(f, "container_type")
            if data_type == "yt_data_container" and cont_type in [
                "cutting",
                "proj",
                "slice",
                "quad_proj",
            ]:
                return True
        return False
class YTGrid(AMRGridPatch):
    """Single level-0 grid spanning the full domain of a saved dataset."""

    # Grid ids in saved datasets start at 0 (no offset applied).
    _id_offset = 0

    def __init__(self, gid, index, filename=None):
        AMRGridPatch.__init__(self, gid, filename=filename, index=index)
        # There is only one grid, so it has no parent and no children.
        self._children_ids = []
        self._parent_id = -1
        self.Level = 0
        self.LeftEdge = self.index.ds.domain_left_edge
        self.RightEdge = self.index.ds.domain_right_edge

    def __getitem__(self, key):
        # NOTE: super(AMRGridPatch, self) deliberately SKIPS
        # AMRGridPatch.__getitem__ and dispatches to its base class;
        # the reshape below replaces the AMRGridPatch handling.
        tr = super(AMRGridPatch, self).__getitem__(key)
        try:
            fields = self._determine_fields(key)
        except YTFieldTypeNotFound:
            # Non-field keys pass through untouched.
            return tr
        finfo = self.ds._get_field_info(*fields[0])
        if not finfo.sampling_type == "particle":
            # Fluid data is stored flat; restore the grid shape.
            return tr.reshape(self.ActiveDimensions[: self.ds.dimensionality])
        return tr

    @property
    def Parent(self):
        # Single-grid dataset: no parent grid exists.
        return None

    @property
    def Children(self):
        # Single-grid dataset: no child grids exist.
        return []
class YTDataHierarchy(GridIndex):
    """Index for saved yt datasets: exactly one grid covering the domain."""

    def __init__(self, ds, dataset_type=None):
        self.dataset_type = dataset_type
        self.float_type = "float64"
        # weakref avoids a reference cycle between dataset and index.
        self.dataset = weakref.proxy(ds)
        self.directory = os.getcwd()
        super(YTDataHierarchy, self).__init__(ds, dataset_type)

    def _count_grids(self):
        # Saved datasets are always represented by a single grid.
        self.num_grids = 1

    def _parse_index(self):
        # The single grid spans the full domain at level 0.
        self.grid_dimensions[:] = self.ds.domain_dimensions
        self.grid_left_edge[:] = self.ds.domain_left_edge
        self.grid_right_edge[:] = self.ds.domain_right_edge
        self.grid_levels[:] = np.zeros(self.num_grids)
        self.grid_procs = np.zeros(self.num_grids)
        self.grid_particle_count[:] = sum(self.ds.num_particles.values())
        self.grids = []
        for gid in range(self.num_grids):
            self.grids.append(self.grid(gid, self))
            self.grids[gid].Level = self.grid_levels[gid, 0]
        self.max_level = self.grid_levels.max()
        # Repack the grid list as an object array, as GridIndex expects.
        temp_grids = np.empty(self.num_grids, dtype="object")
        for i, grid in enumerate(self.grids):
            grid.filename = self.ds.parameter_filename
            grid._prepare_grid()
            grid.proc_num = self.grid_procs[i]
            temp_grids[i] = grid
        self.grids = temp_grids

    def _detect_output_fields(self):
        # Fields are the (group, dataset) pairs in the HDF5 file; units
        # are read from each dataset's "units" attribute.
        self.field_list = []
        self.ds.field_units = self.ds.field_units or {}
        with h5py.File(self.ds.parameter_filename, mode="r") as f:
            for group in f:
                for field in f[group]:
                    field_name = (str(group), str(field))
                    self.field_list.append(field_name)
                    self.ds.field_units[field_name] = parse_h5_attr(
                        f[group][field], "units"
                    )
class YTGridHierarchy(YTDataHierarchy):
    """Hierarchy whose single grid is spatially meaningful (YTGrid)."""

    grid = YTGrid

    def _populate_grid_objects(self):
        """Finalize cell widths for every grid, then record the max level."""
        for grid_object in self.grids:
            grid_object._setup_dx()
        self.max_level = self.grid_levels.max()
class YTGridDataset(YTDataset):
    """Dataset for saved covering grids, arbitrary grids, and FRBs."""

    _index_class = YTGridHierarchy
    _field_info_class = YTGridFieldInfo
    _dataset_type = "ytgridhdf5"
    geometry = "cartesian"
    default_fluid_type = "grid"
    fluid_types = ("grid", "gas", "deposit", "index")

    def __init__(self, filename, unit_system="cgs"):
        super(YTGridDataset, self).__init__(
            filename, self._dataset_type, unit_system=unit_system
        )
        # Convenience handle to the single grid holding all the data.
        self.data = self.index.grids[0]

    def _parse_parameter_file(self):
        super(YTGridDataset, self)._parse_parameter_file()
        # The fluid type may appear in the particle counts; it is not a
        # particle type, so drop it before deriving particle_types.
        self.num_particles.pop(self.default_fluid_type, None)
        self.particle_types_raw = tuple(self.num_particles.keys())
        self.particle_types = self.particle_types_raw
        # correct domain dimensions for the covering grid dimension
        # Keep the original simulation domain around as base_domain_*.
        self.base_domain_left_edge = self.domain_left_edge
        self.base_domain_right_edge = self.domain_right_edge
        self.base_domain_dimensions = self.domain_dimensions
        if self.container_type in _grid_data_containers:
            self.domain_left_edge = self.parameters["left_edge"]
            if "level" in self.parameters["con_args"]:
                # Covering grid: cell width is fixed by the saved level,
                # so the right edge is derived from ActiveDimensions.
                dx = (self.base_domain_right_edge - self.base_domain_left_edge) / (
                    self.domain_dimensions * self.refine_by ** self.parameters["level"]
                )
                self.domain_right_edge = (
                    self.domain_left_edge + self.parameters["ActiveDimensions"] * dx
                )
                self.domain_dimensions = (
                    (self.domain_right_edge - self.domain_left_edge) / dx
                ).astype(int)
            else:
                # Arbitrary grid: edges and dimensions were saved directly.
                self.domain_right_edge = self.parameters["right_edge"]
                self.domain_dimensions = self.parameters["ActiveDimensions"]
                dx = (
                    self.domain_right_edge - self.domain_left_edge
                ) / self.domain_dimensions
            # Treat an axis as periodic only where the container edge
            # coincides (within half a cell) with the base domain edge.
            self.periodicity = (
                np.abs(self.domain_left_edge - self.base_domain_left_edge) < 0.5 * dx
            )
            self.periodicity &= (
                np.abs(self.domain_right_edge - self.base_domain_right_edge) < 0.5 * dx
            )
        elif self.data_type == "yt_frb":
            # FRBs are 2D; pad to 3D with a unit-thickness third axis.
            dle = self.domain_left_edge
            self.domain_left_edge = uconcatenate(
                [self.parameters["left_edge"].to(dle.units), [0] * dle.uq]
            )
            dre = self.domain_right_edge
            self.domain_right_edge = uconcatenate(
                [self.parameters["right_edge"].to(dre.units), [1] * dre.uq]
            )
            self.domain_dimensions = np.concatenate(
                [self.parameters["ActiveDimensions"], [1]]
            )

    def _setup_gas_alias(self):
        "Alias the grid type to gas with a field alias."
        for ftype, field in self.field_list:
            if ftype == "grid":
                self.field_info.alias(("gas", field), ("grid", field))

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        # Claims FRBs and any container listed in _grid_data_containers.
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            cont_type = parse_h5_attr(f, "container_type")
            if data_type == "yt_frb":
                return True
            if data_type == "yt_data_container" and cont_type in _grid_data_containers:
                return True
        return False
class YTNonspatialGrid(AMRGridPatch):
    """Grid-like container for non-spatial (plain array) saved data."""

    _id_offset = 0

    def __init__(self, gid, index, filename=None):
        super(YTNonspatialGrid, self).__init__(gid, filename=filename, index=index)
        # Single grid: no parent and no children.
        self._children_ids = []
        self._parent_id = -1
        self.Level = 0
        self.LeftEdge = self.index.ds.domain_left_edge
        self.RightEdge = self.index.ds.domain_right_edge

    def __repr__(self):
        return "YTNonspatialGrid"

    def __getitem__(self, key):
        # NOTE: super(AMRGridPatch, self) deliberately SKIPS
        # AMRGridPatch.__getitem__; non-spatial data is never reshaped.
        tr = super(AMRGridPatch, self).__getitem__(key)
        try:
            fields = self._determine_fields(key)
        except YTFieldTypeNotFound:
            return tr
        # Validates that the field exists; result is intentionally unused.
        self.ds._get_field_info(*fields[0])
        return tr

    def get_data(self, fields=None):
        """Read and/or generate ``fields`` into this grid's field_data.

        Splits requested fields into filtered-particle fields (handled by
        recursing under the filter context), readable fields, and fields
        that must be generated after reads complete.
        """
        if fields is None:
            return
        nfields = []
        apply_fields = defaultdict(list)
        for field in self._determine_fields(fields):
            if field[0] in self.ds.filtered_particle_types:
                # Filtered types are resolved against their base type.
                f = self.ds.known_filters[field[0]]
                apply_fields[field[0]].append((f.filtered_type, field[1]))
            else:
                nfields.append(field)
        for filter_type in apply_fields:
            f = self.ds.known_filters[filter_type]
            with f.apply(self):
                self.get_data(apply_fields[filter_type])
        fields = nfields
        if len(fields) == 0:
            return
        # Now we collect all our fields
        # Here is where we need to perform a validation step, so that if we
        # have a field requested that we actually *can't* yet get, we put it
        # off until the end. This prevents double-reading fields that will
        # need to be used in spatial fields later on.
        fields_to_get = []
        # This will be pre-populated with spatial fields
        fields_to_generate = []
        for field in self._determine_fields(fields):
            if field in self.field_data:
                continue
            finfo = self.ds._get_field_info(*field)
            try:
                finfo.check_available(self)
            except NeedsGridType:
                fields_to_generate.append(field)
                continue
            fields_to_get.append(field)
        if len(fields_to_get) == 0 and len(fields_to_generate) == 0:
            return
        elif self._locked:
            raise GenerationInProgress(fields)
        # Track which ones we want in the end
        ofields = set(list(self.field_data.keys()) + fields_to_get + fields_to_generate)
        # At this point, we want to figure out *all* our dependencies.
        fields_to_get = self._identify_dependencies(fields_to_get, self._spatial)
        # We now split up into readers for the types of fields
        fluids, particles = [], []
        finfos = {}
        for ftype, fname in fields_to_get:
            finfo = self.ds._get_field_info(ftype, fname)
            finfos[ftype, fname] = finfo
            if finfo.sampling_type == "particle":
                particles.append((ftype, fname))
            elif (ftype, fname) not in fluids:
                fluids.append((ftype, fname))
        # The _read method will figure out which fields it needs to get from
        # disk, and return a dict of those fields along with the fields that
        # need to be generated.
        read_fluids, gen_fluids = self.index._read_fluid_fields(
            fluids, self, self._current_chunk
        )
        for f, v in read_fluids.items():
            # Unitless non-float64 data is stored as-is; everything else is
            # promoted to float64 and converted to the field's output units.
            convert = True
            if v.dtype != np.float64:
                if finfos[f].units == "":
                    self.field_data[f] = v
                    convert = False
                else:
                    v = v.astype(np.float64)
            if convert:
                self.field_data[f] = self.ds.arr(v, units=finfos[f].units)
                self.field_data[f].convert_to_units(finfos[f].output_units)
        # Particle fields go through the same non-spatial fluid reader.
        read_particles, gen_particles = self.index._read_fluid_fields(
            particles, self, self._current_chunk
        )
        for f, v in read_particles.items():
            convert = True
            if v.dtype != np.float64:
                if finfos[f].units == "":
                    self.field_data[f] = v
                    convert = False
                else:
                    v = v.astype(np.float64)
            if convert:
                self.field_data[f] = self.ds.arr(v, units=finfos[f].units)
                self.field_data[f].convert_to_units(finfos[f].output_units)
        fields_to_generate += gen_fluids + gen_particles
        self._generate_fields(fields_to_generate)
        # Drop any dependency-only fields that were not explicitly requested.
        for field in list(self.field_data.keys()):
            if field not in ofields:
                self.field_data.pop(field)

    @property
    def Parent(self):
        # Single-grid dataset: no parent grid exists.
        return None

    @property
    def Children(self):
        # Single-grid dataset: no child grids exist.
        return []
class YTNonspatialHierarchy(YTDataHierarchy):
    """Hierarchy for non-spatial saved data (plain arrays, profiles)."""

    grid = YTNonspatialGrid

    def _populate_grid_objects(self):
        for g in self.grids:
            g._setup_dx()
            # this is non-spatial, so remove the code_length units
            g.dds = self.ds.arr(g.dds.d, "")
            g.ActiveDimensions = self.ds.domain_dimensions
        self.max_level = self.grid_levels.max()

    def _read_fluid_fields(self, fields, dobj, chunk=None):
        """Read ``fields`` for ``dobj``; return (read dict, to-generate list)."""
        if len(fields) == 0:
            return {}, []
        fields_to_read, fields_to_generate = self._split_fields(fields)
        if len(fields_to_read) == 0:
            return {}, fields_to_generate
        selector = dobj.selector
        fields_to_return = self.io._read_fluid_selection(dobj, selector, fields_to_read)
        return fields_to_return, fields_to_generate
class YTNonspatialDataset(YTGridDataset):
    """Dataset for general array data."""

    _index_class = YTNonspatialHierarchy
    _field_info_class = YTGridFieldInfo
    _dataset_type = "ytnonspatialhdf5"
    geometry = "cartesian"
    default_fluid_type = "data"
    fluid_types = ("data", "gas")

    def _parse_parameter_file(self):
        # NOTE: super(YTGridDataset, self) deliberately SKIPS
        # YTGridDataset._parse_parameter_file — the grid-specific domain
        # corrections do not apply to non-spatial data.
        super(YTGridDataset, self)._parse_parameter_file()
        self.num_particles.pop(self.default_fluid_type, None)
        self.particle_types_raw = tuple(self.num_particles.keys())
        self.particle_types = self.particle_types_raw

    def _set_derived_attrs(self):
        # set some defaults just to make things go
        # Non-spatial data has no real domain; fill in placeholders only
        # where the attribute is still unset.
        default_attrs = {
            "dimensionality": 3,
            "domain_dimensions": np.ones(3, dtype="int"),
            "domain_left_edge": np.zeros(3),
            "domain_right_edge": np.ones(3),
            "periodicity": np.ones(3, dtype="bool"),
        }
        for att, val in default_attrs.items():
            if getattr(self, att, None) is None:
                setattr(self, att, val)

    def _setup_classes(self):
        # We don't allow geometric selection for non-spatial datasets
        self.objects = []

    @parallel_root_only
    def print_key_parameters(self):
        """Log key parameters (root process only) and warn that geometric
        selection is unavailable for this dataset type."""
        for a in [
            "current_time",
            "domain_dimensions",
            "domain_left_edge",
            "domain_right_edge",
            "cosmological_simulation",
        ]:
            v = getattr(self, a)
            if v is not None:
                mylog.info("Parameters: %-25s = %s", a, v)
        if hasattr(self, "cosmological_simulation") and self.cosmological_simulation:
            for a in [
                "current_redshift",
                "omega_lambda",
                "omega_matter",
                "hubble_constant",
            ]:
                v = getattr(self, a)
                if v is not None:
                    mylog.info("Parameters: %-25s = %s", a, v)
        mylog.warning("Geometric data selection not available for this dataset type.")

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        """Return True if ``filename`` is a saved yt array dataset."""
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            if data_type == "yt_array_data":
                return True
        return False
class YTProfileDataset(YTNonspatialDataset):
    """Dataset for saved profile objects."""

    fluid_types = ("data", "gas", "standard_deviation")

    def __init__(self, filename, unit_system="cgs"):
        super(YTProfileDataset, self).__init__(filename, unit_system=unit_system)

    # Lazily constructed ProfileND facsimile (see the `profile` property).
    _profile = None

    @property
    def profile(self):
        """Rebuild and cache a Profile{1,2,3}D object matching this dataset's
        dimensionality; None if the dimensionality is unsupported."""
        if self._profile is not None:
            return self._profile
        if self.dimensionality == 1:
            self._profile = Profile1DFromDataset(self)
        elif self.dimensionality == 2:
            self._profile = Profile2DFromDataset(self)
        elif self.dimensionality == 3:
            self._profile = Profile3DFromDataset(self)
        else:
            self._profile = None
        return self._profile

    def _parse_parameter_file(self):
        # NOTE: super(YTGridDataset, self) deliberately SKIPS both
        # YTNonspatialDataset and YTGridDataset implementations.
        super(YTGridDataset, self)._parse_parameter_file()
        # The string "None" is how an absent weight field round-trips
        # through HDF5; field tuples round-trip as arrays.
        if (
            isinstance(self.parameters["weight_field"], str)
            and self.parameters["weight_field"] == "None"
        ):
            self.parameters["weight_field"] = None
        elif isinstance(self.parameters["weight_field"], np.ndarray):
            self.parameters["weight_field"] = tuple(
                self.parameters["weight_field"].astype(str)
            )
        # Promote per-axis profile settings to attributes (x_log, y_log, ...).
        for a in ["profile_dimensions"] + [
            f"{ax}_{attr}" for ax in "xyz"[: self.dimensionality] for attr in ["log"]
        ]:
            setattr(self, a, self.parameters[a])
        # The "domain" of a profile dataset is the binned parameter space.
        self.base_domain_left_edge = self.domain_left_edge
        self.base_domain_right_edge = self.domain_right_edge
        self.base_domain_dimensions = self.domain_dimensions
        domain_dimensions = np.ones(3, dtype="int")
        domain_dimensions[: self.dimensionality] = self.profile_dimensions
        self.domain_dimensions = domain_dimensions
        domain_left_edge = np.zeros(3)
        domain_right_edge = np.ones(3)
        for i, ax in enumerate("xyz"[: self.dimensionality]):
            range_name = f"{ax}_range"
            my_range = self.parameters[range_name]
            # Log-binned axes use log10 coordinates for the domain edges.
            if getattr(self, f"{ax}_log", False):
                my_range = np.log10(my_range)
            domain_left_edge[i] = my_range[0]
            domain_right_edge[i] = my_range[1]
            setattr(self, range_name, self.parameters[range_name])
            bin_field = f"{ax}_field"
            if (
                isinstance(self.parameters[bin_field], str)
                and self.parameters[bin_field] == "None"
            ):
                self.parameters[bin_field] = None
            elif isinstance(self.parameters[bin_field], np.ndarray):
                # Restore the ("data", <name>) field-tuple form.
                self.parameters[bin_field] = tuple(
                    ["data", self.parameters[bin_field].astype(str)[1]]
                )
            setattr(self, bin_field, self.parameters[bin_field])
        self.domain_left_edge = domain_left_edge
        self.domain_right_edge = domain_right_edge

    def _setup_gas_alias(self):
        "Alias the grid type to gas with a field alias."
        for ftype, field in self.field_list:
            if ftype == "data":
                self.field_info.alias(("gas", field), (ftype, field))

    def create_field_info(self):
        super(YTProfileDataset, self).create_field_info()
        # Make the saved "weight" array reachable under the original
        # weight field name.
        if self.parameters["weight_field"] is not None:
            self.field_info.alias(
                self.parameters["weight_field"], (self.default_fluid_type, "weight")
            )

    def _set_derived_attrs(self):
        self.domain_center = 0.5 * (self.domain_right_edge + self.domain_left_edge)
        self.domain_width = self.domain_right_edge - self.domain_left_edge

    def print_key_parameters(self):
        """Log the profile's defining parameters (root process only)."""
        if is_root():
            mylog.info("YTProfileDataset")
            for a in ["dimensionality", "profile_dimensions"] + [
                f"{ax}_{attr}"
                for ax in "xyz"[: self.dimensionality]
                for attr in ["field", "range", "log"]
            ]:
                v = getattr(self, a)
                mylog.info("Parameters: %-25s = %s", a, v)
        super(YTProfileDataset, self).print_key_parameters()

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        """Return True if ``filename`` is a saved profile dataset."""
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            if data_type == "yt_profile":
                return True
        return False
class YTClumpContainer(TreeContainer):
    """One clump in a saved clump tree, with parent/child links."""

    def __init__(
        self, clump_id, global_id, parent_id, contour_key, contour_id, ds=None
    ):
        self.clump_id = clump_id
        # Row index of this clump in the dataset's flat clump arrays.
        self.global_id = global_id
        self.parent_id = parent_id
        self.contour_key = contour_key
        self.contour_id = contour_id
        self.parent = None
        self.ds = ds
        TreeContainer.__init__(self)

    def add_child(self, child):
        """Attach ``child`` to this clump and set its back-reference."""
        if self.children is None:
            self.children = []
        self.children.append(child)
        child.parent = self

    def __repr__(self):
        return "Clump[%d]" % self.clump_id

    def __getitem__(self, field):
        """Return field data restricted to this clump.

        "clump" fields index by global_id; grid fields are masked by the
        clump's contour id (the root clump, contour_id == -1, gets all data).
        """
        g = self.ds.data
        f = g._determine_fields(field)[0]
        if f[0] == "clump":
            return g[f][self.global_id]
        if self.contour_id == -1:
            return g[f]
        # contour_key is stored as bytes in the saved file.
        cfield = (f[0], f"contours_{self.contour_key.decode('utf-8')}")
        if f[0] == "grid":
            return g[f][g[cfield] == self.contour_id]
        # Non-grid fields are sliced from the parent's data using this
        # clump's contour mask.
        return self.parent[f][g[cfield] == self.contour_id]
class YTClumpTreeDataset(YTNonspatialDataset):
    """Dataset for saved clump-finder data."""

    def __init__(self, filename, unit_system="cgs"):
        super(YTClumpTreeDataset, self).__init__(filename, unit_system=unit_system)
        # Rebuild the clump hierarchy immediately so self.tree is usable.
        self._load_tree()

    def _load_tree(self):
        """Reconstruct the clump tree from the flat clump arrays.

        Sets self.tree to the root clump (the one whose parent_id is -1).
        """
        my_tree = {}
        for i, clump_id in enumerate(self.data[("clump", "clump_id")]):
            my_tree[clump_id] = YTClumpContainer(
                clump_id,
                i,
                self.data["clump", "parent_id"][i],
                self.data["clump", "contour_key"][i],
                self.data["clump", "contour_id"][i],
                self,
            )
        for clump in my_tree.values():
            if clump.parent_id == -1:
                self.tree = clump
            else:
                parent = my_tree[clump.parent_id]
                parent.add_child(clump)

    # Cache for the `leaves` property.
    _leaves = None

    @property
    def leaves(self):
        """List of childless clumps, computed once and cached."""
        if self._leaves is None:
            self._leaves = []
            for clump in self.tree:
                if clump.children is None:
                    self._leaves.append(clump)
        return self._leaves

    @classmethod
    def _is_valid(cls, filename, *args, **kwargs):
        """Return True if ``filename`` is a saved clump tree."""
        if not filename.endswith(".h5"):
            return False
        with h5py.File(filename, mode="r") as f:
            data_type = parse_h5_attr(f, "data_type")
            if data_type is None:
                return False
            if data_type == "yt_clump_tree":
                return True
        return False
|
<reponame>RenukaGurumurthy/Gooru-Core-API
/**
*
*/
package org.ednovo.gooru.core.cassandra.model;
import javax.persistence.Column;
import javax.persistence.Entity;
@Entity(name = "feed")
public class FeedCo {
	/** Feed item title. */
	@Column
	private String title;
	/** Feed item description. */
	@Column
	private String description;
	/** Number of times the content was favorited. */
	@Column
	private Long favoriteCount;
	/** Number of times the content was viewed. */
	@Column
	private Long viewCount;
	/** URL status code of the content (semantics defined by callers). */
	@Column
	private Integer urlStatus;
	/** Average user rating. */
	@Column
	private Double ratingAverage;
	/** Number of likes. */
	@Column
	private Long likeCount;
	/** Number of dislikes. */
	@Column
	private Long dislikeCount;
	/** Media duration in seconds. */
	@Column
	private Long durationInSec;
	/** Whether the content has advertisements (stored as a string flag). */
	@Column
	private String hasAdvertisement;
	/** Whether the content is copyrighted (stored as a string flag). */
	@Column
	private String hasCopyright;
	/** Content clarity rating (stored as a string flag). */
	@Column
	private String contentClarity;
	/** Media clarity rating (stored as a string flag). */
	@Column
	private String mediaClarity;
	/** Free-form text body. */
	@Column
	private String text;
	public Long getFavoriteCount() {
		return favoriteCount;
	}
	public void setFavoriteCount(Long favoriteCount) {
		this.favoriteCount = favoriteCount;
	}
	public Long getViewCount() {
		return viewCount;
	}
	public void setViewCount(Long viewCount) {
		this.viewCount = viewCount;
	}
	public Integer getUrlStatus() {
		return urlStatus;
	}
	public void setUrlStatus(Integer urlStatus) {
		this.urlStatus = urlStatus;
	}
	public Double getRatingAverage() {
		return ratingAverage;
	}
	public void setRatingAverage(Double ratingAverage) {
		this.ratingAverage = ratingAverage;
	}
	public String getTitle() {
		return title;
	}
	public void setTitle(String title) {
		this.title = title;
	}
	public String getDescription() {
		return description;
	}
	public void setDescription(String description) {
		this.description = description;
	}
	public Long getLikeCount() {
		return likeCount;
	}
	public void setLikeCount(Long likeCount) {
		this.likeCount = likeCount;
	}
	public Long getDislikeCount() {
		return dislikeCount;
	}
	public void setDislikeCount(Long dislikeCount) {
		this.dislikeCount = dislikeCount;
	}
	public Long getDurationInSec() {
		return durationInSec;
	}
	public void setDurationInSec(Long durationInSec) {
		this.durationInSec = durationInSec;
	}
	public String getHasAdvertisement() {
		return hasAdvertisement;
	}
	public void setHasAdvertisement(String hasAdvertisement) {
		this.hasAdvertisement = hasAdvertisement;
	}
	public String getHasCopyright() {
		return hasCopyright;
	}
	public void setHasCopyright(String hasCopyright) {
		this.hasCopyright = hasCopyright;
	}
	public String getContentClarity() {
		return contentClarity;
	}
	public void setContentClarity(String contentClarity) {
		this.contentClarity = contentClarity;
	}
	public String getMediaClarity() {
		return mediaClarity;
	}
	public void setMediaClarity(String mediaClarity) {
		this.mediaClarity = mediaClarity;
	}
	public String getText() {
		return text;
	}
	public void setText(String text) {
		this.text = text;
	}
}
#!/bin/bash
# for reboot call: sudo /home/admin/XXshutdown.sh reboot
# use this script instead of dirct shutdown command to:
# 1) give UI the info that a reboot/shutdown is now happening
# 2) shutdown/reboot in a safe way to prevent data corruption
# INFOFILE - state data from bootstrap
infoFile="/home/admin/raspiblitz.info"
# get network info from config
source ${infoFile} 2>/dev/null
source /mnt/hdd/raspiblitz.conf 2>/dev/null
# default to bitcoin if the config did not define a network
if [ ${#network} -eq 0 ]; then
  network=bitcoin
fi
# display info
echo ""
echo "LCD turns white when shutdown complete."
# write the new state into the info file so the UI can display it
if [ "$1" = "reboot" ]; then
  shutdownParams="-h -r now"
  echo "It will then reboot again automatically."
  sed -i "s/^state=.*/state=reboot/g" ${infoFile}
  sed -i "s/^message=.*/message=''/g" ${infoFile}
else
  shutdownParams="-h now"
  echo "Then wait 5 seconds and disconnect power."
  sed -i "s/^state=.*/state=shutdown/g" ${infoFile}
  sed -i "s/^message=.*/message=''/g" ${infoFile}
fi
# do shutdown/reboot
# stop services in order: lnd first, then the bitcoin daemon (graceful
# CLI stop, then systemd stop as a fallback) to avoid data corruption.
echo "-----------------------------------------------"
echo "stop lnd - please wait .."
sudo systemctl stop lnd 2>/dev/null
echo "stop ${network}d (1) - please wait .."
sudo -u bitcoin ${network}-cli stop 2>/dev/null
sleep 10
echo "stop ${network}d (2) - please wait .."
sudo systemctl stop ${network}d 2>/dev/null
sleep 3
# make sure drives are synced before shutdown
source <(sudo /home/admin/config.scripts/blitz.datadrive.sh status)
if [ ${isBTRFS} -eq 1 ] && [ ${isMounted} -eq 1 ]; then
  echo "STARTING BTRFS RAID DATA CHECK ..."
  sudo btrfs scrub start /mnt/hdd/
fi
sync
echo "starting shutdown ..."
sudo shutdown ${shutdownParams}
exit 0
|
package com.example.donations.controllers.request;
import lombok.Data;
import javax.validation.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
@Data
public class DonationRequest {
	/** Donation amount; required. */
	@NotNull
	private Float amount;
	/** Id of the donating user; required. */
	@NotNull
	private Long idUser;
	/** Id of the receiving institution; required. */
	@NotNull
	private Long idInstitution;
}
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Parking order status query interface.
 *
 * @author auto create
 * @since 1.0, 2020-12-31 11:39:24
 */
public class AlipayEcoMycarParkingOrderstatusQueryModel extends AlipayObject {
	private static final long serialVersionUID = 8737243911139196336L;

	/**
	 * If the merchant order number is empty, the parking lot id and license
	 * plate number must not be empty; the merchant order number takes
	 * precedence in queries.
	 */
	@ApiField("car_number")
	private String carNumber;

	/**
	 * If the merchant order number is empty, the parking lot id and license
	 * plate number must not be empty; the merchant order number takes
	 * precedence in queries.
	 */
	@ApiField("parking_id")
	private String parkingId;

	/**
	 * Query order date (defaults to the current day when omitted),
	 * format "yyyy-MM-dd".
	 */
	@ApiField("sel_time")
	private String selTime;

	/**
	 * Car-owner platform transaction number; must not be empty at the same
	 * time as the parking lot id and license plate number.
	 */
	@ApiField("transaction_no")
	private String transactionNo;

	public String getCarNumber() {
		return this.carNumber;
	}
	public void setCarNumber(String carNumber) {
		this.carNumber = carNumber;
	}
	public String getParkingId() {
		return this.parkingId;
	}
	public void setParkingId(String parkingId) {
		this.parkingId = parkingId;
	}
	public String getSelTime() {
		return this.selTime;
	}
	public void setSelTime(String selTime) {
		this.selTime = selTime;
	}
	public String getTransactionNo() {
		return this.transactionNo;
	}
	public void setTransactionNo(String transactionNo) {
		this.transactionNo = transactionNo;
	}
}
|
import { NO_ERRORS_SCHEMA } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { ModalController } from '@ionic/angular';
import * as moment from 'moment';
import { DemoModalBasicComponent } from './modal-basic';
describe('DemoModalBasicComponent', () => {
  let component: DemoModalBasicComponent;
  let fixture: ComponentFixture<DemoModalBasicComponent>;
  beforeEach(() => {
    // Minimal ModalController stub; the assertions below spy on the
    // component's own mock (modalCtrlMock) instead.
    const modalControllerStub = { create: () => ({}) };
    TestBed.configureTestingModule({
      schemas: [NO_ERRORS_SCHEMA],
      declarations: [DemoModalBasicComponent],
      providers: [
        { provide: ModalController, useValue: modalControllerStub }
      ]
    });
    fixture = TestBed.createComponent(DemoModalBasicComponent);
    component = fixture.componentInstance;
  });
  // ===================== TESTS =====================
  it('can load instance', () => {
    expect(component).toBeTruthy();
  });
  it('return undefined on cancel', () => {
    // Cancelling must dismiss with no data and the 'cancel' role.
    const dismissSpy = spyOn(component.modalCtrlMock, 'dismiss');
    component.openCalendar();
    component.myPicker.onCancel();
    expect(dismissSpy).toHaveBeenCalledWith(undefined, 'cancel');
  });
  it('return input with duration 30 minutes if unchanged and no to-Date', () => {
    const testDate = moment().add(1, 'hour').minutes(0).startOf('minute');
    // from == to: the picker is expected to extend 'to' by 30 minutes.
    component.dateRange = {
      from: testDate,
      to: testDate,
    };
    const dismissSpy = spyOn(component.modalCtrlMock, 'dismiss');
    component.openCalendar();
    component.myPicker.done();
    // Build the expected 'to' value AFTER done() ran; the spy already
    // captured the dismissed payload.
    component.dateRange.to = moment(testDate).add(30, 'minute');
    expect(dismissSpy.calls.mostRecent().args[1]).toBe('done');
    expect(dismissSpy.calls.mostRecent().args[0].from.time).toBe(moment(component.dateRange.from).valueOf());
    expect(dismissSpy.calls.mostRecent().args[0].to.time).toBe(moment(component.dateRange.to).valueOf());
  });
  it('return input if valid to-from supplied', () => {
    // A well-ordered range must be passed through unchanged.
    const testDate = moment().add(1, 'hour').minutes(0).startOf('minute');
    component.dateRange = {
      from: testDate,
      to: moment().add(2, 'hour').minutes(0).startOf('minute'),
    };
    const dismissSpy = spyOn(component.modalCtrlMock, 'dismiss');
    component.openCalendar();
    component.myPicker.done();
    expect(dismissSpy.calls.mostRecent().args[1]).toBe('done');
    expect(dismissSpy.calls.mostRecent().args[0].from.time).toBe(moment(component.dateRange.from).valueOf());
    expect(dismissSpy.calls.mostRecent().args[0].to.time).toBe(moment(component.dateRange.to).valueOf());
  });
  it('prevent invalid input dates from being returned', () => {
    // 'to' before 'from' must be corrected to from + 30 minutes.
    const testDate = moment().add(1, 'hour').minutes(0).startOf('minute');
    const expectedTo = moment(testDate).add(30, 'minute');
    component.dateRange = {
      from: testDate,
      to: moment(testDate).subtract(2, 'hour'),
    };
    const dismissSpy = spyOn(component.modalCtrlMock, 'dismiss');
    component.openCalendar();
    component.myPicker.done();
    expect(dismissSpy.calls.mostRecent().args[1]).toBe('done');
    expect(dismissSpy.calls.mostRecent().args[0].from.time).toBe(moment(component.dateRange.from).valueOf());
    expect(dismissSpy.calls.mostRecent().args[0].to.time).toBe(moment(expectedTo).valueOf());
  });
});
|
<filename>src/geometry/biarc.cpp
#include <biarc.h>
#include <utils.h>
#include <bulge.h>
#include <QDebug>
#include <QtMath>
namespace Geometry
{

// Winding direction of the triangle (point1, middle, point2), computed
// from the signed area (shoelace) determinant.
// NOTE(review): det < 0 mapped to CW suggests a y-down (screen)
// coordinate convention — confirm against the callers.
Orientation Biarc::orientation() const
{
	const float det = (m_middle.x() - m_point1.x()) * (m_middle.y() + m_point1.y()) +
			(m_point2.x() - m_middle.x()) * (m_point2.y() + m_middle.y()) +
			(m_point1.x() - m_point2.x()) * (m_point1.y() + m_point2.y());
	return (det < 0.0f) ? Orientation::CW : Orientation::CCW;
}

// Biarc through point1 -> middle -> point2 with prescribed end tangents.
// The two chord vectors (m_line1, m_line2) are cached for later use.
Biarc::Biarc(const QVector2D& point1, const QVector2D& middle, const QVector2D& point2,
		const QVector2D& tangent1, const QVector2D& tangent2)
	:m_point1(point1),
	m_point2(point2),
	m_middle(middle),
	m_tangent1(tangent1),
	m_tangent2(tangent2),
	m_line1(middle - point1),
	m_line2(middle - point2)
{
}

const QVector2D &Biarc::middle() const
{
	return m_middle;
}

QVector2D Biarc::tangentAtMiddle() const
{
	// Rotate line by PI/2
	const QVector2D normalizedLine1 = m_line1.normalized();
	const QVector2D perpendicularLine1 = PerpendicularLine(normalizedLine1);
	// Tangent at middle is the reflect of tangent at start by perpendicular line start to end.
	return ReflectLine(m_tangent1.normalized(), perpendicularLine1);
}

// Convert the biarc into a two-bulge polyline (one bulge per arc).
Polyline Biarc::toPolyline() const
{
	/* Angle from end to start line with arc tangent at start point is double of
	 * bulge tangent angle.
	 */
	// Half angle from tangent1 to line1
	const float thetab1 = (LineAngle(m_line1) - LineAngle(m_tangent1)) / 2.0f;
	// Half angle from line2 to tangent2
	const float thetab2 = (LineAngle(m_tangent2) - LineAngle(m_line2)) / 2.0f;
	const Bulge b1(m_point1, m_middle, std::tan(thetab1));
	const Bulge b2(m_middle, m_point2, std::tan(thetab2));
	return Polyline({b1, b2});
}

}
|
let facade = require('gamecloud')
let {em_Effect_Comm} = facade.const
/**
 * Manages a user's daily actions, enforcing per-day execution limits.
 */
class action extends facade.Assistants.Action {
    constructor(parent) {
        super(parent);
    }

    /**
     * Get the number of extra executions allowed for the given action type.
     * @param {*} $_type - action type (see ActionExecuteType)
     * @returns {number} extra execution count (0 when none recorded)
     */
    GetExtraNum($_type) {
        let cur = 0;
        if (!!this.v.extNum) {
            cur = !!this.v.extNum[$_type] ? this.v.extNum[$_type] : 0;
            // FIX: use strict equality — the original `==` relied on type
            // coercion when comparing against the em_Effect_Comm enum value.
            if (this.isInterOperation($_type) === em_Effect_Comm.InterOperation) {
                // Interactive actions get effect-adjusted extra counts.
                cur = this.parent.effect().CalcFinallyValue(em_Effect_Comm.InterOperation, cur);
            }
        }
        return cur;
    }

    /**
     * Classify an action type as interactive or not.
     * @param {*} $_type - action type
     * @returns em_Effect_Comm.InterOperation for slave-interaction actions,
     *          em_Effect_Comm.None otherwise
     */
    isInterOperation($_type) {
        let ActionExecuteType = this.parent.core.const.ActionExecuteType;
        switch ($_type) {
            case ActionExecuteType.slaveAvenge:
            case ActionExecuteType.slaveCommend:
            case ActionExecuteType.slaveFlattery:
            case ActionExecuteType.slaveFood:
            case ActionExecuteType.slaveLash:
                return em_Effect_Comm.InterOperation;
            default:
                return em_Effect_Comm.None;
        }
    }
}
exports = module.exports = action;
import { DeclarationMetadata } from '../declaration-metadata';
import { ClassMirror } from '../../mirrors';
/**
 * Class metadata.
 * Base type for metadata attached to a class declaration.
 * @class ClassMetadata
 */
export abstract class ClassMetadata<T = any> extends DeclarationMetadata<T> {
  /**
   * classMirror
   * The ClassMirror that owns this metadata.
   */
  public classMirror: ClassMirror;
}
|
<reponame>piotrb5e3/1023-alternative<filename>app/models/experiment.js
import DS from 'ember-data';
import Ember from 'ember';
export default DS.Model.extend({
  name: DS.attr('string'),
  lightoffmode: DS.attr('string'),
  lightofftimeout: DS.attr('number'),
  audiomode: DS.attr('string'),
  repeatscount: DS.attr('number'),
  traininglength: DS.attr('number'),
  instructions: DS.attr('string'),
  // FIX: `async` expects a boolean. The original string 'false' is truthy,
  // which silently made this relationship asynchronous.
  sessions: DS.hasMany('experiment-session', { async: false }),
  // Total number of sessions attached to this experiment.
  sessions_count: Ember.computed('sessions.[]', function () {
    return this.get('sessions').toArray().length;
  }),
  // Number of sessions whose status is 'F' (finished).
  sessions_fin_count: Ember.computed('sessions.@each.status', function () {
    return this.get('sessions').filterBy('status', 'F').length;
  }),
  // Human-readable label for the light-off mode.
  lightoffmode_str: Ember.computed('lightoffmode', function () {
    return {
      fixed: 'Fixed',
      waiting: 'Waiting'
    }[this.get('lightoffmode')];
  }),
  // Human-readable label for the audio mode.
  audiomode_str: Ember.computed('audiomode', function () {
    return {
      none: 'None',
      beep: 'Audible beep on error'
    }[this.get('audiomode')];
  })
});
|
<reponame>AtarisMio/node-dubbo-sleuth
import { HttpHeaders, TraceId } from 'zipkin';
import { AbstractInjector } from './abstract-injector';
/**
 * Injector that writes B3 trace propagation headers into proxy requests.
 */
export class ProxyInjector extends AbstractInjector {
    public static readonly TRACE_ID_NAME = HttpHeaders.TraceId; // traceIdKey -> 'X-B3-TraceId'
    public static readonly SPAN_ID_NAME = HttpHeaders.SpanId; // spanIdKey -> 'X-B3-SpanId'
    public static readonly PARENT_SPAN_ID_NAME = HttpHeaders.ParentSpanId; // parentSpanIdKey -> 'X-B3-ParentSpanId'
    public static readonly SAMPLED_NAME = HttpHeaders.Sampled; // sampledKey -> 'X-B3-Sampled'
    public static readonly FLAGS_NAME = HttpHeaders.Flags; // debugKey -> 'X-B3-Flags' (original comment was garbled; zipkin HttpHeaders.Flags is the B3 debug-flags header)
    constructor() {
        super(
            ProxyInjector.TRACE_ID_NAME,
            ProxyInjector.SPAN_ID_NAME,
            ProxyInjector.PARENT_SPAN_ID_NAME,
            ProxyInjector.SAMPLED_NAME,
            ProxyInjector.FLAGS_NAME,
        );
    }
    /**
     * inject traceId into proxy request header
     * @param headers proxy request header
     * @param traceId trace id
     */
    public inject(headers: Object, traceId: TraceId) {
        super.inject(headers, traceId);
    }
}
// Re-export the addon's mcs-train-group-request model so the host app can
// resolve it under app/models/ without duplicating the definition.
export { default } from 'ember-facial-recognition/models/mcs-train-group-request';
|
#!/usr/bin/env bash
# Copyright (c) 2015-2019 Michael Klement mklement0@gmail.com (http://same2u.net), released under the MIT license.
###
# Home page: https://github.com/mklement0/fileicon
# Author: Michael Klement <mklement0@gmail.com> (http://same2u.net)
# Invoke with:
# --version for version information
# --help for usage information
###
# --- STANDARD SCRIPT-GLOBAL CONSTANTS
# Basename of this script as invoked (used in messages and man-page lookup).
kTHIS_NAME=${BASH_SOURCE##*/}
kTHIS_HOMEPAGE='https://github.com/mklement0/fileicon'
kTHIS_VERSION='v0.2.4' # NOTE: This assignment is automatically updated by `make version VER=<newVer>` - DO keep the 'v' prefix.
unset CDPATH # To prevent unpredictable `cd` behavior.
# --- Begin: STANDARD HELPER FUNCTIONS
# die [msg [exitCode]]
# Prints an error message (default: generic abort text) to stderr and exits
# with the given code (default 1).
die() { echo "$kTHIS_NAME: ERROR: ${1:-"ABORTING due to unexpected error."}" 1>&2; exit ${2:-1}; }
# dieSyntax [msg]
# Prints an argument-usage error to stderr and exits with code 2.
dieSyntax() { echo "$kTHIS_NAME: ARGUMENT ERROR: ${1:-"Invalid argument(s) specified."} Use -h for help." 1>&2; exit 2; }
# SYNOPSIS
#   openUrl <url>
# DESCRIPTION
#   Opens the specified URL in the system's default browser.
#   Returns 1 (with a message on stderr) if no launcher could be run.
openUrl() {
  local url=$1
  local platform
  platform=$(uname)
  local cmd=()
  if [[ $platform == 'Darwin' ]]; then        # macOS
    cmd=( open "$url" )
  elif [[ $platform == CYGWIN_* ]]; then      # Cygwin on Windows; must call cmd.exe with its `start` builtin
    cmd=( cmd.exe /c start '' "$url " )       # !! Note the required trailing space.
  elif [[ $platform == MINGW32_* ]]; then     # MSYS or Git Bash on Windows; they come with a Unix `start` binary
    cmd=( start '' "$url" )
  else                                        # Assume a Freedesktop-compliant OS (many Linux distros, PC-BSD, OpenSolaris, ...)
    cmd=( xdg-open "$url" )
  fi
  "${cmd[@]}" || { echo "Cannot locate or failed to open default browser; please go to '$url' manually." >&2; return 1; }
}
# Prints the embedded Markdown-formatted man-page source to stdout.
# Extracts the lines between the ": <<'EOF_MAN_PAGE'" no-op here-doc at the
# bottom of this very script and its terminating EOF_MAN_PAGE line.
printManPageSource() {
  /usr/bin/sed -n -e $'/^: <<\'EOF_MAN_PAGE\'/,/^EOF_MAN_PAGE/ { s///; t\np;}' "$BASH_SOURCE"
}
# Opens the man page, if installed; otherwise, tries to display the embedded
# Markdown-formatted man-page source; if all else fails: tries to display the
# man page online.
openManPage() {
  local pager embeddedText
  # 1st attempt: the locally installed man page.
  man 1 "$kTHIS_NAME" 2>/dev/null && return
  # 2nd attempt: if present, display the embedded Markdown-formatted man-page source.
  embeddedText=$(printManPageSource)
  if [[ -n $embeddedText ]]; then
    # Prefer the non-standard `less` over the POSIX utility `more`, if available.
    pager='more'
    command -v less &>/dev/null && pager='less'
    printf '%s\n' "$embeddedText" | "$pager"
  else
    # 3rd attempt: open the man page on the utility's website.
    openUrl "${kTHIS_HOMEPAGE}/doc/${kTHIS_NAME}.md"
  fi
}
# Prints the contents of the synopsis chapter of the embedded Markdown-formatted man-page source for quick reference.
printUsage() {
  local embeddedText
  # Extract usage information from the SYNOPSIS chapter of the embedded Markdown-formatted man-page source.
  embeddedText=$(/usr/bin/sed -n -e $'/^: <<\'EOF_MAN_PAGE\'/,/^EOF_MAN_PAGE/!d; /^## SYNOPSIS$/,/^#/{ s///; t\np; }' "$BASH_SOURCE")
  if [[ -n $embeddedText ]]; then
    # Print extracted synopsis chapter - remove backticks for uncluttered display.
    printf '%s\n\n' "$embeddedText" | tr -d '`'
  else # No SYNOPSIS chapter found; fall back to displaying the man page.
    echo "WARNING: usage information not found; opening man page instead." >&2
    openManPage
  fi
}
# --- End: STANDARD HELPER FUNCTIONS
# --- PROCESS STANDARD, OUTPUT-INFO-THEN-EXIT OPTIONS.
case $1 in
  --version)
    # Output version number and exit, if requested.
    # FIX: removed a dead `ver="v0.2.4"` assignment that was never read; the
    # version string comes from $kTHIS_VERSION (kept current by `make version`).
    echo "$kTHIS_NAME $kTHIS_VERSION"$'\nFor license information and more, visit '"$kTHIS_HOMEPAGE"; exit 0
    ;;
  -h|--help)
    # Print usage information and exit.
    printUsage; exit
    ;;
  --man)
    # Display the manual page and exit.
    openManPage; exit
    ;;
  --man-source) # private option, used by `make update-doc`
    # Print raw, embedded Markdown-formatted man-page source and exit
    printManPageSource; exit
    ;;
  --home)
    # Open the home page and exit.
    openUrl "$kTHIS_HOMEPAGE"; exit
    ;;
esac
# --- Begin: SPECIFIC HELPER FUNCTIONS
# NOTE: The functions below operate on byte strings such as the one above:
# A single single string of pairs of hex digits, without separators or line breaks.
# Thus, a given byte position is easily calculated: to get byte $byteIndex, use
# ${byteString:byteIndex*2:2}
# Outputs the specified EXTENDED ATTRIBUTE VALUE as a byte string (a hex dump that is a single-line string of pairs of hex digits, without separators or line breaks, such as "000A2C").
# IMPORTANT: Hex. digits > 9 use UPPERCASE characters.
#   getAttribByteString <file> <attrib_name>
# Returns xattr's exit code (PIPESTATUS is needed because tr, the last command
# in the pipeline, would otherwise mask an xattr failure).
getAttribByteString() {
  xattr -px "$2" "$1" | tr -d ' \n'
  return ${PIPESTATUS[0]}
}
# Outputs the specified file's RESOURCE FORK as a byte string (a hex dump that is a single-line string of pairs of hex digits, without separators or line breaks, such as "000a2c").
# IMPORTANT: Hex. digits > 9 use *lowercase* characters.
# Note: This function relies on `xxd -p <file>/..namedfork/rsrc | tr -d '\n'` rather than the conceptually equivalent `getAttribByteString <file> com.apple.ResourceFork`
#       for PERFORMANCE reasons: getAttribByteString() relies on `xattr`, which is a *Python* script and therefore quite slow due to Python's startup cost.
#   getResourceByteString <file>
getResourceByteString() {
  xxd -p "$1"/..namedfork/rsrc | tr -d '\n'
}
# Patches a single byte in the byte string provided via stdin and prints the
# patched byte string to stdout; returns 1 on an invalid byte spec or index.
#   patchByteInByteString ndx byteSpec
# ndx is the 0-based byte index
# - If <byteSpec> has NO prefix: <byteSpec> becomes the new byte
# - If <byteSpec> has prefix '|': "adds" the value: the result of a bitwise OR with the existing byte becomes the new byte
# - If <byteSpec> has prefix '~': "removes" the value: the result of applying a bitwise AND with the bitwise complement of <byteSpec> to the existing byte becomes the new byte
patchByteInByteString() {
  # FIX: `patchedByte` is now declared local - it previously leaked into the
  # caller's (global) scope.
  local ndx=$1 byteSpec=$2 byteVal byteStr charPos op='' charsBefore='' charsAfter='' currByte patchedByte
  byteStr=$(</dev/stdin)
  charPos=$(( 2 * ndx ))
  # Parse the optional operator prefix off the byte spec.
  case ${byteSpec:0:1} in
    '|')
      op='|'
      byteVal=${byteSpec:1}
      ;;
    '~')
      op='& ~'
      byteVal=${byteSpec:1}
      ;;
    *)
      byteVal=$byteSpec
      ;;
  esac
  # Validate the byte spec: exactly 2 hex digits.
  [[ $byteVal == [0-9A-Fa-f][0-9A-Fa-f] ]] || return 1
  # Validate the byte index.
  # FIX: index 0 is a legal byte position; the original `charPos > 0` check
  # rejected it (harmless for the current callers, which use index 8, but
  # wrong per this function's contract).
  (( charPos >= 0 && charPos < ${#byteStr} )) || return 1
  # Determine the target byte, and strings before and after the byte to patch.
  (( charPos >= 2 )) && charsBefore=${byteStr:0:charPos}
  charsAfter=${byteStr:charPos + 2}
  # Determine the new byte value
  if [[ -n $op ]]; then
    currByte=${byteStr:charPos:2}
    printf -v patchedByte '%02X' "$(( 0x${currByte} $op 0x${byteVal} ))"
  else
    patchedByte=$byteSpec
  fi
  printf '%s%s%s' "$charsBefore" "$patchedByte" "$charsAfter"
}
# hasAttrib <fileOrFolder> <attrib_name>
# Succeeds (exit 0) if the given file or folder carries the named extended
# attribute; xattr lists one attribute name per line, so an exact whole-line
# fixed-string match is used.
hasAttrib() {
  local target=$1 attrName=$2
  xattr "$target" | /usr/bin/grep -Fqx "$attrName"
}
# hasIconsResource <file>
# Succeeds (exit 0) if the file's resource fork contains an icns (icons)
# resource, identified by its magic byte sequence.
hasIconsResource() {
  local targetFile=$1
  getResourceByteString "$targetFile" | /usr/bin/grep -Fq "$kMAGICBYTES_ICNS_RESOURCE"
}
# setCustomIcon <fileOrFolder> <imgFile>
# Converts <imgFile> into an icon set and assigns it to <fileOrFolder>.
# Returns 3 on missing/inaccessible operands, 1 if no icons resource was
# created (typically an unsupported or corrupt image), 0 on success.
setCustomIcon() {
  # FIX: `fileWithResourceFork` is now declared local - it previously leaked
  # into the global scope.
  local fileOrFolder=$1 imgFile=$2 fileWithResourceFork
  [[ (-f $fileOrFolder || -d $fileOrFolder) && -r $fileOrFolder && -w $fileOrFolder ]] || return 3
  [[ -f $imgFile ]] || return 3
  # !!
  # !! Sadly, Apple decided to remove the `-i` / `--addicon` option from the `sips` utility.
  # !! Therefore, use of *Cocoa* is required, which we do *via Python*, which has the added advantage
  # !! of creating a *set* of icons from the source image, scaling as necessary to create a
  # !! 512 x 512 top resolution icon (whereas sips -i created a single, 128 x 128 icon).
  # !! Thanks, https://apple.stackexchange.com/a/161984/28668
  # !!
  # !! Note: setIcon_forFile_options_() seemingly always indicates True, even with invalid image files, so
  # !! we attempt no error handling in the Python code.
  # NOTE(review): this invokes the legacy `python` interpreter; on newer macOS
  # versions only `python3` exists and it does not bundle the PyObjC `Cocoa`
  # module - confirm the target OS before relying on this (see the
  # commented-out python3 variant below).
  # /usr/bin/python3 - "$imgFile" "$fileOrFolder" <<'EOF' || return
  python - "$imgFile" "$fileOrFolder" <<'EOF' || return
import Cocoa
import sys
Cocoa.NSWorkspace.sharedWorkspace().setIcon_forFile_options_(Cocoa.NSImage.alloc().initWithContentsOfFile_(sys.argv[1]), sys.argv[2], 0)
EOF
  # Verify that a resource fork with icons was actually created.
  # For *files*, the resource fork is embedded in the file itself.
  # For *folders* a hidden file named $'Icon\r' is created *inside the folder*.
  [[ -d $fileOrFolder ]] && fileWithResourceFork=${fileOrFolder}/$kFILENAME_FOLDERCUSTOMICON || fileWithResourceFork=$fileOrFolder
  hasIconsResource "$fileWithResourceFork" || {
    cat >&2 <<EOF
Failed to create resource fork with icons. Typically, this means that the specified image file is not supported or corrupt: $imgFile
Supported image formats: jpeg | tiff | png | gif | jp2 | pict | bmp | qtif| psd | sgi | tga
EOF
    return 1
  }
  return 0
}
# getCustomIcon <fileOrFolder> <icnsOutFile>
# Extracts the custom icon's icns resource from <fileOrFolder> into
# <icnsOutFile>. Returns 3 on invalid operands, 1 if no icon resource exists.
getCustomIcon() {
  local fileOrFolder=$1 icnsOutFile=$2 byteStr fileWithResourceFork byteOffset byteCount
  [[ (-f $fileOrFolder || -d $fileOrFolder) && -r $fileOrFolder ]] || return 3
  # Determine what file to extract the resource fork from.
  if [[ -d $fileOrFolder ]]; then
    fileWithResourceFork=${fileOrFolder}/$kFILENAME_FOLDERCUSTOMICON
    [[ -f $fileWithResourceFork ]] || { echo "Custom-icon file does not exist: '${fileWithResourceFork/$'\r'/\\r}'" >&2; return 1; }
  else
    fileWithResourceFork=$fileOrFolder
  fi
  # Determine (based on format description at https://en.wikipedia.org/wiki/Apple_Icon_Image_format):
  # - the byte offset at which the icns resource begins, via the magic literal identifying an icns resource
  # - the length of the resource, which is encoded in the 4 bytes right after the magic literal.
  read -r byteOffset byteCount < <(getResourceByteString "$fileWithResourceFork" | /usr/bin/awk -F "$kMAGICBYTES_ICNS_RESOURCE" '{ printf "%s %d", (length($1) + 2) / 2, "0x" substr($2, 0, 8) }')
  (( byteOffset > 0 && byteCount > 0 )) || { echo "Custom-icon file contains no icons resource: '${fileWithResourceFork/$'\r'/\\r}'" >&2; return 1; }
  # Extract the actual bytes using tail and head and save them to the output file.
  tail -c "+${byteOffset}" "$fileWithResourceFork/..namedfork/rsrc" | head -c $byteCount > "$icnsOutFile" || return
  return 0
}
# removeCustomIcon <fileOrFolder>
# Removes a custom icon: clears the custom-icon flag in com.apple.FinderInfo
# (deleting the attribute entirely if it becomes all-zero) and deletes the
# icon resource. Returns 1 on invalid operands or attribute-update failure.
removeCustomIcon() {
  local fileOrFolder=$1 byteStr
  [[ (-f $fileOrFolder || -d $fileOrFolder) && -r $fileOrFolder && -w $fileOrFolder ]] || return 1
  # Step 1: Turn off the custom-icon flag in the com.apple.FinderInfo extended attribute.
  if hasAttrib "$fileOrFolder" com.apple.FinderInfo; then
    byteStr=$(getAttribByteString "$fileOrFolder" com.apple.FinderInfo | patchByteInByteString $kFI_BYTEOFFSET_CUSTOMICON '~'$kFI_VAL_CUSTOMICON) || return
    if [[ $byteStr == "$kFI_BYTES_BLANK" ]]; then # All bytes cleared? Remove the entire attribute.
      xattr -d com.apple.FinderInfo "$fileOrFolder"
    else # Update the attribute.
      xattr -wx com.apple.FinderInfo "$byteStr" "$fileOrFolder" || return
    fi
  fi
  # Step 2: Remove the resource fork (if target is a file) / hidden file with custom icon (if target is a folder)
  if [[ -d $fileOrFolder ]]; then
    rm -f "${fileOrFolder}/${kFILENAME_FOLDERCUSTOMICON}"
  else
    if hasIconsResource "$fileOrFolder"; then
      xattr -d com.apple.ResourceFork "$fileOrFolder"
    fi
  fi
  return 0
}
# testForCustomIcon <fileOrFolder>
# Returns 0 if the file/folder has a custom icon (flag set AND icon resource
# present), 1 if not, 3 on invalid operands.
testForCustomIcon() {
  local fileOrFolder=$1 byteStr byteVal fileWithResourceFork
  [[ (-f $fileOrFolder || -d $fileOrFolder) && -r $fileOrFolder ]] || return 3
  # Step 1: Check if the com.apple.FinderInfo extended attribute has the custom-icon
  # flag set.
  byteStr=$(getAttribByteString "$fileOrFolder" com.apple.FinderInfo 2>/dev/null) || return 1
  # Isolate the flag byte (2 hex chars per byte) and test the custom-icon bit.
  byteVal=${byteStr:2*kFI_BYTEOFFSET_CUSTOMICON:2}
  (( byteVal & kFI_VAL_CUSTOMICON )) || return 1
  # Step 2: Check if the resource fork of the relevant file contains an icns resource
  if [[ -d $fileOrFolder ]]; then
    fileWithResourceFork=${fileOrFolder}/${kFILENAME_FOLDERCUSTOMICON}
  else
    fileWithResourceFork=$fileOrFolder
  fi
  hasIconsResource "$fileWithResourceFork" || return 1
  return 0
}
# --- End: SPECIFIC HELPER FUNCTIONS
# --- Begin: SPECIFIC SCRIPT-GLOBAL CONSTANTS
# Name of the hidden file that stores a *folder's* custom icon (note the
# trailing carriage return, which is part of the filename).
kFILENAME_FOLDERCUSTOMICON=$'Icon\r'
# The blank hex dump form (single string of pairs of hex digits) of the 32-byte data structure stored in extended attribute
# com.apple.FinderInfo
kFI_BYTES_BLANK='0000000000000000000000000000000000000000000000000000000000000000'
# The hex dump form of the full 32 bytes that Finder assigns to the hidden $'Icon\r'
# file whose com.apple.ResourceFork extended attribute contains the icon image data for the enclosing folder.
# The first 8 bytes spell out the magic literal 'iconMACS'; they are followed by the invisibility flag, '40' in the 9th byte, and '10' (meaning unknown - undocumented)
# in the 10th byte.
# NOTE: Since file $'Icon\r' serves no other purpose than to store the icon, it is
#       safe to simply assign all 32 bytes blindly, without having to worry about
#       preserving existing values.
kFI_BYTES_CUSTOMICONFILEFORFOLDER='69636F6E4D414353401000000000000000000000000000000000000000000000'
# The hex dump form of the magic literal inside a resource fork that marks the
# start of an icns (icons) resource.
# NOTE: This will be used with `xxd -p .. | tr -d '\n'`, which uses *lowercase*
#       hex digits, so we must use lowercase here.
kMAGICBYTES_ICNS_RESOURCE='69636e73'
# The byte values (as hex strings) of the flags at the relevant byte position
# of the com.apple.FinderInfo extended attribute.
kFI_VAL_CUSTOMICON='04'
# The custom-icon-flag byte offset in the com.apple.FinderInfo extended attribute.
kFI_BYTEOFFSET_CUSTOMICON=8
# --- End: SPECIFIC SCRIPT-GLOBAL CONSTANTS
# Option defaults.
force=0 quiet=0
# --- Begin: OPTIONS PARSING
# Generic option parser: handles short options (including compressed form such
# as -fq), long options (--opt and --opt=val), `--` as end-of-options marker,
# and options interspersed with operands.
allowOptsAfterOperands=1 operands=() i=0 optName= isLong=0 prefix= optArg= haveOptArgAttached=0 haveOptArgAsNextArg=0 acceptOptArg=0 needOptArg=0
while (( $# )); do
  if [[ $1 =~ ^(-)[a-zA-Z0-9]+.*$ || $1 =~ ^(--)[a-zA-Z0-9]+.*$ ]]; then # an option: either a short option / multiple short options in compressed form or a long option
    prefix=${BASH_REMATCH[1]}; [[ $prefix == '--' ]] && isLong=1 || isLong=0
    for (( i = 1; i < (isLong ? 2 : ${#1}); i++ )); do
      acceptOptArg=0 needOptArg=0 haveOptArgAttached=0 haveOptArgAsNextArg=0 optArgAttached= optArgOpt= optArgReq=
      if (( isLong )); then # long option: parse into name and, if present, argument
        optName=${1:2}
        [[ $optName =~ ^([^=]+)=(.*)$ ]] && { optName=${BASH_REMATCH[1]}; optArgAttached=${BASH_REMATCH[2]}; haveOptArgAttached=1; }
      else # short option: *if* it takes an argument, the rest of the string, if any, is by definition the argument.
        optName=${1:i:1}; optArgAttached=${1:i+1}; (( ${#optArgAttached} >= 1 )) && haveOptArgAttached=1
      fi
      (( haveOptArgAttached )) && optArgOpt=$optArgAttached optArgReq=$optArgAttached || { (( $# > 1 )) && { optArgReq=$2; haveOptArgAsNextArg=1; }; }
      # ---- BEGIN: CUSTOMIZE HERE
      # This utility only knows flag options (no option takes an argument).
      case $optName in
        f|force)
          force=1
          ;;
        q|quiet)
          quiet=1
          ;;
        *)
          dieSyntax "Unknown option: ${prefix}${optName}."
          ;;
      esac
      # ---- END: CUSTOMIZE HERE
      (( needOptArg )) && { (( ! haveOptArgAttached && ! haveOptArgAsNextArg )) && dieSyntax "Option ${prefix}${optName} is missing its argument." || (( haveOptArgAsNextArg )) && shift; }
      (( acceptOptArg || needOptArg )) && break
    done
  else # an operand
    if [[ $1 == '--' ]]; then
      shift; operands+=( "$@" ); break
    elif (( allowOptsAfterOperands )); then
      operands+=( "$1" ) # continue
    else
      operands=( "$@" )
      break
    fi
  fi
  shift
done
(( "${#operands[@]}" > 0 )) && set -- "${operands[@]}"; unset allowOptsAfterOperands operands i optName isLong prefix optArgAttached haveOptArgAttached haveOptArgAsNextArg acceptOptArg needOptArg
# --- End: OPTIONS PARSING: "$@" now contains all operands (non-option arguments).
# Validate the command
cmd=$(printf %s "$1" | tr '[:upper:]' '[:lower:]') # translate to all-lowercase - we don't want the command name to be case-sensitive
[[ $cmd == 'remove' ]] && cmd='rm' # support alias 'remove' for 'rm'
case $cmd in
  set|get|rm|remove|test)
    shift
    ;;
  *)
    dieSyntax "Unrecognized or missing command: '$cmd'."
    ;;
esac
# Validate file operands
(( $# > 0 )) || dieSyntax "Missing operand(s)."
# Target file or folder.
targetFileOrFolder=$1 imgFile= outFile=
[[ -f $targetFileOrFolder || -d $targetFileOrFolder ]] || die "Target not found or neither file nor folder: '$targetFileOrFolder'"
# Make sure the target file/folder is readable, and, unless only getting or testing for an icon are requested, writeable too.
[[ -r $targetFileOrFolder ]] || die "Cannot access '$targetFileOrFolder': you do not have read permissions."
[[ $cmd == 'test' || $cmd == 'get' || -w $targetFileOrFolder ]] || die "Cannot modify '$targetFileOrFolder': you do not have write permissions."
# Other operands, if any, and their number.
valid=0
case $cmd in
  'set')
    (( $# <= 2 )) && {
      valid=1
      # If no image file was specified, the target file is assumed to be an image file itself whose image should be self-assigned as an icon.
      (( $# == 2 )) && imgFile=$2 || imgFile=$1
      # !! Apparently, a regular file is required - a process substitution such
      # !! as `<(base64 -D <encoded-file.txt)` is NOT supported by NSImage.initWithContentsOfFile()
      [[ -f $imgFile && -r $imgFile ]] || die "Image file not found or not a (readable) regular file: $imgFile"
    }
    ;;
  'rm'|'test')
    (( $# == 1 )) && valid=1
    ;;
  'get')
    (( $# == 1 || $# == 2 )) && {
      valid=1
      outFile=$2
      if [[ $outFile == '-' ]]; then
        outFile=/dev/stdout
      else
        # By default, we extract to a file with the same filename root + '.icns'
        # in the current folder.
        [[ -z $outFile ]] && outFile=${targetFileOrFolder##*/}
        # Unless already specified, we append '.icns' to the output filename.
        mustReset=$(shopt -q nocasematch; echo $?); shopt -s nocasematch
        [[ $outFile =~ \.icns$ ]] || outFile+='.icns'
        (( mustReset )) && shopt -u nocasematch
        [[ -e $outFile && $force -eq 0 ]] && die "Output file '$outFile' already exists. To force its replacement, use -f."
      fi
    }
    ;;
esac
(( valid )) || dieSyntax "Unexpected number of operands."
# Dispatch the validated command to the matching helper function.
case $cmd in
  'set')
    setCustomIcon "$targetFileOrFolder" "$imgFile" || die
    (( quiet )) || echo "Custom icon assigned to '$targetFileOrFolder' based on '$imgFile'."
    ;;
  'rm')
    removeCustomIcon "$targetFileOrFolder" || die
    (( quiet )) || echo "Custom icon removed from '$targetFileOrFolder'."
    ;;
  'get')
    getCustomIcon "$targetFileOrFolder" "$outFile" || die
    (( quiet )) || { [[ $outFile != '/dev/stdout' ]] && echo "Custom icon extracted to '$outFile'."; }
    exit 0
    ;;
  'test')
    testForCustomIcon "$targetFileOrFolder"
    ec=$?
    (( ec <= 1 )) || die
    if (( ! quiet )); then
      (( ec == 0 )) && echo "HAS custom icon: '$targetFileOrFolder'" || echo "Has NO custom icon: '$targetFileOrFolder'"
    fi
    exit $ec
    ;;
  *)
    die "DESIGN ERROR: unanticipated command: $cmd"
    ;;
esac
exit 0
####
# MAN PAGE MARKDOWN SOURCE
# - Place a Markdown-formatted version of the man page for this script
# inside the here-document below.
# The document must be formatted to look good in all 3 viewing scenarios:
# - as a man page, after conversion to ROFF with marked-man
# - as plain text (raw Markdown source)
# - as HTML (rendered Markdown)
# Markdown formatting tips:
# - GENERAL
# To support plain-text rendering in the terminal, limit all lines to 80 chars.,
# and, for similar rendering as HTML, *end every line with 2 trailing spaces*.
# - HEADINGS
# - For better plain-text rendering, leave an empty line after a heading
# marked-man will remove it from the ROFF version.
# - The first heading must be a level-1 heading containing the utility
# name and very brief description; append the manual-section number
# directly to the CLI name; e.g.:
# # foo(1) - does bar
# - The 2nd, level-2 heading must be '## SYNOPSIS' and the chapter's body
# must render reasonably as plain text, because it is printed to stdout
# when `-h`, `--help` is specified:
# Use 4-space indentation without markup for both the syntax line and the
# block of brief option descriptions; represent option-arguments and operands
# in angle brackets; e.g., '<foo>'
# - All other headings should be level-2 headings in ALL-CAPS.
# - TEXT
# - Use NO indentation for regular chapter text; if you do, it will
# be indented further than list items.
# - Use 4-space indentation, as usual, for code blocks.
# - Markup character-styling markup translates to ROFF rendering as follows:
# `...` and **...** render as bolded (red) text
# _..._ and *...* render as word-individually underlined text
# - LISTS
# - Indent list items by 2 spaces for better plain-text viewing, but note
# that the ROFF generated by marked-man still renders them unindented.
# - End every list item (bullet point) itself with 2 trailing spaces too so
# that it renders on its own line.
# - Avoid associating more than 1 paragraph with a list item, if possible,
# because it requires the following trick, which hampers plain-text readability:
# Use ' <space><space>' in lieu of an empty line.
####
: <<'EOF_MAN_PAGE'
# fileicon(1) - manage file and folder custom icons
## SYNOPSIS
Manage custom icons for files and folders on macOS.
SET a custom icon for a file or folder:
fileicon set <fileOrFolder> [<imageFile>]
REMOVE a custom icon from a file or folder:
fileicon rm <fileOrFolder>
GET a file or folder's custom icon:
fileicon get [-f] <fileOrFolder> [<iconOutputFile>]
-f ... force replacement of existing output file
TEST if a file or folder has a custom icon:
fileicon test <fileOrFolder>
All forms: option -q silences status output.
Standard options: `--help`, `--man`, `--version`, `--home`
## DESCRIPTION
`<fileOrFolder>` is the file or folder whose custom icon should be managed.
Note that symlinks are followed to their (ultimate target); that is, you
can only assign custom icons to regular files and folders, not to symlinks
to them.
`<imageFile>` can be an image file of any format supported by the system.
It is converted to an icon and assigned to `<fileOrFolder>`.
If you omit `<imageFile>`, `<fileOrFolder>` must itself be an image file whose
image should become its own icon.
`<iconOutputFile>` specifies the file to extract the custom icon to:
Defaults to the filename of `<fileOrFolder>` with extension `.icns` appended.
If a value is specified, extension `.icns` is appended, unless already present.
Either way, extraction fails if the target file already exists; use `-f` to
override.
Specify `-` to extract to stdout.
Command `test` signals with its exit code whether a custom icon is set (0)
or not (1); any other exit code signals an unexpected error.
**Options**:
* `-f`, `--force`
When getting (extracting) a custom icon, forces replacement of the
output file, if it already exists.
* `-q`, `--quiet`
Suppresses output of the status information that is by default output to
stdout.
Note that errors and warnings are still printed to stderr.
## NOTES
Custom icons are stored in extended attributes of the HFS+ filesystem.
Thus, if you copy files or folders to a different filesystem that doesn't
support such attributes, custom icons are lost; for instance, custom icons
cannot be stored in a Git repository.
To determine if a given file or folder has extended attributes, use
`ls -l@ <fileOrFolder>`.
When setting an image as a custom icon, a set of icons with several resolutions
is created, with the highest resolution at 512 x 512 pixels.
All icons created are square, so images with a non-square aspect ratio will
appear distorted; for best results, use square images.
## STANDARD OPTIONS
All standard options provide information only.
* `-h, --help`
Prints the contents of the synopsis chapter to stdout for quick reference.
* `--man`
Displays this manual page, which is a helpful alternative to using `man`,
if the manual page isn't installed.
* `--version`
Prints version information.
* `--home`
Opens this utility's home page in the system's default web browser.
## LICENSE
For license information and more, visit the home page by running
`fileicon --home`
EOF_MAN_PAGE |
using System;
using System.IO;
using System.Threading;
/// <summary>
/// A console abstraction whose input/output/error streams can be swapped at
/// runtime, e.g. redirected to in-memory buffers for testing.
/// </summary>
public class VirtualConsole
{
    private TextWriter _output;
    private TextWriter _error;
    private TextReader _input;
    private readonly CancellationToken _cancellationToken;

    /// <summary>Creates a console over the given streams.</summary>
    public VirtualConsole(TextReader input, TextWriter output, TextWriter error, CancellationToken cancellationToken)
    {
        _input = input;
        _output = output;
        _error = error;
        _cancellationToken = cancellationToken;
    }

    /// <summary>Creates a console bound to the real process console streams.</summary>
    public VirtualConsole(CancellationToken cancellationToken) : this(Console.In, Console.Out, Console.Error, cancellationToken)
    {
    }

    /// <summary>Writes <paramref name="message"/> to the output stream.</summary>
    public void WriteOutput(string message)
    {
        _output.Write(message);
    }

    /// <summary>Writes <paramref name="message"/> to the error stream.</summary>
    public void WriteError(string message)
    {
        _error.Write(message);
    }

    /// <summary>Reads one line from the input stream (null at end of input).</summary>
    public string ReadInput()
    {
        return _input.ReadLine();
    }

    /// <summary>Redirects subsequent output writes into the given memory stream.</summary>
    public void RedirectOutputToMemory(MemoryStream memoryStream)
    {
        // FIX: without AutoFlush, writes sat in the StreamWriter's internal
        // buffer and never reached the MemoryStream until Flush/Dispose.
        _output = new StreamWriter(memoryStream) { AutoFlush = true };
    }

    /// <summary>Redirects subsequent error writes into the given memory stream.</summary>
    public void RedirectErrorToMemory(MemoryStream memoryStream)
    {
        // FIX: same AutoFlush issue as RedirectOutputToMemory.
        _error = new StreamWriter(memoryStream) { AutoFlush = true };
    }

    /// <summary>Redirects subsequent input reads to come from the given memory stream.</summary>
    public void RedirectInputFromMemory(MemoryStream memoryStream)
    {
        _input = new StreamReader(memoryStream);
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Windows.Threading;
/// <summary>
/// ObservableCollection that additionally raises strongly-typed
/// ItemAdded/ItemRemoved events, marshalled onto the dispatcher of the thread
/// that constructed the collection (typically the UI thread).
/// </summary>
public class CustomObservableCollection<T> : ObservableCollection<T>
{
    public event EventHandler<T> ItemAdded;
    public event EventHandler<T> ItemRemoved;

    private readonly Dispatcher _dispatcher;

    public CustomObservableCollection()
    {
        // Capture the constructing thread's dispatcher for event marshalling.
        _dispatcher = Dispatcher.CurrentDispatcher;
    }

    // FIX: the original declared `new` (hiding) Add/Remove methods, so any
    // mutation made through a base-typed reference (ObservableCollection<T>,
    // IList<T>, Collection<T>.Insert, ...) silently skipped the events.
    // Overriding the protected mutation hooks guarantees the events fire for
    // every add/remove path, while Add/Remove keep their inherited signatures.
    // NOTE(review): Clear()/ClearItems() still raises no ItemRemoved events,
    // matching the original behavior - confirm whether per-item events are
    // wanted there.
    protected override void InsertItem(int index, T item)
    {
        base.InsertItem(index, item);
        DispatchEvent(ItemAdded, item);
    }

    protected override void RemoveItem(int index)
    {
        // Capture the item before the base class removes it.
        T removed = this[index];
        base.RemoveItem(index);
        DispatchEvent(ItemRemoved, removed);
    }

    // Invokes the handler on the captured dispatcher, if anyone subscribed.
    private void DispatchEvent(EventHandler<T> handler, T item)
    {
        if (handler != null)
        {
            _dispatcher.Invoke(() => handler(this, item));
        }
    }
}
#!/bin/bash
# Renders a 1000x1500 card image with ImageMagick: a colored canvas with a
# radial-gradient alpha mask, a rounded-rectangle stroke, and a centered logo,
# written to output/<FILENAME>.png.
#
# Options (-t/-g/-s/-b are parsed but currently unused):
#   -f|--filename     base name (without extension) of the output file
#   -t|--text         overlay text
#   -l|--logofile     logo image placed in the center
#   -g|--logogeo      logo geometry
#   -s|--logoscale    logo scale
#   -c|--bgcolor      background canvas color
#   -b|--bottomcolor  bottom color
#   -d|--strokecolor  color of the rounded-rectangle stroke
POSITIONAL=()
while [[ $# -gt 0 ]]; do
  key="$1"
  case $key in
    -f|--filename)
      FILENAME="$2"
      shift # past argument
      shift # past value
      ;;
    -t|--text)
      TEXT="$2"
      shift # past argument
      shift # past value
      ;;
    -l|--logofile)
      LOGOFILE="$2"
      shift # past argument
      shift # past value
      ;;
    -g|--logogeo)
      LOGOGEO="$2"
      shift # past argument
      shift # past value
      ;;
    -s|--logoscale)
      LOGOSCALE="$2"
      shift # past argument
      shift # past value
      ;;
    -c|--bgcolor)
      BGCOLOR="$2"
      shift # past argument
      shift # past value
      ;;
    -b|--bottomcolor)
      BOTTOMCOLOR="$2"
      shift # past argument
      shift # past value
      ;;
    -d|--strokecolor)
      STROKECOLOR="$2"
      shift # past argument
      shift # past value
      ;;
    *) # unknown option
      POSITIONAL+=("$1") # save it in an array for later
      shift # past argument
      ;;
  esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
DIR=output
echo "$LOGOFILE"
# FIX: use mkdir -p instead of a test-then-mkdir (no race, no error if the
# directory appears in between), and quote all expansions below so values
# containing spaces or glob characters do not get word-split.
mkdir -p "$DIR"
convert -size 1000x1500 -define png:color-type=2 canvas:"$BGCOLOR" back.png && \
convert back.png -size 3000x3000 radial-gradient:black-white -gravity west -geometry -2600-400 -contrast-stretch 0%x0% -compose copy_opacity -composite -fill none -stroke "$STROKECOLOR" -strokewidth 20 -draw "roundrectangle 20,20 980,1480,24,24" temp.png && \
convert "$LOGOFILE"'[600x600]' - | composite -gravity center - temp.png "output/$FILENAME.png"
# Clean up intermediate images.
rm -f back.png
rm -f temp.png
|
<reponame>Xi-Plus/OJ-Code
#include <algorithm>
#include <iostream>
// Sieve upper bound: all primes below 10^7 are enumerated.
const int K=10000000;
// nisp[i] == true  <=>  i is NOT prime ("not is prime"); sized for indices 0..K-1.
bool nisp[K]={false};
// p[1..664579] holds the primes below K in increasing order (there are exactly
// 664,579 of them); p[0] stays 0 so that the n-th prime is p[n], 1-based.
int p[664580]={0};
using namespace std;
int main(){
    // Sieve of Eratosthenes: mark multiples of each prime, collect primes into p[].
    nisp[0] = nisp[1] = true;
    int i = 2, j, k = 1;
    while (i < K){
        if (!nisp[i]){
            p[k++] = i;
            j = i * 2;
            while (j < K){
                nisp[j] = true;
                j += i;
            }
        }
        i++;
    }
    // Read expressions of the form "<prime> <op> <prime>"; map each operand to
    // its 1-based index in the prime sequence via binary search, combine the
    // indices with the operator, and print the prime at the resulting index.
    // NOTE(review): assumes both inputs are primes < K and that the combined
    // index stays within 1..664579 - out-of-range results read garbage; this
    // matches the judge problem's guarantees, presumably - confirm.
    int a, b, m; char c;
    while (cin >> a >> c >> b){
        a=lower_bound(p,p+664580,a)-p;
        b=lower_bound(p,p+664580,b)-p;
        switch (c){
        case '+':
            cout << p[a + b] << endl;
            break;
        case '-':
            cout << p[a - b] << endl;
            break;
        case '*':
            cout << p[a*b] << endl;
            break;
        case '/':
            cout << p[a / b] << endl;
            break;
        }
    }
    return 0;
}
<reponame>carlos-sancho-ramirez/android-java-langbook
package sword.langbook3.android;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import sword.collections.ImmutableHashSet;
import sword.collections.ImmutableMap;
import sword.collections.ImmutableSet;
import sword.collections.List;
import sword.collections.MutableHashSet;
import sword.collections.MutableList;
import sword.collections.MutableSet;
import sword.langbook3.android.db.AcceptationId;
import sword.langbook3.android.db.AcceptationIdBundler;
import sword.langbook3.android.db.AgentId;
import sword.langbook3.android.db.AgentIdBundler;
import sword.langbook3.android.db.AlphabetId;
import sword.langbook3.android.db.BunchId;
import sword.langbook3.android.db.BunchIdBundler;
import sword.langbook3.android.db.BunchIdParceler;
import sword.langbook3.android.db.ConceptId;
import sword.langbook3.android.db.Correlation;
import sword.langbook3.android.db.CorrelationArrayParceler;
import sword.langbook3.android.db.CorrelationEntryListParceler;
import sword.langbook3.android.db.CorrelationId;
import sword.langbook3.android.db.ImmutableCorrelation;
import sword.langbook3.android.db.ImmutableCorrelationArray;
import sword.langbook3.android.db.LangbookDbChecker;
import sword.langbook3.android.db.LangbookDbManager;
import sword.langbook3.android.db.ParcelableCorrelationArray;
import sword.langbook3.android.db.RuleId;
import sword.langbook3.android.db.RuleIdManager;
import sword.langbook3.android.db.RuleIdParceler;
import sword.langbook3.android.models.AgentDetails;
import static sword.langbook3.android.db.BunchIdManager.conceptAsBunchId;
public final class AgentEditorActivity extends Activity implements View.OnClickListener {
private static final int REQUEST_CODE_DEFINE_START_ADDER = 1;
private static final int REQUEST_CODE_DEFINE_END_ADDER = 2;
private static final int REQUEST_CODE_PICK_TARGET_BUNCH = 3;
private static final int REQUEST_CODE_PICK_SOURCE_BUNCH = 4;
private static final int REQUEST_CODE_PICK_DIFF_BUNCH = 5;
private static final int REQUEST_CODE_PICK_RULE = 6;
private interface ArgKeys {
String AGENT = BundleKeys.AGENT;
String TARGET_BUNCH = BundleKeys.TARGET_BUNCH;
String SOURCE_BUNCH = BundleKeys.SOURCE_BUNCH;
String DIFF_BUNCH = BundleKeys.DIFF_BUNCH;
}
private interface SavedKeys {
String STATE = "st";
}
    /** Opens the agent editor to create a brand-new agent from scratch. */
    public static void open(Context context) {
        final Intent intent = new Intent(context, AgentEditorActivity.class);
        context.startActivity(intent);
    }

    /** Opens the editor for a new agent with the given target bunch preselected. */
    public static void openWithTarget(Context context, BunchId targetBunch) {
        final Intent intent = new Intent(context, AgentEditorActivity.class);
        BunchIdBundler.writeAsIntentExtra(intent, ArgKeys.TARGET_BUNCH, targetBunch);
        context.startActivity(intent);
    }

    /** Opens the editor for a new agent with the given source bunch preselected. */
    public static void openWithSource(Context context, BunchId sourceBunch) {
        final Intent intent = new Intent(context, AgentEditorActivity.class);
        BunchIdBundler.writeAsIntentExtra(intent, ArgKeys.SOURCE_BUNCH, sourceBunch);
        context.startActivity(intent);
    }

    /** Opens the editor for a new agent with the given diff bunch preselected. */
    public static void openWithDiff(Context context, BunchId diffBunch) {
        final Intent intent = new Intent(context, AgentEditorActivity.class);
        BunchIdBundler.writeAsIntentExtra(intent, ArgKeys.DIFF_BUNCH, diffBunch);
        context.startActivity(intent);
    }

    /** Opens the editor to modify the existing agent identified by agentId. */
    public static void open(Context context, AgentId agentId) {
        final Intent intent = new Intent(context, AgentEditorActivity.class);
        AgentIdBundler.writeAsIntentExtra(intent, ArgKeys.AGENT, agentId);
        context.startActivity(intent);
    }
    /**
     * Parcelable snapshot of the editor's in-progress agent definition, used
     * to survive configuration changes via onSaveInstanceState.
     */
    public static final class State implements Parcelable {
        // Bunches the agent writes matching acceptations into.
        MutableList<BunchId> targetBunches = MutableList.empty();
        // Bunches whose acceptations the agent processes.
        MutableList<BunchId> sourceBunches = MutableList.empty();
        // Bunches whose acceptations are excluded from processing.
        MutableList<BunchId> diffBunches = MutableList.empty();
        // Per-alphabet text that must match at the start of an acceptation.
        MutableList<Correlation.Entry<AlphabetId>> startMatcher = MutableList.empty();
        // Replacement correlation array for the matched start.
        ImmutableCorrelationArray<AlphabetId> startAdder = ImmutableCorrelationArray.empty();
        // Per-alphabet text that must match at the end of an acceptation.
        MutableList<Correlation.Entry<AlphabetId>> endMatcher = MutableList.empty();
        // Replacement correlation array for the matched end.
        ImmutableCorrelationArray<AlphabetId> endAdder = ImmutableCorrelationArray.empty();
        // Rule applied when the agent modifies text; may be null.
        RuleId rule;

        State() {
        }

        // Reads fields back in exactly the order writeToParcel wrote them.
        private State(Parcel in) {
            final int targetBunchesCount = in.readInt();
            for (int i = 0; i < targetBunchesCount; i++) {
                targetBunches.append(BunchIdParceler.read(in));
            }
            final int sourceBunchesCount = in.readInt();
            for (int i = 0; i < sourceBunchesCount; i++) {
                sourceBunches.append(BunchIdParceler.read(in));
            }
            final int diffBunchesCount = in.readInt();
            for (int i = 0; i < diffBunchesCount; i++) {
                diffBunches.append(BunchIdParceler.read(in));
            }
            CorrelationEntryListParceler.readInto(in, startMatcher);
            startAdder = CorrelationArrayParceler.read(in);
            CorrelationEntryListParceler.readInto(in, endMatcher);
            endAdder = CorrelationArrayParceler.read(in);
            rule = RuleIdParceler.read(in);
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            // Each list is written as a count followed by its elements, so the
            // Parcel constructor above can read them back symmetrically.
            dest.writeInt(targetBunches.size());
            for (BunchId value : targetBunches) {
                BunchIdParceler.write(dest, value);
            }
            dest.writeInt(sourceBunches.size());
            for (BunchId value : sourceBunches) {
                BunchIdParceler.write(dest, value);
            }
            dest.writeInt(diffBunches.size());
            for (BunchId value : diffBunches) {
                BunchIdParceler.write(dest, value);
            }
            CorrelationEntryListParceler.write(dest, startMatcher);
            CorrelationArrayParceler.write(dest, startAdder);
            CorrelationEntryListParceler.write(dest, endMatcher);
            CorrelationArrayParceler.write(dest, endAdder);
            RuleIdParceler.write(dest, rule);
        }

        @Override
        public int describeContents() {
            return 0;
        }

        public static final Creator<State> CREATOR = new Creator<State>() {
            @Override
            public State createFromParcel(Parcel in) {
                return new State(in);
            }

            @Override
            public State[] newArray(int size) {
                return new State[size];
            }
        };
    }
// Current editor state; restored from the saved instance state when available.
private State _state;
// Alphabet used to render concept texts shown to the user.
private AlphabetId _preferredAlphabet;
// All alphabets in the database, keyed by id, mapped to their display names.
private ImmutableMap<AlphabetId, String> _alphabets;
// Whether the rule picker panel has already been made visible.
private boolean _enabledFlagAndRuleFields;
// Containers holding the dynamically inflated list rows.
private LinearLayout _targetBunchesContainer;
private LinearLayout _sourceBunchesContainer;
private LinearLayout _diffBunchesContainer;
private LinearLayout _startMatchersContainer;
private LinearLayout _startAdderEntry;
private LinearLayout _endMatchersContainer;
private LinearLayout _endAdderEntry;
/** Reads the agent to edit from the launching intent; null when creating a new agent. */
private AgentId getAgentId() {
    final Intent launchIntent = getIntent();
    return AgentIdBundler.readAsIntentExtra(launchIntent, ArgKeys.AGENT);
}
/** Reads the preselected source bunch from the launching intent, if any. */
private BunchId getSourceBunch() {
    final Intent launchIntent = getIntent();
    return BunchIdBundler.readAsIntentExtra(launchIntent, ArgKeys.SOURCE_BUNCH);
}
/** Reads the preselected diff bunch from the launching intent, if any. */
private BunchId getDiffBunch() {
    final Intent launchIntent = getIntent();
    return BunchIdBundler.readAsIntentExtra(launchIntent, ArgKeys.DIFF_BUNCH);
}
/** Reads the preselected target bunch from the launching intent, if any. */
private BunchId getTargetBunch() {
    final Intent launchIntent = getIntent();
    return BunchIdBundler.readAsIntentExtra(launchIntent, ArgKeys.TARGET_BUNCH);
}
/**
 * Synchronises a bunch container with the bunches held in state:
 * surplus rows are dropped from the end, existing rows are rebound,
 * and missing rows are appended.
 *
 * @param checker database checker used to resolve bunch display texts.
 * @param container view group holding one row per bunch.
 * @param bunches backing model list for the container.
 */
private void updateBunchSet(LangbookDbChecker checker, ViewGroup container, MutableList<BunchId> bunches) {
    final int wanted = bunches.size();
    int present = container.getChildCount();
    while (present > wanted) {
        present--;
        container.removeViewAt(present);
    }

    for (int i = 0; i < wanted; i++) {
        final BunchId bunch = bunches.valueAt(i);
        if (i < present) {
            bindBunch(container.getChildAt(i), checker, bunch, container, bunches);
        }
        else {
            addBunch(checker, bunch, container, bunches);
        }
    }
}
/**
 * Synchronises a correlation-entry container with the entries held in state:
 * surplus rows are dropped from the end, existing rows are rebound,
 * and missing rows are appended.
 *
 * @param container view group holding one row per correlation entry.
 * @param correlation backing model list for the container.
 */
private void updateCorrelation(ViewGroup container, MutableList<Correlation.Entry<AlphabetId>> correlation) {
    final int wanted = correlation.size();
    int present = container.getChildCount();
    while (present > wanted) {
        present--;
        container.removeViewAt(present);
    }

    for (int i = 0; i < wanted; i++) {
        final Correlation.Entry<AlphabetId> entry = correlation.get(i);
        if (i < present) {
            bindEntry(container.getChildAt(i), entry, container, correlation);
        }
        else {
            addEntry(entry, container, correlation);
        }
    }
}
/**
 * Pushes the whole current {@link #_state} into the UI: bunch containers,
 * matcher rows, adder rows and the rule text. Called once from onCreate
 * after all views have been cached.
 */
private void setStateValues() {
    final LangbookDbChecker checker = DbManager.getInstance().getManager();
    updateBunchSet(checker, _targetBunchesContainer, _state.targetBunches);
    updateBunchSet(checker, _sourceBunchesContainer, _state.sourceBunches);
    updateBunchSet(checker, _diffBunchesContainer, _state.diffBunches);
    updateCorrelation(_startMatchersContainer, _state.startMatcher);
    bindAdder(_startAdderEntry, _state.startAdder);
    updateCorrelation(_endMatchersContainer, _state.endMatcher);
    bindAdder(_endAdderEntry, _state.endAdder);
    // Reveal the rule picker as soon as any matcher has text or any adder is set.
    if (_state.startMatcher.anyMatch(entry -> !TextUtils.isEmpty(entry.text)) || !_state.startAdder.isEmpty() ||
            _state.endMatcher.anyMatch(entry -> !TextUtils.isEmpty(entry.text)) || !_state.endAdder.isEmpty()) {
        enableFlagAndRuleFields();
    }
    final TextView textView = findViewById(R.id.ruleText);
    textView.setText((_state.rule != null)? checker.readConceptText(_state.rule.getConceptId(), _preferredAlphabet) : null);
}
/**
 * Initialises the activity: loads alphabets, builds or restores the editor
 * {@link State}, wires all button listeners, caches the row containers and
 * finally renders the state into the UI.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.agent_editor_activity);
    _preferredAlphabet = LangbookPreferences.getInstance().getPreferredAlphabet();
    final LangbookDbChecker checker = DbManager.getInstance().getManager();
    _alphabets = checker.readAllAlphabets(_preferredAlphabet);
    if (savedInstanceState != null) {
        // Configuration change: restore the half-edited state.
        _state = savedInstanceState.getParcelable(SavedKeys.STATE);
    }
    else {
        _state = new State();
        final AgentId agentId = getAgentId();
        final BunchId sourceBunch = getSourceBunch();
        final BunchId diffBunch = getDiffBunch();
        final BunchId targetBunch = getTargetBunch();
        if (agentId != null) {
            // Editing an existing agent: preload all its details from the database.
            final AgentDetails<AlphabetId, CorrelationId, BunchId, RuleId> agentDetails = checker.getAgentDetails(agentId);
            _state.targetBunches = agentDetails.targetBunches.toList().mutate();
            _state.sourceBunches = agentDetails.sourceBunches.toList().mutate();
            _state.diffBunches = agentDetails.diffBunches.toList().mutate();
            _state.startMatcher = agentDetails.startMatcher.toCorrelationEntryList();
            _state.startAdder = agentDetails.startAdder;
            _state.endMatcher = agentDetails.endMatcher.toCorrelationEntryList();
            _state.endAdder = agentDetails.endAdder;
            _state.rule = agentDetails.rule;
        }
        // Otherwise at most one bunch may have been preselected by the launcher.
        else if (targetBunch != null) {
            _state.targetBunches.append(targetBunch);
        }
        else if (sourceBunch != null) {
            _state.sourceBunches.append(sourceBunch);
        }
        else if (diffBunch != null) {
            _state.diffBunches.append(diffBunch);
        }
    }
    // Wire every button to this activity's onClick and cache the containers.
    findViewById(R.id.addTargetBunchButton).setOnClickListener(this);
    findViewById(R.id.addSourceBunchButton).setOnClickListener(this);
    findViewById(R.id.addDiffBunchButton).setOnClickListener(this);
    _targetBunchesContainer = findViewById(R.id.targetBunchesContainer);
    _sourceBunchesContainer = findViewById(R.id.sourceBunchesContainer);
    _diffBunchesContainer = findViewById(R.id.diffBunchesContainer);
    _startMatchersContainer = findViewById(R.id.startMatchersContainer);
    _startAdderEntry = findViewById(R.id.startAdderEntry);
    findViewById(R.id.startAdderRemoveButton).setOnClickListener(this);
    _endMatchersContainer = findViewById(R.id.endMatchersContainer);
    _endAdderEntry = findViewById(R.id.endAdderEntry);
    findViewById(R.id.endAdderRemoveButton).setOnClickListener(this);
    findViewById(R.id.addStartMatcherButton).setOnClickListener(this);
    findViewById(R.id.addStartAdderButton).setOnClickListener(this);
    findViewById(R.id.addEndMatcherButton).setOnClickListener(this);
    findViewById(R.id.addEndAdderButton).setOnClickListener(this);
    findViewById(R.id.ruleChangeButton).setOnClickListener(this);
    findViewById(R.id.saveButton).setOnClickListener(this);
    setStateValues();
}
/** Inflates a new correlation-entry row at the end of the container and binds it. */
private void addEntry(Correlation.Entry<AlphabetId> entry, ViewGroup container, MutableList<Correlation.Entry<AlphabetId>> entries) {
    getLayoutInflater().inflate(R.layout.agent_editor_correlation_entry, container, true);
    final int lastIndex = container.getChildCount() - 1;
    bindEntry(container.getChildAt(lastIndex), entry, container, entries);
}
/**
 * Fills a correlation-entry row: alphabet spinner, editable text and remove button.
 * The text is set before the watcher is attached, so programmatic binding does not
 * trigger {@link CorrelationTextWatcher#afterTextChanged}.
 */
private void bindEntry(View view, Correlation.Entry<AlphabetId> entry, ViewGroup container, MutableList<Correlation.Entry<AlphabetId>> entries) {
    final Spinner alphabetSpinner = view.findViewById(R.id.alphabet);
    alphabetSpinner.setAdapter(new AlphabetAdapter());
    final int position = _alphabets.keySet().indexOf(entry.alphabet);
    if (position >= 0) {
        alphabetSpinner.setSelection(position);
    }
    alphabetSpinner.setOnItemSelectedListener(new AlphabetSelectedListener(entry));
    final EditText textField = view.findViewById(R.id.text);
    textField.setText(entry.text);
    textField.addTextChangedListener(new CorrelationTextWatcher(entry));
    view.findViewById(R.id.removeButton).setOnClickListener(v -> removeEntry(entry, container, entries));
}
/** Removes the given entry's row from the container and the entry from the model. */
private static void removeEntry(Correlation.Entry<AlphabetId> entry, ViewGroup container, MutableList<Correlation.Entry<AlphabetId>> entries) {
    final int index = entries.indexOf(entry);
    if (index < 0) {
        // An entry bound to a row must always be present in its backing list.
        throw new AssertionError();
    }

    container.removeViewAt(index);
    entries.removeAt(index);
}
/**
 * Spinner listener that keeps a correlation entry's alphabet in sync with
 * the user's selection.
 */
private final class AlphabetSelectedListener implements AdapterView.OnItemSelectedListener {
    private final Correlation.Entry<AlphabetId> _entry;

    AlphabetSelectedListener(Correlation.Entry<AlphabetId> entry) {
        if (entry == null) {
            throw new IllegalArgumentException();
        }
        _entry = entry;
    }

    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        // Spinner positions map 1:1 onto _alphabets keys (see AlphabetAdapter).
        _entry.alphabet = _alphabets.keyAt(position);
    }

    @Override
    public void onNothingSelected(AdapterView<?> parent) {
        // Nothing to be done
    }
}
/**
 * Text watcher that mirrors the row's edited text into its correlation entry
 * and reveals the rule picker as soon as the user types anything.
 */
private final class CorrelationTextWatcher implements TextWatcher {
    private final Correlation.Entry<AlphabetId> _entry;

    CorrelationTextWatcher(Correlation.Entry<AlphabetId> entry) {
        _entry = entry;
    }

    @Override
    public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        // Nothing to be done
    }

    @Override
    public void onTextChanged(CharSequence s, int start, int before, int count) {
        // Nothing to be done
    }

    @Override
    public void afterTextChanged(Editable s) {
        _entry.text = s.toString();
        enableFlagAndRuleFields();
    }
}
/** Reveals the rule picker panel the first time this is called; no-op afterwards. */
private void enableFlagAndRuleFields() {
    if (_enabledFlagAndRuleFields) {
        return;
    }

    findViewById(R.id.rulePickerPanel).setVisibility(View.VISIBLE);
    _enabledFlagAndRuleFields = true;
}
/**
 * Spinner adapter listing all alphabets from {@link #_alphabets}, showing
 * each alphabet's display text. Positions correspond to key order, which is
 * what {@link AlphabetSelectedListener} relies on.
 */
private final class AlphabetAdapter extends BaseAdapter {
    // Lazily created on first getView call, when a parent context is available.
    private LayoutInflater _inflater;

    @Override
    public int getCount() {
        return _alphabets.size();
    }

    @Override
    public AlphabetId getItem(int position) {
        return _alphabets.keyAt(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        final View view;
        if (convertView == null) {
            if (_inflater == null) {
                _inflater = LayoutInflater.from(parent.getContext());
            }
            view = _inflater.inflate(R.layout.quiz_type_item, parent, false);
        }
        else {
            // Recycle the scrapped view provided by the framework.
            view = convertView;
        }

        final TextView textView = view.findViewById(R.id.itemTextView);
        textView.setText(_alphabets.valueAt(position));
        return view;
    }
}
/** Inflates a new bunch row at the end of the container and binds it. */
private void addBunch(LangbookDbChecker checker, BunchId bunch, ViewGroup container, MutableList<BunchId> bunches) {
    getLayoutInflater().inflate(R.layout.agent_editor_bunch_entry, container, true);
    final int lastIndex = container.getChildCount() - 1;
    bindBunch(container.getChildAt(lastIndex), checker, bunch, container, bunches);
}
/**
 * Fills a bunch row with the bunch's concept text and wires its remove button.
 */
private void bindBunch(View view, LangbookDbChecker checker, BunchId bunch, ViewGroup container, MutableList<BunchId> bunches) {
    final TextView textView = view.findViewById(R.id.textView);
    textView.setText(checker.readConceptText(bunch.getConceptId(), _preferredAlphabet));
    view.findViewById(R.id.removeButton).setOnClickListener(v -> removeBunch(container, bunches, bunch));
}
/**
 * Shows the adder row with the plain-text rendering of the correlation array,
 * or hides the row entirely when the array is empty.
 */
private void bindAdder(View view, ImmutableCorrelationArray<AlphabetId> correlationArray) {
    if (!correlationArray.isEmpty()) {
        final String plainText = CorrelationPickerAdapter.toPlainText(correlationArray, ImmutableHashSet.empty());
        view.<TextView>findViewById(R.id.textView).setText(plainText);
        view.setVisibility(View.VISIBLE);
    }
    else {
        view.setVisibility(View.GONE);
    }
}
/** Removes the given bunch's row from the container and the bunch from the model. */
private static void removeBunch(ViewGroup container, MutableList<BunchId> bunches, BunchId bunch) {
    final int position = bunches.indexOf(bunch);
    if (position < 0) {
        // A bunch bound to a row must always be present in its backing list.
        throw new AssertionError();
    }

    container.removeViewAt(position);
    bunches.removeAt(position);
}
/** Collects the (alphabet, text) pairs of the given entries into an immutable correlation. */
private static ImmutableCorrelation<AlphabetId> buildCorrelation(List<Correlation.Entry<AlphabetId>> entries) {
    final ImmutableCorrelation.Builder<AlphabetId> resultBuilder = new ImmutableCorrelation.Builder<>();
    for (Correlation.Entry<AlphabetId> entry : entries) {
        resultBuilder.put(entry.alphabet, entry.text);
    }

    return resultBuilder.build();
}
/**
 * Central click handler for every button wired in onCreate. Picker buttons
 * launch a secondary activity whose result is consumed in onActivityResult;
 * the save button validates the state and creates or updates the agent.
 */
@Override
public void onClick(View v) {
    switch (v.getId()) {
        case R.id.addTargetBunchButton:
            AcceptationPickerActivity.open(this, REQUEST_CODE_PICK_TARGET_BUNCH);
            break;

        case R.id.addSourceBunchButton:
            AcceptationPickerActivity.open(this, REQUEST_CODE_PICK_SOURCE_BUNCH);
            break;

        case R.id.addDiffBunchButton:
            AcceptationPickerActivity.open(this, REQUEST_CODE_PICK_DIFF_BUNCH);
            break;

        case R.id.addStartMatcherButton:
            Correlation.Entry<AlphabetId> entry = new Correlation.Entry<>(_alphabets.keyAt(0), null);
            _state.startMatcher.append(entry);
            addEntry(entry, _startMatchersContainer, _state.startMatcher);
            break;

        case R.id.addStartAdderButton:
            LanguagePickerActivity.open(this, REQUEST_CODE_DEFINE_START_ADDER);
            break;

        case R.id.addEndMatcherButton:
            entry = new Correlation.Entry<>(_alphabets.keyAt(0), null);
            _state.endMatcher.append(entry);
            addEntry(entry, _endMatchersContainer, _state.endMatcher);
            break;

        case R.id.addEndAdderButton:
            LanguagePickerActivity.open(this, REQUEST_CODE_DEFINE_END_ADDER);
            break;

        case R.id.ruleChangeButton:
            AcceptationPickerActivity.open(this, REQUEST_CODE_PICK_RULE);
            break;

        case R.id.startAdderRemoveButton:
            _state.startAdder = ImmutableCorrelationArray.empty();
            bindAdder(_startAdderEntry, _state.startAdder);
            break;

        case R.id.endAdderRemoveButton:
            _state.endAdder = ImmutableCorrelationArray.empty();
            // Bind the state value, matching the startAdderRemoveButton branch
            // (was binding a second fresh empty array).
            bindAdder(_endAdderEntry, _state.endAdder);
            break;

        case R.id.saveButton:
            final String errorMessage = getErrorMessage();
            if (errorMessage != null) {
                Toast.makeText(this, errorMessage, Toast.LENGTH_SHORT).show();
            }
            else {
                final ImmutableCorrelation<AlphabetId> startMatcher = buildCorrelation(_state.startMatcher);
                final ImmutableCorrelationArray<AlphabetId> startAdder = _state.startAdder;
                final ImmutableCorrelation<AlphabetId> endMatcher = buildCorrelation(_state.endMatcher);
                final ImmutableCorrelationArray<AlphabetId> endAdder = _state.endAdder;
                // A rule only makes sense when the agent actually modifies the text.
                final RuleId rule = (startMatcher.equals(startAdder.concatenateTexts()) && endMatcher.equals(endAdder.concatenateTexts()))? null : _state.rule;
                final AgentId givenAgentId = getAgentId();
                final LangbookDbManager manager = DbManager.getInstance().getManager();
                final ImmutableSet<BunchId> targetBunches = _state.targetBunches.toImmutable().toSet();
                final ImmutableSet<BunchId> sourceBunches = _state.sourceBunches.toImmutable().toSet();
                final ImmutableSet<BunchId> diffBunches = _state.diffBunches.toImmutable().toSet();
                if (givenAgentId == null) {
                    // Creating a brand-new agent.
                    final AgentId agentId = manager.addAgent(targetBunches, sourceBunches, diffBunches,
                            startMatcher, startAdder, endMatcher, endAdder, rule);
                    final int message = (agentId != null) ? R.string.newAgentFeedback : R.string.newAgentError;
                    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
                    if (agentId != null) {
                        finish();
                    }
                }
                else {
                    // Updating the agent that was opened for edition.
                    final boolean success = manager.updateAgent(givenAgentId, targetBunches, sourceBunches, diffBunches,
                            startMatcher, startAdder, endMatcher, endAdder, rule);
                    final int message = success? R.string.updateAgentFeedback : R.string.updateAgentError;
                    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
                    if (success) {
                        finish();
                    }
                }
            }
            break;
    }
}
/**
 * Consumes results from the acceptation/language pickers launched in onClick
 * and updates both the model state and the UI accordingly.
 *
 * NOTE(review): super.onActivityResult is not invoked here — confirm nothing
 * relies on result forwarding before adding the call.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    final LangbookDbManager manager = DbManager.getInstance().getManager();
    if (requestCode == REQUEST_CODE_DEFINE_START_ADDER && resultCode == RESULT_OK) {
        final ParcelableCorrelationArray parcelableCorrelationArray = data.getParcelableExtra(LanguagePickerActivity.ResultKeys.CORRELATION_ARRAY);
        _state.startAdder = parcelableCorrelationArray.get();
        bindAdder(_startAdderEntry, _state.startAdder);
    }
    else if (requestCode == REQUEST_CODE_DEFINE_END_ADDER && resultCode == RESULT_OK) {
        final ParcelableCorrelationArray parcelableCorrelationArray = data.getParcelableExtra(LanguagePickerActivity.ResultKeys.CORRELATION_ARRAY);
        _state.endAdder = parcelableCorrelationArray.get();
        bindAdder(_endAdderEntry, _state.endAdder);
    }
    else if (requestCode == REQUEST_CODE_PICK_TARGET_BUNCH && resultCode == RESULT_OK) {
        final AcceptationId acceptation = AcceptationIdBundler.readAsIntentExtra(data, AcceptationPickerActivity.ResultKeys.STATIC_ACCEPTATION);
        if (acceptation == null) {
            throw new AssertionError();
        }
        final BunchId bunch = conceptAsBunchId(manager.conceptFromAcceptation(acceptation));
        _state.targetBunches.append(bunch);
        // Reuse the manager fetched above instead of querying DbManager again.
        addBunch(manager, bunch, _targetBunchesContainer, _state.targetBunches);
    }
    else if (requestCode == REQUEST_CODE_PICK_SOURCE_BUNCH && resultCode == RESULT_OK) {
        final AcceptationId acceptation = AcceptationIdBundler.readAsIntentExtra(data, AcceptationPickerActivity.ResultKeys.STATIC_ACCEPTATION);
        if (acceptation == null) {
            throw new AssertionError();
        }
        final BunchId concept = conceptAsBunchId(manager.conceptFromAcceptation(acceptation));
        _state.sourceBunches.append(concept);
        addBunch(manager, concept, _sourceBunchesContainer, _state.sourceBunches);
    }
    else if (requestCode == REQUEST_CODE_PICK_DIFF_BUNCH && resultCode == RESULT_OK) {
        final AcceptationId acceptation = AcceptationIdBundler.readAsIntentExtra(data, AcceptationPickerActivity.ResultKeys.STATIC_ACCEPTATION);
        if (acceptation == null) {
            throw new AssertionError();
        }
        final BunchId bunch = conceptAsBunchId(manager.conceptFromAcceptation(acceptation));
        _state.diffBunches.append(bunch);
        addBunch(manager, bunch, _diffBunchesContainer, _state.diffBunches);
    }
    else if (requestCode == REQUEST_CODE_PICK_RULE && resultCode == RESULT_OK) {
        final AcceptationId acceptation = AcceptationIdBundler.readAsIntentExtra(data, AcceptationPickerActivity.ResultKeys.STATIC_ACCEPTATION);
        if (acceptation == null) {
            throw new AssertionError();
        }
        final ConceptId concept = manager.conceptFromAcceptation(acceptation);
        _state.rule = RuleIdManager.conceptAsRuleId(concept);
        final String text = manager.readConceptText(concept, _preferredAlphabet);
        final TextView textView = findViewById(R.id.ruleText);
        textView.setText(text);
    }
}
/** Persists the whole editor state so it survives configuration changes. */
@Override
protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    outState.putParcelable(SavedKeys.STATE, _state);
}
/**
 * Validates the current editor state before saving.
 *
 * @return a human-readable error message, or null when the state is valid.
 */
private String getErrorMessage() {
    final ImmutableSet<BunchId> targets = _state.targetBunches.toSet().toImmutable();
    final ImmutableSet<BunchId> sources = _state.sourceBunches.toSet().toImmutable();
    final ImmutableSet<BunchId> diffs = _state.diffBunches.toSet().toImmutable();
    // A bunch may appear in at most one of the three roles.
    if (targets.anyMatch(bunch -> bunch.isNoBunchForQuiz() || sources.contains(bunch) || diffs.contains(bunch))) {
        return "Invalid target bunch selection";
    }
    if (sources.anyMatch(bunch -> bunch.isNoBunchForQuiz() || targets.contains(bunch) || diffs.contains(bunch))) {
        // Fixed copy-paste bug: this branch previously reported "target".
        return "Invalid source bunch selection";
    }
    if (diffs.anyMatch(bunch -> bunch.isNoBunchForQuiz() || targets.contains(bunch) || sources.contains(bunch))) {
        return "Invalid diff bunch selection";
    }
    final MutableSet<AlphabetId> alphabets = MutableHashSet.empty();
    final ImmutableCorrelation.Builder<AlphabetId> startMatcherBuilder = new ImmutableCorrelation.Builder<>();
    for (Correlation.Entry<AlphabetId> entry : _state.startMatcher) {
        if (alphabets.contains(entry.alphabet)) {
            return "Unable to duplicate alphabet in start matcher";
        }
        alphabets.add(entry.alphabet);
        startMatcherBuilder.put(entry.alphabet, entry.text);
        if (TextUtils.isEmpty(entry.text)) {
            return "Start matcher entries cannot be empty";
        }
    }
    final ImmutableCorrelation<AlphabetId> startMatcher = startMatcherBuilder.build();
    alphabets.clear();
    final ImmutableCorrelation.Builder<AlphabetId> endMatcherBuilder = new ImmutableCorrelation.Builder<>();
    for (Correlation.Entry<AlphabetId> entry : _state.endMatcher) {
        if (alphabets.contains(entry.alphabet)) {
            return "Unable to duplicate alphabet in end matcher";
        }
        alphabets.add(entry.alphabet);
        endMatcherBuilder.put(entry.alphabet, entry.text);
        if (TextUtils.isEmpty(entry.text)) {
            return "End matcher entries cannot be empty";
        }
    }
    final ImmutableCorrelation<AlphabetId> endMatcher = endMatcherBuilder.build();
    if (sources.isEmpty() && _state.startMatcher.isEmpty() && _state.endMatcher.isEmpty()) {
        // This would select all acceptations from the database, which has no sense
        return "Source bunches and matchers cannot be both empty";
    }
    // A rule is only required when the agent actually changes the text.
    final boolean ruleRequired = !startMatcher.equals(_state.startAdder.concatenateTexts()) || !endMatcher.equals(_state.endAdder.concatenateTexts());
    if (ruleRequired && _state.rule == null) {
        return "Rule is required when matcher and adder do not match";
    }
    return null;
}
}
|
/// Prints (and returns) the space-separated words of `input` in reverse order.
///
/// Fixed: the original declared an unconstrained generic parameter `<T>`,
/// but called `split(separator:)` on it, which does not compile for an
/// arbitrary `T`; the parameter is now a `String`. Also replaced `var`
/// with `let` for never-mutated values.
///
/// - Parameter input: A whitespace-separated string of words.
/// - Returns: The input with its words in reverse order (discardable, so
///   existing call sites that ignore the result keep working).
@discardableResult
func reverse(input: String) -> String {
    // `split` drops empty substrings, so runs of spaces collapse to one.
    let reversedWords = input.split(separator: " ").reversed()
    let reversedString = reversedWords.joined(separator: " ")
    print(reversedString)
    return reversedString
}
# Make the repository's CLI helpers (peer.sh, etc.) resolvable from this script.
export PATH=$PATH:../../bin/

# Package the "diagnose" golang chaincode using the peer CLI, acting as
# peer2.astri.org of astriMSP. The exported variables are read by peer.sh.
package() {
    export chaincodeId=diagnose
    export chaincodePath=github.com/davidkhala/chaincode/golang/diagnose
    export FABRIC_CFG_PATH=$PWD
    export CORE_PEER_LOCALMSPID=astriMSP
    export CORE_PEER_MSPCONFIGPATH=$HOME/Documents/delphi-fabric/config/ca-crypto-config/peerOrganizations/astri.org/peers/peer2.astri.org/msp
    ../peer.sh package
}

# Dispatch: run the function named by the first CLI argument, e.g. `./x.sh package`.
$1
<filename>src/main/java/org/terracottamc/network/raknet/protocol/ProtocolHandler.java
package org.terracottamc.network.raknet.protocol;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.SimpleChannelInboundHandler;
import org.terracottamc.entity.player.Player;
import org.terracottamc.network.packet.LoginPacket;
import org.terracottamc.network.packet.Packet;
import org.terracottamc.network.packet.handler.IPacketHandler;
import org.terracottamc.network.packet.handler.LoginPacketHandler;
import org.terracottamc.server.Server;
import java.net.InetSocketAddress;
/**
* Copyright (c) 2021, TerracottaMC
* All rights reserved.
*
* <p>
* This project is licensed under the BSD 3-Clause License which
* can be found in the root directory of this source tree
*
* @author Kaooot
* @version 1.0
*/
/**
 * Inbound Netty handler that dispatches decoded {@link Packet}s to their
 * registered {@link IPacketHandler}. Login packets are handled specially,
 * before a {@link Player} is associated with the channel.
 */
public class ProtocolHandler extends SimpleChannelInboundHandler<Packet> {

    // Name under which this handler is registered in the Netty pipeline.
    public static final String NAME = "protocol-handler";

    private final Server server;

    // NOTE(review): this single static reference is overwritten every time any
    // channel registers, so with multiple concurrent connections
    // getChannelPipeline() only reflects the most recently registered channel.
    // Confirm that is intended.
    private static ChannelPipeline channelPipeline;

    public ProtocolHandler() {
        this.server = Server.getInstance();
    }

    @Override
    public void channelRegistered(final ChannelHandlerContext ctx) throws Exception {
        super.channelRegistered(ctx);
        ProtocolHandler.channelPipeline = ctx.pipeline();
    }

    @Override
    protected void channelRead0(final ChannelHandlerContext ctx, final Packet packet) {
        final Channel channel = ctx.channel();
        if (packet.getClass().equals(LoginPacket.class)) {
            // Login is handled with the raw channel: no Player exists yet.
            final LoginPacket loginPacket = (LoginPacket) packet;
            final LoginPacketHandler loginPacketHandler = (LoginPacketHandler)
                    this.server.getPacketRegistry().retrievePacketHandlerByClass(LoginPacket.class);
            loginPacketHandler.handleLogin(loginPacket, channel);
            return;
        }

        // Unchecked cast: the registry maps packet classes to their handlers.
        final IPacketHandler<Packet> packetHandler = (IPacketHandler<Packet>) this.server.getPacketRegistry()
                .retrievePacketHandlerByClass(packet.getClass());
        if (packetHandler == null) {
            // No handler registered for this packet type; silently ignore it.
            return;
        }

        final Player player = this.server.getPlayerByAddress((InetSocketAddress) channel.remoteAddress());
        if (player != null) {
            packetHandler.handle(packet, player);
        }
    }

    /**
     * Obtains the {@link io.netty.channel.ChannelPipeline}
     * of this {@link org.terracottamc.network.raknet.protocol.ProtocolHandler}
     *
     * @return a fresh {@link io.netty.channel.ChannelPipeline}
     */
    public static ChannelPipeline getChannelPipeline() {
        return ProtocolHandler.channelPipeline;
    }
}
import random
import secrets

# Alphabet used for generated passwords: letters, common symbols and digits.
password_characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*().,?0123456789'


def generate_password(length):
    """Return a random password of `length` characters from password_characters.

    Uses the `secrets` module (a CSPRNG) rather than `random.choices`:
    the `random` module is not suitable for security-sensitive values
    such as passwords.

    :param length: number of characters to generate (must be >= 0).
    :raises ValueError: if length is negative.
    """
    if length < 0:
        raise ValueError("length must be non-negative")
    return ''.join(secrets.choice(password_characters) for _ in range(length))


print(generate_password(10))
import type { ISanivaliDef } from '../types';
/** Minimum number of items required by the `minItems` rule. */
export declare type MinItemsParam = number;
/** Rule tuple form used in sanitization/validation schemas: `['minItems', n]`. */
export declare type MinItemsRuleItem = ['minItems', MinItemsParam];
/** Sanivali definition implementing the `minItems` validation rule. */
export declare const minItemsDef: ISanivaliDef;
//# sourceMappingURL=minItems.d.ts.map
#!/bin/sh
# Docker entrypoint for a Matomo image: renders PHP/Nginx/SSMTP configuration
# from templates and environment variables, prepares the /data volume, and
# finally execs the container command.
#
# NOTE(review): the `function` keyword below and the `[[ ]]` test further down
# are bashisms; the shebang is /bin/sh — confirm the image's sh supports them.

# Run a command as the nginx user (login shell with /bin/sh).
function runas_nginx() {
	su - nginx -s /bin/sh -c "$1"
}

# Defaults for all tunable environment variables.
TZ=${TZ:-UTC}
MEMORY_LIMIT=${MEMORY_LIMIT:-256M}
UPLOAD_MAX_SIZE=${UPLOAD_MAX_SIZE:-16M}
OPCACHE_MEM_SIZE=${OPCACHE_MEM_SIZE:-128}
REAL_IP_FROM=${REAL_IP_FROM:-0.0.0.0/32}
REAL_IP_HEADER=${REAL_IP_HEADER:-X-Forwarded-For}
LOG_IP_VAR=${LOG_IP_VAR:-remote_addr}
LOG_LEVEL=${LOG_LEVEL:-WARN}
SIDECAR_CRON=${SIDECAR_CRON:-0}
SSMTP_PORT=${SSMTP_PORT:-25}
SSMTP_HOSTNAME=${SSMTP_HOSTNAME:-$(hostname -f)}
SSMTP_TLS=${SSMTP_TLS:-NO}

# Timezone
echo "Setting timezone to ${TZ}..."
ln -snf /usr/share/zoneinfo/${TZ} /etc/localtime
echo ${TZ} > /etc/timezone

# PHP: render the FPM pool config from its template.
echo "Setting PHP-FPM configuration..."
sed -e "s/@MEMORY_LIMIT@/$MEMORY_LIMIT/g" \
	-e "s/@UPLOAD_MAX_SIZE@/$UPLOAD_MAX_SIZE/g" \
	/tpls/etc/php7/php-fpm.d/www.conf > /etc/php7/php-fpm.d/www.conf

# OpCache
echo "Setting OpCache configuration..."
sed -e "s/@OPCACHE_MEM_SIZE@/$OPCACHE_MEM_SIZE/g" \
	/tpls/etc/php7/conf.d/opcache.ini > /etc/php7/conf.d/opcache.ini

# Nginx: '#' used as sed delimiter because values may contain '/'.
echo "Setting Nginx configuration..."
sed -e "s#@UPLOAD_MAX_SIZE@#$UPLOAD_MAX_SIZE#g" \
	-e "s#@REAL_IP_FROM@#$REAL_IP_FROM#g" \
	-e "s#@REAL_IP_HEADER@#$REAL_IP_HEADER#g" \
	-e "s#@LOG_IP_VAR@#$LOG_IP_VAR#g" \
	/tpls/etc/nginx/nginx.conf > /etc/nginx/nginx.conf

# SSMTP: outbound mail relay; only configured when SSMTP_HOST is provided.
echo "Setting SSMTP configuration..."
if [ -z "$SSMTP_HOST" ] ; then
	echo "WARNING: SSMTP_HOST must be defined if you want to send emails"
else
	cat > /etc/ssmtp/ssmtp.conf <<EOL
mailhub=${SSMTP_HOST}:${SSMTP_PORT}
hostname=${SSMTP_HOSTNAME}
FromLineOverride=YES
AuthUser=${SSMTP_USER}
AuthPass=${SSMTP_PASSWORD}
UseTLS=${SSMTP_TLS}
UseSTARTTLS=${SSMTP_TLS}
EOL
fi
# Drop mail credentials from the environment once written to disk.
unset SSMTP_HOST
unset SSMTP_USER
unset SSMTP_PASSWORD

# Init Matomo
echo "Initializing Matomo files / folders..."
mkdir -p /data/config /data/misc /data/plugins /data/session /data/tmp /etc/supervisord /var/log/supervisord

# Copy global config
cp -Rf /var/www/config /data/

# Check plugins: symlink every plugin directory from the /data volume into
# the web root, replacing any bundled copy of the same plugin.
echo "Checking Matomo plugins..."
plugins=$(ls -l /data/plugins | egrep '^d' | awk '{print $9}')
for plugin in ${plugins}; do
	if [ -d /var/www/plugins/${plugin} ]; then
		rm -rf /var/www/plugins/${plugin}
	fi
	echo "  - Adding ${plugin}"
	ln -sf /data/plugins/${plugin} /var/www/plugins/${plugin}
done

# Check user folder: move it to the volume once, then symlink it back.
echo "Checking Matomo user-misc folder..."
if [ ! -d /data/misc/user ]; then
	if [[ ! -L /var/www/misc/user && -d /var/www/misc/user ]]; then
		mv -f /var/www/misc/user /data/misc/
	fi
	ln -sf /data/misc/user /var/www/misc/user
fi

# Fix perms
echo "Fixing permissions..."
chown -R nginx. /data

# Sidecar cron container ?
if [ "$SIDECAR_CRON" = "1" ]; then
	echo ">>"
	echo ">> Sidecar cron container detected for Matomo"
	echo ">>"
	# Init: cron-only container runs neither nginx nor php-fpm.
	# NOTE(review): CRONTAB_PATH is expected from the image environment;
	# no default is set in this script — confirm it is always defined.
	rm /etc/supervisord/nginx.conf /etc/supervisord/php.conf
	rm -rf ${CRONTAB_PATH}
	mkdir -m 0644 -p ${CRONTAB_PATH}
	touch ${CRONTAB_PATH}/nginx
	# GeoIP
	if [ ! -z "$CRON_GEOIP" ]; then
		echo "Creating GeoIP cron task with the following period fields : $CRON_GEOIP"
		echo "${CRON_GEOIP} /usr/local/bin/update_geoip" >> ${CRONTAB_PATH}/nginx
	else
		echo "CRON_GEOIP env var empty..."
	fi
	# Archive
	if [ ! -z "$CRON_ARCHIVE" ]; then
		echo "Creating Matomo archive cron task with the following period fields : $CRON_ARCHIVE"
		echo "${CRON_ARCHIVE} /usr/local/bin/matomo_archive" >> ${CRONTAB_PATH}/nginx
	else
		echo "CRON_ARCHIVE env var empty..."
	fi
	# Fix perms
	echo "Fixing permissions..."
	chmod -R 0644 ${CRONTAB_PATH}
else
	# Web container: no cron supervision needed.
	rm /etc/supervisord/cron.conf
	# Check if already installed: config.ini.php only exists after the wizard ran.
	if [ -f /data/config/config.ini.php ]; then
		echo "Setting Matomo log level to $LOG_LEVEL..."
		runas_nginx "php /var/www/console config:set --section='log' --key='log_level' --value='$LOG_LEVEL'"
		echo "Upgrading and setting Matomo configuration..."
		runas_nginx "php /var/www/console core:update --yes --no-interaction"
		runas_nginx "php /var/www/console config:set --section='General' --key='minimum_memory_limit' --value='-1'"
	else
		echo ">>"
		echo ">> Open your browser to install Matomo through the wizard"
		echo ">>"
	fi
fi

# Hand control to the container's CMD (typically supervisord).
exec "$@"
#!/bin/bash
# chainali_1a is as 1a except it uses chain alignments (using 1a system) instead of gmm alignments
# local/chain/compare_wer.sh exp/chain/cnn_chainali_1a
# System cnn_chainali_1a (dict_50k) cnn_chainali_1a(dict_50k + unk_model)
# WER 15.93 14.09
# CER 7.79 6.70
# WER val 15.10 12.63
# CER val 6.72 5.36
# Final train prob -0.0220
# Final valid prob -0.0157
# Final train prob (xent) -0.4238
# Final valid prob (xent) -0.6119
# Parameters 4.36M
# steps/info/chain_dir_info.pl exp/chain/cnn_chainali_1a
# exp/chain/cnn_chainali_1a: num-iters=42 nj=2..4 num-params=4.4M dim=40->368 combine=-0.020->-0.020 (over 2) xent:train/valid[27,41,final]=(-0.534,-0.425,-0.424/-0.659,-0.612,-0.612) logprob:train/valid[27,41,final]=(-0.026,-0.022,-0.022/-0.017,-0.016,-0.016)
set -e -o pipefail
stage=0
nj=30
train_set=train
decode_val=true
gmm=tri3 # this is the source gmm-dir that we'll use for alignments; it
# should have alignments for the specified training data.
nnet3_affix= # affix for exp dirs, e.g. it was _cleaned in tedlium.
affix=_1a #affix for TDNN+LSTM directory e.g. "1a" or "1b", in case we change the configuration.
ali=tri3_ali
chain_model_dir=exp/chain${nnet3_affix}/cnn_1a
common_egs_dir=
reporting_email=
# chain options
train_stage=-10
xent_regularize=0.1
chunk_width=340,300,200,100
num_leaves=500
tdnn_dim=450
lang_decode=lang_unk
if $decode_val; then maybe_val=val; else maybe_val= ; fi
# End configuration section.
echo "$0 $@" # Print the command line for logging
. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi
gmm_dir=exp/${gmm}
ali_dir=exp/${ali}
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_lats_chain
gmm_lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_lats
dir=exp/chain${nnet3_affix}/cnn_chainali${affix}
train_data_dir=data/${train_set}
tree_dir=exp/chain${nnet3_affix}/tree_chain
# the 'lang' directory is created by this script.
# If you create such a directory with a non-standard topology
# you should probably name it differently.
lang=data/lang_chain
for f in $train_data_dir/feats.scp \
$train_data_dir/feats.scp $gmm_dir/final.mdl \
$ali_dir/ali.1.gz $gmm_dir/final.mdl; do
[ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done
# Stage 1: create a chain-topology copy of the lang directory.
if [ $stage -le 1 ]; then
echo "$0: creating lang directory $lang with chain-type topology"
# Create a version of the lang/ directory that has one state per phone in the
# topo file. [note, it really has two states.. the first one is only repeated
# once, the second one has zero or more repeats.]
if [ -d $lang ]; then
# Re-use an existing $lang only if it is at least as new as data/lang.
if [ $lang/L.fst -nt data/lang/L.fst ]; then
echo "$0: $lang already exists, not overwriting it; continuing"
else
echo "$0: $lang already exists and seems to be older than data/lang..."
echo " ... not sure what to do. Exiting."
exit 1;
fi
else
cp -r data/lang $lang
silphonelist=$(cat $lang/phones/silence.csl) || exit 1;
nonsilphonelist=$(cat $lang/phones/nonsilence.csl) || exit 1;
# Use our special topology... note that later on may have to tune this
# topology.
steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >$lang/topo
fi
fi
# Stage 2: convert alignments into lattices for chain training.
if [ $stage -le 2 ]; then
# Get the alignments as lattices (gives the chain training more freedom).
# use the same num-jobs as the alignments
steps/nnet3/align_lats.sh --nj $nj --cmd "$cmd" \
--acoustic-scale 1.0 \
--scale-opts '--transition-scale=1.0 --self-loop-scale=1.0' \
$train_data_dir data/lang $chain_model_dir $lat_dir
cp $gmm_lat_dir/splice_opts $lat_dir/splice_opts
fi
# Stage 3: build the context-dependency tree using the chain topology.
if [ $stage -le 3 ]; then
# Build a tree using our new topology. We know we have alignments for the
# speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use
# those. The num-leaves is always somewhat less than the num-leaves from
# the GMM baseline.
if [ -f $tree_dir/final.mdl ]; then
echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it."
exit 1;
fi
steps/nnet3/chain/build_tree.sh \
--frame-subsampling-factor 4 \
--context-opts "--context-width=2 --central-position=1" \
--cmd "$cmd" $num_leaves $train_data_dir \
$lang $ali_dir $tree_dir
fi
# Stage 4: write the nnet3 xconfig (CNN + TDNN body, chain + xent output
# heads) and compile it into training configs.
if [ $stage -le 4 ]; then
mkdir -p $dir
echo "$0: creating neural net configs using the xconfig parser";
# Number of network outputs = number of tree leaves (pdfs).
num_targets=$(tree-info $tree_dir/tree | grep num-pdfs | awk '{print $2}')
learning_rate_factor=$(echo "print (0.5/$xent_regularize)" | python)
common1="height-offsets=-2,-1,0,1,2 num-filters-out=36"
common2="height-offsets=-2,-1,0,1,2 num-filters-out=70"
# NOTE(review): common3 is defined but not referenced by the network below.
common3="height-offsets=-1,0,1 num-filters-out=70"
mkdir -p $dir/configs
# The heredoc below is written verbatim to network.xconfig; '#' lines inside
# it are comments for the xconfig parser, so its content must stay as-is.
cat <<EOF > $dir/configs/network.xconfig
input dim=40 name=input
conv-relu-batchnorm-layer name=cnn1 height-in=40 height-out=40 time-offsets=-3,-2,-1,0,1,2,3 $common1
conv-relu-batchnorm-layer name=cnn2 height-in=40 height-out=20 time-offsets=-2,-1,0,1,2 $common1 height-subsample-out=2
conv-relu-batchnorm-layer name=cnn3 height-in=20 height-out=20 time-offsets=-4,-2,0,2,4 $common2
conv-relu-batchnorm-layer name=cnn4 height-in=20 height-out=10 time-offsets=-4,-2,0,2,4 $common2 height-subsample-out=2
relu-batchnorm-layer name=tdnn1 input=Append(-4,-2,0,2,4) dim=$tdnn_dim
relu-batchnorm-layer name=tdnn2 input=Append(-4,0,4) dim=$tdnn_dim
relu-batchnorm-layer name=tdnn3 input=Append(-4,0,4) dim=$tdnn_dim
relu-batchnorm-layer name=tdnn4 input=Append(-4,0,4) dim=$tdnn_dim
## adding the layers for chain branch
relu-batchnorm-layer name=prefinal-chain dim=$tdnn_dim target-rms=0.5
output-layer name=output include-log-softmax=false dim=$num_targets max-change=1.5
# adding the layers for xent branch
# This block prints the configs for a separate output that will be
# trained with a cross-entropy objective in the 'chain' mod?els... this
# has the effect of regularizing the hidden parts of the model. we use
# 0.5 / args.xent_regularize as the learning rate factor- the factor of
# 0.5 / args.xent_regularize is suitable as it means the xent
# final-layer learns at a rate independent of the regularization
# constant; and the 0.5 was tuned so as to make the relative progress
# similar in the xent and regular final layers.
relu-batchnorm-layer name=prefinal-xent input=tdnn4 dim=$tdnn_dim target-rms=0.5
output-layer name=output-xent dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi
# Stage 5: run chain training.
if [ $stage -le 5 ]; then
# On the CLSP grid, spread egs storage across several disks.
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
utils/create_split_dir.pl \
/export/b0{3,4,5,6}/$USER/kaldi-data/egs/iam-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage
fi
steps/nnet3/chain/train.py --stage=$train_stage \
--cmd="$cmd" \
--feat.cmvn-opts="--norm-means=false --norm-vars=false" \
--chain.xent-regularize $xent_regularize \
--chain.leaky-hmm-coefficient=0.1 \
--chain.l2-regularize=0.00005 \
--chain.apply-deriv-weights=false \
--chain.lm-opts="--num-extra-lm-states=500" \
--chain.frame-subsampling-factor=4 \
--chain.alignment-subsampling-factor=1 \
--trainer.srand=0 \
--trainer.max-param-change=2.0 \
--trainer.num-epochs=4 \
--trainer.frames-per-iter=1000000 \
--trainer.optimization.num-jobs-initial=2 \
--trainer.optimization.num-jobs-final=4 \
--trainer.optimization.initial-effective-lrate=0.001 \
--trainer.optimization.final-effective-lrate=0.0001 \
--trainer.optimization.shrink-value=1.0 \
--trainer.num-chunk-per-minibatch=64,32 \
--egs.chunk-width=$chunk_width \
--egs.dir="$common_egs_dir" \
--egs.opts="--frames-overlap-per-eg 0" \
--cleanup.remove-egs=false \
--use-gpu=true \
--reporting.email="$reporting_email" \
--feat-dir=$train_data_dir \
--tree-dir=$tree_dir \
--lat-dir=$lat_dir \
--dir=$dir || exit 1;
fi
# Stage 6: compile the decoding graph.
if [ $stage -le 6 ]; then
# The reason we are using data/lang here, instead of $lang, is just to
# emphasize that it's not actually important to give mkgraph.sh the
# lang directory with the matched topology (since it gets the
# topology file from the model). So you could give it a different
# lang directory, one that contained a wordlist and LM of your choice,
# as long as phones.txt was compatible.
utils/mkgraph.sh \
--self-loop-scale 1.0 data/$lang_decode \
$dir $dir/graph || exit 1;
fi
# Stage 7: decode the test (and, if set, validation) sets.
if [ $stage -le 7 ]; then
# chunk_width may be a comma-separated list; decoding uses the first value.
frames_per_chunk=$(echo $chunk_width | cut -d, -f1)
for decode_set in test $maybe_val; do
steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
--frames-per-chunk $frames_per_chunk \
--nj $nj --cmd "$cmd" \
$dir/graph data/$decode_set $dir/decode_$decode_set || exit 1;
done
fi
echo "$0 Done. Date: $(date). Results:"
local/chain/compare_wer.sh $dir
|
#!/bin/bash
set -ex
# Note that we update the `PATH` to include
# `~/cached-deps` in `.travis.yml`, but this doesn't update the PATH for
# calls using `sudo`. If you need to make a `sudo` call to a binary in
# `~/cached-deps`, you'll need to explicitly set the path like so:
#
#     sudo env "PATH=$PATH" minikube foo
kubectl version --client
etcdctl --version
minikube delete || true # In case we get a recycled machine
make launch-kube
sleep 5
# Wait until a connection with kubernetes has been established
echo "Waiting for connection to kubernetes..."
max_t=90
WHEEL='\|/-';
until {
# Silence both streams; only the exit status matters. Redirection order
# matters: stdout must point at /dev/null *before* stderr is duplicated
# onto it (the original `2>&1 >/dev/null` left stderr on the terminal).
minikube status >/dev/null 2>&1
kubectl version >/dev/null 2>&1
}; do
if ((max_t-- <= 0)); then
echo "Could not connect to minikube"
echo "minikube status --alsologtostderr --loglevel=0 -v9:"
echo "==================================================="
minikube status --alsologtostderr --loglevel=0 -v9
exit 1
fi
# Spinner: print the first character, then rotate the string left by one.
# (The original used Makefile-style `$${...}`; in a shell script `$$`
# expands to the PID, so the spinner printed garbage instead of animating.)
echo -en "\e[G${WHEEL:0:1}";
WHEEL="${WHEEL:1}${WHEEL:0:1}";
sleep 1;
done
minikube status
kubectl version
echo "Running test suite based on BUCKET=$BUCKET"
make install
make docker-build
for i in $(seq 3); do
make clean-launch-dev || true # may be nothing to delete
make launch-dev && break
(( i < 3 )) # false if this is the last loop (causes exit)
sleep 10
done
pachctl config update context `pachctl config get active-context` --pachd-address=$(minikube ip):30650
# Run one bucket (shard) of a go package's tests.
#   $1 - go package path to test
#   $2 - make target that actually runs the tests (receives RUN=<regex>)
#   $3 - 1-based bucket index (must be > 0)
#   $4 - total number of buckets
function test_bucket {
set +x
package="${1}"
target="${2}"
bucket="${3}"
num_buckets="${4}"
if (( bucket == 0 )); then
echo "Error: bucket should be > 0, but was 0" >/dev/stderr
exit 1
fi
echo "Running bucket $bucket of $num_buckets"
# List all test names in the package (dropping the trailing "ok ..." summary
# line and Benchmark entries).
tests=( $(go test -v "${package}" -list ".*" | grep -v ok | grep -v Benchmark) )
total_tests="${#tests[@]}"
# Determine the offset and length of the sub-array of tests we want to run
# The last bucket may have a few extra tests, to accommodate rounding errors from bucketing:
let \
"bucket_size=total_tests/num_buckets" \
"start=bucket_size * (bucket-1)" \
"bucket_size+=bucket < num_buckets ? 0 : total_tests%num_buckets"
# Join the selected test names with '|' to form a -run regex.
test_regex="$(IFS=\|; echo "${tests[*]:start:bucket_size}")"
echo "Running ${bucket_size} tests of ${total_tests} total tests"
make RUN="-run=\"${test_regex}\"" "${target}"
set -x
}
# Clean cached test results
go clean -testcache
# Dispatch on $BUCKET: each CI job runs one slice of the overall suite.
case "${BUCKET}" in
MISC)
# Secret-dependent suites (vault, enterprise, s3 gateway, ...) only run
# when the secure env vars are available (i.e. not on fork PRs).
if [[ "$TRAVIS_SECURE_ENV_VARS" == "true" ]]; then
echo "Running the full misc test suite because secret env vars exist"
make lint
make enterprise-code-checkin-test
make test-cmds
make test-libs
make test-tls
make test-vault
make test-enterprise
make test-worker
make test-s3gateway-integration
make test-proto-static
make test-transaction
else
echo "Running the misc test suite with some tests disabled because secret env vars have not been set"
make lint
make enterprise-code-checkin-test
make test-cmds
make test-libs
make test-tls
fi
;;
ADMIN)
make test-admin
;;
EXAMPLES)
echo "Running the example test suite"
./etc/testing/examples.sh
;;
PFS)
make test-pfs-server
make test-pfs-storage
;;
# PPS1, PPS2, ... - sharded PPS tests; the shard index follows the prefix.
PPS?)
make docker-build-kafka
bucket_num="${BUCKET#PPS}"
test_bucket "./src/server" test-pps "${bucket_num}" "${PPS_BUCKETS}"
# The last PPS shard additionally runs the long pps server tests.
if [[ "${bucket_num}" -eq "${PPS_BUCKETS}" ]]; then
go test -v -count=1 ./src/server/pps/server -timeout 3600s
fi
;;
AUTH?)
bucket_num="${BUCKET#AUTH}"
test_bucket "./src/server/auth/server" test-auth "${bucket_num}" "${AUTH_BUCKETS}"
# (test_bucket re-enabled xtrace on exit; this switches it back off)
set +x
;;
*)
echo "Unknown bucket"
exit 1
;;
esac
|
package socketgameserver.model;
/**
 * Server-side operations invoked on behalf of a connected client.
 * Each client is identified by its {@code sessionId}.
 */
public interface IGame {
    /** Authenticate an existing player and associate them with the given session. */
    void loginPlayer(String userName, String password, String sessionId);
    /** Create a new player account and associate it with the given session. */
    void registerNewPlayer(String username, String password, String sessionId);
    /** Release server-side state after the client for this session disconnects. */
    void processClientDisconnect(String sessionId);
    /** Start a new drawn path at (x, y) using the given RGB stroke colour. */
    void startPath(String sessionId, double x, double y, double colorR, double colorG, double colorB);
    /** Append the point (x, y) to the session's current path. */
    void addPointToPath(String sessionId, double x, double y);
    /** Clear the canvas. */
    void clearCanvas(String sessionId);
    /** Submit a guess for the current word. */
    void guessWord(String sessionId, String guess);
    /** @return the number of players currently known to the game. */
    int getNumberOfPlayers();
}
|
from bs4 import BeautifulSoup
import requests
def get_html_tags(url, timeout=10):
    """Fetch a web page and return the names of all HTML tags it contains.

    :param url: URL of the page to scrape.
    :param timeout: Seconds to wait for the HTTP response. ``requests.get``
        blocks indefinitely without one, so a finite default is supplied
        (pass a larger value for slow hosts).
    :return: List of tag names (str), one entry per element, in document order.
    :raises requests.RequestException: on connection errors or timeout.
    """
    page = requests.get(url, timeout=timeout)
    soup = BeautifulSoup(page.content, 'html.parser')
    return [tag.name for tag in soup.find_all()]
from scrapy import Spider
from scrapy.http import Request
from firmware.items import FirmwareImage
from firmware.loader import FirmwareLoader
import json
import urllib.parse
class TPLinkENSpider(Spider):
    # Crawls TP-Link's English download center and yields FirmwareImage items.
    name = "tp-link_en"
    vendor = "tp-link"
    allowed_domains = ["tp-link.com"]
    start_urls = ["http://www.tp-link.com/en/download-center.html"]

    def parse(self, response):
        # For every product category in the category <select>, request the
        # AJAX handler listing its sub-categories.
        for cid in response.xpath(
                "//select[@id='slcProductCat']//option/@value").extract():
            yield Request(
                url=urllib.parse.urljoin(
                    response.url, "handlers/handler.ashx?action=getsubcatlist&catid=%s" % cid),
                meta={"cid": cid},
                headers={"Referer": response.url,
                         "X-Requested-With": "XMLHttpRequest"},
                callback=self.parse_json)

    def parse_json(self, response):
        # Walk the category tree: non-empty responses list child categories
        # (recurse); an empty response means a leaf, so request its product
        # listing instead.
        # NOTE(review): response.body_as_unicode() is deprecated in newer
        # Scrapy releases in favour of response.text -- confirm the pinned
        # Scrapy version before upgrading.
        json_response = json.loads(response.body_as_unicode())
        if json_response:
            for entry in json_response:
                yield Request(
                    url=urllib.parse.urljoin(
                        response.url, "handler.ashx?action=getsubcatlist&catid=%s" % entry["id"]),
                    meta={"cid": entry["id"]},
                    headers={"Referer": response.url,
                             "X-Requested-With": "XMLHttpRequest"},
                    callback=self.parse_json)
        else:
            yield Request(
                url=urllib.parse.urljoin(
                    response.url, "../download-center.html?async=1&showEndLife=true&catid=%s" % response.meta["cid"]),
                headers={"Referer": response.url,
                         "X-Requested-With": "XMLHttpRequest"},
                callback=self.parse_products)

    def parse_products(self, response):
        # Every anchor on the listing page is treated as a product link; the
        # product name is carried in the link's data-model attribute.
        for link in response.xpath("//a"):
            yield Request(
                url=urllib.parse.urljoin(
                    response.url, link.xpath("./@href").extract()[0]),
                meta={"product": link.xpath("./@data-model").extract()[0]},
                headers={"Referer": response.url},
                callback=self.parse_product)

    def parse_product(self, response):
        # If the page has a hardware-revision dropdown and we have not yet
        # chosen a build, fan out one request per revision; otherwise scrape
        # the firmware tables on this page.
        if response.xpath(
                "//dl[@id='dlDropDownBox']") and "build" not in response.meta:
            for entry in response.xpath("//dl[@id='dlDropDownBox']//li/a"):
                href = entry.xpath("./@href").extract()[0]
                text = entry.xpath(".//text()").extract()[0]
                yield Request(
                    url=urllib.parse.urljoin(response.url, href),
                    meta={"product": response.meta["product"], "build": text},
                    headers={"Referer": response.url},
                    callback=self.parse_product)
        else:
            # The GPL code section links, if present, point at the SDK; the
            # loop keeps the first href (iteration is over the reversed list).
            sdk = None
            for href in reversed(response.xpath(
                    "//div[@id='content_gpl_code']//a/@href").extract()):
                sdk = href
            for entry in response.xpath(
                    "//div[@id='content_firmware']//table"):
                href = entry.xpath("./tbody/tr[1]/th[1]//a/@href").extract()[0]
                text = entry.xpath(
                    "./tbody/tr[1]/th[1]//a//text()").extract()[0]
                date = entry.xpath("./tbody/tr[1]/td[1]//text()").extract()
                item = FirmwareLoader(
                    item=FirmwareImage(), response=response, date_fmt=["%d/%m/%y"])
                item.add_value("url", href)
                item.add_value("date", item.find_date(date))
                item.add_value("description", text)
                item.add_value("product", response.meta["product"])
                item.add_value("build", response.meta["build"] if "build" in response.meta else None)
                item.add_value("vendor", self.vendor)
                item.add_value("sdk", sdk)
                yield item.load_item()
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/kms/v1/service.proto
package com.google.cloud.kms.v1;
// NOTE(review): generated accessor interface for GenerateRandomBytesResponse.
// Do not hand-edit; regenerate from google/cloud/kms/v1/service.proto instead.
public interface GenerateRandomBytesResponseOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.cloud.kms.v1.GenerateRandomBytesResponse)
    com.google.protobuf.MessageOrBuilder {
  /**
   *
   *
   * <pre>
   * The generated data.
   * </pre>
   *
   * <code>bytes data = 1;</code>
   *
   * @return The data.
   */
  com.google.protobuf.ByteString getData();
  /**
   *
   *
   * <pre>
   * Integrity verification field. A CRC32C checksum of the returned
   * [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]. An integrity check of
   * [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data] can be performed by computing the
   * CRC32C checksum of [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data] and comparing your
   * results to this field. Discard the response in case of non-matching
   * checksum values, and perform a limited number of retries. A persistent
   * mismatch may indicate an issue in your computation of the CRC32C checksum.
   * Note: This field is defined as int64 for reasons of compatibility across
   * different languages. However, it is a non-negative integer, which will
   * never exceed 2^32-1, and can be safely downconverted to uint32 in languages
   * that support this type.
   * </pre>
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   *
   * @return Whether the dataCrc32c field is set.
   */
  boolean hasDataCrc32C();
  /**
   *
   *
   * <pre>
   * Integrity verification field. A CRC32C checksum of the returned
   * [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]. An integrity check of
   * [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data] can be performed by computing the
   * CRC32C checksum of [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data] and comparing your
   * results to this field. Discard the response in case of non-matching
   * checksum values, and perform a limited number of retries. A persistent
   * mismatch may indicate an issue in your computation of the CRC32C checksum.
   * Note: This field is defined as int64 for reasons of compatibility across
   * different languages. However, it is a non-negative integer, which will
   * never exceed 2^32-1, and can be safely downconverted to uint32 in languages
   * that support this type.
   * </pre>
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   *
   * @return The dataCrc32c.
   */
  com.google.protobuf.Int64Value getDataCrc32C();
  /**
   *
   *
   * <pre>
   * Integrity verification field. A CRC32C checksum of the returned
   * [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]. An integrity check of
   * [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data] can be performed by computing the
   * CRC32C checksum of [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data] and comparing your
   * results to this field. Discard the response in case of non-matching
   * checksum values, and perform a limited number of retries. A persistent
   * mismatch may indicate an issue in your computation of the CRC32C checksum.
   * Note: This field is defined as int64 for reasons of compatibility across
   * different languages. However, it is a non-negative integer, which will
   * never exceed 2^32-1, and can be safely downconverted to uint32 in languages
   * that support this type.
   * </pre>
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  com.google.protobuf.Int64ValueOrBuilder getDataCrc32COrBuilder();
}
|
<reponame>PureIso/exchange.core
import { Action } from "redux";
import { DisplayContainer } from "@interfaces/display-container.interface";
import { Display } from "@interfaces/display.interface";
export const CRUDDISPLAYCONTAINER = "CRUDDISPLAYCONTAINER";

/**
 * Redux action wrapping a DisplayContainer payload.
 * The payload is shallow-copied on construction so later mutation of the
 * caller's object cannot leak into the action.
 */
export class CRUDDisplayContainer implements Action {
    readonly type = CRUDDISPLAYCONTAINER;

    constructor(public payload: DisplayContainer) {
        // Shallow copy; equivalent to Object.assign({}, payload).
        this.payload = { ...payload };
    }

    /**
     * Store the new display flags and, when a product-scoped view (trade or
     * fills) is visible, remember which product/application it is bound to;
     * otherwise clear both bindings.
     */
    updateDisplayState(display: Display, productId: string, application: string) {
        this.payload.display = display;
        const isProductScoped = display.showProductTradeView || display.showFillsView;
        this.payload.selected_product_id = isProductScoped ? productId : undefined;
        this.payload.application_name = isProductScoped ? application : undefined;
    }
}
export type Actions = CRUDDisplayContainer;
|
/**
* Copyright 2020 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { FlowKind, ShapeBpmnElementKind } from '../../src/bpmn-visualization';
import {
expectEndEventBpmnElement,
expectPoolBpmnElement,
expectSequenceFlowBpmnElement,
expectServiceTaskBpmnElement,
expectStartEventBpmnElement,
expectTaskBpmnElement,
} from './helpers/semantic-with-svg-utils';
import { initializeBpmnVisualization } from './helpers/bpmn-visualization-initialization';
import { readFileSync } from '../helpers/file-helper';
// Integration tests for BpmnElementsRegistry lookup APIs: elements can be
// retrieved by id(s) or by BPMN kind(s) after a diagram has been loaded.
describe('Bpmn Elements registry - retrieve BPMN elements', () => {
  const bpmnVisualization = initializeBpmnVisualization();

  describe('Get by ids', () => {
    it('Pass several existing ids', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/simple-start-task-end.bpmn'));
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByIds(['StartEvent_1', 'Flow_2']);
      expect(bpmnElements).toHaveLength(2);
      // Results come back in the order the ids were requested.
      expectStartEventBpmnElement(bpmnElements[0], { id: 'StartEvent_1', name: 'Start Event 1' });
      expectSequenceFlowBpmnElement(bpmnElements[1], { id: 'Flow_2' });
    });
    it('Pass a single non existing id', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/simple-start-task-end.bpmn'));
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByIds('unknown');
      expect(bpmnElements).toHaveLength(0);
    });
    it('Pass existing and non existing ids', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/simple-start-task-end.bpmn'));
      // Unknown ids are silently ignored; only matches are returned.
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByIds(['StartEvent_1', 'unknown', 'Flow_1', 'another_unknown']);
      expect(bpmnElements).toHaveLength(2);
    });
  });

  describe('Get by kinds', () => {
    it('Pass a single kind related to a single existing element', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/simple-start-task-end.bpmn'));
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByKinds(ShapeBpmnElementKind.TASK);
      expect(bpmnElements).toHaveLength(1);
      expectTaskBpmnElement(bpmnElements[0], { id: 'Activity_1', name: 'Task 1' });
    });
    it('Pass a single kind related to several existing elements', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/simple-start-task-end.bpmn'));
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByKinds(FlowKind.SEQUENCE_FLOW);
      expect(bpmnElements).toHaveLength(2);
      expectSequenceFlowBpmnElement(bpmnElements[0], { id: 'Flow_1', name: 'Sequence Flow 1' });
      expectSequenceFlowBpmnElement(bpmnElements[1], { id: 'Flow_2' });
    });
    it('No elements for this kind', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/simple-start-task-end.bpmn'));
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByKinds(ShapeBpmnElementKind.SUB_PROCESS);
      expect(bpmnElements).toHaveLength(0);
    });
    it('Pass a several kinds that all match existing elements', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/registry/1-pool-3-lanes-message-start-end-intermediate-events.bpmn'));
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByKinds([ShapeBpmnElementKind.EVENT_END, ShapeBpmnElementKind.POOL]);
      expect(bpmnElements).toHaveLength(3);
      expectEndEventBpmnElement(bpmnElements[0], { id: 'endEvent_terminate_1', name: 'terminate end 1' });
      expectEndEventBpmnElement(bpmnElements[1], { id: 'endEvent_message_1', name: 'message end 2' });
      expectPoolBpmnElement(bpmnElements[2], { id: 'Participant_1', name: 'Pool 1' });
    });
    it('Pass a several kinds that match existing and non-existing elements', async () => {
      bpmnVisualization.load(readFileSync('../fixtures/bpmn/registry/1-pool-3-lanes-message-start-end-intermediate-events.bpmn'));
      // The fixture has no call activities, so only the service tasks match.
      const bpmnElements = bpmnVisualization.bpmnElementsRegistry.getElementsByKinds([ShapeBpmnElementKind.CALL_ACTIVITY, ShapeBpmnElementKind.TASK_SERVICE]);
      expect(bpmnElements).toHaveLength(2);
      expectServiceTaskBpmnElement(bpmnElements[0], { id: 'serviceTask_1_2', name: 'Service Task 1.2' });
      expectServiceTaskBpmnElement(bpmnElements[1], { id: 'serviceTask_2_1', name: 'Service Task 2.1' });
    });
  });
});
|
def calculate_mean(array):
    """Return the arithmetic mean of the values in ``array``.

    :param array: Iterable of numbers; must contain at least one value.
    :return: Sum of the values divided by their count.
    :raises ValueError: if ``array`` is empty (instead of the obscure
        ``ZeroDivisionError`` the bare division used to raise).
    """
    values = list(array)  # also accepts arbitrary iterables, not just lists
    if not values:
        raise ValueError("calculate_mean() requires at least one value")
    return sum(values) / len(values)
/**
 * Count the words in a string, where a word is any maximal run of ASCII
 * letters (A-Z, a-z); digits, punctuation and whitespace all separate words.
 *
 * @param {string} string - Input text.
 * @returns {number} Number of letter runs found (0 for an empty string).
 */
const countWords = (string) => {
  // Split on every non-letter, then drop the empty fragments produced by
  // consecutive separators or separators at either end of the string.
  const words = string.split(/[^A-Za-z]/).filter(word => word !== '');
  return words.length;
};
console.log(countWords('Hello, this is a test!')); // Output: 5 (the original comment wrongly said 4)
<reponame>OSWeDev/oswedev<gh_stars>0
import { ActionContext, ActionTree, GetterTree, MutationTree } from "vuex";
import { Action, Getter, namespace } from 'vuex-class/lib/bindings';
import { getStoreAccessors } from "vuex-typescript";
import IStoreModule from "../../../store/IStoreModule";
// Typed action context for this module's actions.
export type DroppableVoFieldsContext = ActionContext<IDroppableVoFieldsState, any>;

/** State held by the DroppableVoFields store module. */
export interface IDroppableVoFieldsState {
    // api_type_id -> field_id -> whether that field is currently selected.
    selected_fields: { [api_type_id: string]: { [field_id: string]: boolean } };
    // Free-text filter keyed by field id or api type id (null = no filter).
    filter_by_field_id_or_api_type_id: string;
}
/**
 * Singleton Vuex module ("DroppableVoFieldsStore", namespaced) tracking which
 * VO fields are selected and an optional field/api-type filter string.
 */
export default class DroppableVoFieldsStore implements IStoreModule<IDroppableVoFieldsState, DroppableVoFieldsContext> {

    // Lazily-created singleton accessor.
    public static getInstance(): DroppableVoFieldsStore {
        if (!DroppableVoFieldsStore.instance) {
            DroppableVoFieldsStore.instance = new DroppableVoFieldsStore();
        }
        return DroppableVoFieldsStore.instance;
    }

    private static instance: DroppableVoFieldsStore;

    public module_name: string;
    public state: any;
    public getters: GetterTree<IDroppableVoFieldsState, DroppableVoFieldsContext>;
    public mutations: MutationTree<IDroppableVoFieldsState>;
    public actions: ActionTree<IDroppableVoFieldsState, DroppableVoFieldsContext>;
    public namespaced: boolean = true;

    protected constructor() {
        let self = this;
        this.module_name = "DroppableVoFieldsStore";
        this.state = {
            selected_fields: {},
            filter_by_field_id_or_api_type_id: null
        };
        this.getters = {
            get_selected_fields(state: IDroppableVoFieldsState): { [api_type_id: string]: { [field_id: string]: boolean } } {
                return state.selected_fields;
            },
            get_filter_by_field_id_or_api_type_id(state: IDroppableVoFieldsState): string {
                return state.filter_by_field_id_or_api_type_id;
            },
        };
        this.mutations = {
            set_selected_fields(state: IDroppableVoFieldsState, selected_fields: { [api_type_id: string]: { [field_id: string]: boolean } }) {
                state.selected_fields = selected_fields;
            },
            set_filter_by_field_id_or_api_type_id(state: IDroppableVoFieldsState, filter_by_field_id_or_api_type_id: string) {
                state.filter_by_field_id_or_api_type_id = filter_by_field_id_or_api_type_id;
            },
            // Toggle one field's selection flag; no-ops on missing/invalid input.
            // NOTE(review): assigning a *new* field_id key here may not be
            // reactive under Vue 2's change detection -- confirm callers
            // always pre-populate selected_fields[api_type_id][field_id].
            switch_selected_field(state: IDroppableVoFieldsState, infos: { api_type_id: string, field_id: string }) {
                if ((!infos) || (!infos.api_type_id) || (!infos.field_id) || (!state.selected_fields[infos.api_type_id])) {
                    return;
                }
                state.selected_fields[infos.api_type_id][infos.field_id] = !state.selected_fields[infos.api_type_id][infos.field_id];
            },
        };
        // Actions simply delegate to the typed commit wrappers defined below.
        this.actions = {
            set_selected_fields(context: DroppableVoFieldsContext, selected_fields: { [api_type_id: string]: { [field_id: string]: boolean } }) {
                commit_set_selected_fields(context, selected_fields);
            },
            set_filter_by_field_id_or_api_type_id(context: DroppableVoFieldsContext, filter_by_field_id_or_api_type_id: string) {
                commit_set_filter_by_field_id_or_api_type_id(context, filter_by_field_id_or_api_type_id);
            },
            switch_selected_field(context: DroppableVoFieldsContext, infos: { api_type_id: string, field_id: string }) {
                commit_switch_selected_field(context, infos);
            }
        };
    }
}
export const DroppableVoFieldsStoreInstance = DroppableVoFieldsStore.getInstance();
// Typed accessors bound to the namespaced module (vuex-typescript helpers).
const { commit, read, dispatch } =
    getStoreAccessors<IDroppableVoFieldsState, any>("DroppableVoFieldsStore"); // We pass namespace here, if we make the module namespaced: true.
// Component decorators scoped to this module.
export const ModuleDroppableVoFieldsGetter = namespace('DroppableVoFieldsStore', Getter);
export const ModuleDroppableVoFieldsAction = namespace('DroppableVoFieldsStore', Action);
// Typed commit wrappers used by the module's own actions above.
export const commit_set_selected_fields = commit(DroppableVoFieldsStoreInstance.mutations.set_selected_fields);
export const commit_switch_selected_field = commit(DroppableVoFieldsStoreInstance.mutations.switch_selected_field);
export const commit_set_filter_by_field_id_or_api_type_id = commit(DroppableVoFieldsStoreInstance.mutations.set_filter_by_field_id_or_api_type_id);
import React, { useState } from 'react';
import axios from 'axios';
const App = () => {
const [city, setCity] = useState('');
const [weather, setWeather] = useState(null);
const handleSearch = async () => {
const response = await axios.get(
`https://api.openweathermap.org/data/2.5/weather?q=${city}&units=metric&appid=YOUR_API_KEY`
);
setWeather(response.data);
};
return (
<div>
<input
type="text"
value={city}
onChange={e => setCity(e.target.value)}
/>
<button onClick={handleSearch}>Search</button>
{weather && (
<div>
<h2>Temperature: {weather.main.temp}°C</h2>
<h3>Description: {weather.weather[0].description}</h3>
</div>
)}
</div>
);
};
export default App; |
// Shared client-side state for the swap UI.
const App = {
  address_details: {},                  // cache: address string -> decoded details
  change_events: 'change keyup paste',  // DOM events that trigger revalidation
  check_for_swap_interval_ms: 3000,     // polling period for swap status checks
  check_for_swap_interval: null,        // interval handle for the poller
  grace_ms: 1800 * 1e3,                 // grace period in milliseconds (30 min)
  invoice_details: {},                  // cache keyed by invoice
  invoice_refund_keypairs: {},          // refund keypairs keyed by invoice
  rates: {},                            // exchange-rate cache
  swaps: {},                            // in-flight swaps keyed by payment hash
};
/** Changed the currency

  Swap the visible coin icon, refresh the invoice details, and update the
  fiat code label for the newly selected chain.
*/
App.changedCurrencySelection = function({}) {
  let fiatCode;

  const swap = $(this).closest('.create-swap-quote');

  // jQuery always returns a (possibly empty) collection, so the original
  // `if (!swap)` guard could never fire; test `.length` instead.
  if (!swap.length) {
    return;
  }

  const network = $(this).val();
  const iconToShow = `.icon-${network}`;

  swap.find('.coin-icon').prop('hidden', true);
  swap.find(iconToShow).removeAttr('hidden');

  App.updateInvoiceDetails({swap});

  // Map the selected chain to the fiat code shown next to amounts.
  switch (network) {
  case 'bch':
  case 'bitcoin':
  case 'litecoin':
    fiatCode = 'USD';
    break;

  case 'bchtestnet':
  case 'ltctestnet':
  case 'testnet':
    fiatCode = 'tUSD';
    break;

  default:
    return console.log([400, 'UnexpectedNetworkSelected']);
  }

  swap.find('.chain-fiat-code').text(fiatCode);

  return;
};
/** Change the invoice

  Re-derive the payment details from the invoice currently entered in this
  swap quote's form.
*/
App.changedInvoice = function({}) {
  App.updateInvoiceDetails({swap: $(this).closest('.create-swap-quote')});

  return;
};
/** Changed the refund address

  Validate the refund address the user typed and, when valid, cache its
  decoded details and refresh the swap summary.
*/
App.changedRefundAddress = function({}) {
  const input = $(this);

  const address = input.val().trim();
  const swap = input.closest('.create-swap-quote');

  // Exit early when the address has not changed
  if (!swap.data() || swap.data().address === address) {
    return;
  }

  swap.data({address});

  // Clear any error highlighting from a previous validation round.
  swap.find('.address-currency-label').removeClass('text-danger');
  swap.find('.required-address-type').removeClass('text-danger');

  // Exit early when the invoice has been removed
  if (!address) {
    return input.removeClass('is-invalid').removeClass('is-valid');
  }

  const network = swap.find('.select-currency').val();

  return App.getAddressDetails({address, network}, (err, details) => {
    // Stale-response guard: ignore the result if the user has since typed a
    // different address.
    if (input.val().trim() !== address) {
      return;
    }

    if (!!err) {
      input.addClass('is-invalid');

      // Highlight the specific reason the address was rejected.
      const addrErr = err.message === 'ExpectedPublicKeyHash';
      const netErr = err.message === 'ExpectedTestnetAddress';

      swap.find('.required-address-type').toggleClass('text-danger', addrErr);
      swap.find('.address-currency-label').toggleClass('text-danger', netErr);

      return;
    }

    App.address_details[address] = details;

    App.updatedSwapDetails({swap});

    return;
  });
};
/** Changed refund key generation preference

  Toggle between paper-wallet refunds and a user-supplied refund address:
  shows/hides the address entry and adjusts the input's required flag.
*/
App.changedRefundPreference = function({}) {
  const checkbox = $(this);

  const swap = checkbox.closest('.create-swap-quote');
  const usePaperWallet = checkbox.is(':checked') === true;

  const refundInput = swap.find('.refund-address');

  swap.find('.paper-wallet-label').toggleClass('text-muted', !usePaperWallet);
  swap.find('.refund-address-entry').prop('hidden', usePaperWallet);

  // A paper wallet means no refund address is needed from the user.
  if (usePaperWallet) {
    refundInput.removeAttr('required');
  } else {
    refundInput.prop('required', true);
  }

  App.updatedSwapDetails({swap});

  return;
};
/** Changed the refund script

  Decode the pasted swap redeem script and populate every refund-related
  field (addresses, timeout height) for the selected network.
*/
App.changedRefundScript = function({}) {
  const script = $(this).val().trim();

  const network = $('.select-currency').val();

  // Exit early when the refund address is blanked
  if (!script) {
    $('.dump-refund-address').text('');

    // FIX: the original selector '.refund-p2pkh .refund-p2wpkh' matched
    // descendants only, so neither field was actually cleared; a comma
    // (multiple selector) was clearly intended.
    $('.refund-p2pkh, .refund-p2wpkh').val('');

    return $('.redeem-refund-address, .timeout-block-height').val('');
  }

  const details = blockchain.swapScriptDetails({network, script});

  $('.dump-refund-address').text(details.refund_p2wpkh_address);
  $('.timeout-block-height').val(details.timelock_block_height);
  $('.redeem-refund-address').val(details.refund_p2wpkh_address);
  $('.refund-p2pkh').text(details.refund_p2pkh_address);
  $('.refund-p2wpkh').text(details.refund_p2wpkh_address);
  $('.refund-p2sh-p2wsh-swap-address').text(details.p2sh_p2wsh_address);
  $('.refund-p2sh-swap-address').text(details.p2sh_address);
  $('.refund-p2wsh-swap-address').text(details.p2wsh_address);

  // For each supported address encoding, show it and associate the address
  // with its output script; hide encodings the script does not provide.
  if (!!details.bch_p2sh_address && !!details.p2sh_output_script) {
    $('.bch-p2sh-address').prop('value', details.bch_p2sh_address);
    $('.bch-p2sh-address').text(details.bch_p2sh_address);
    $('.select-swap-address').data(
      details.bch_p2sh_address,
      details.p2sh_output_script
    );
  } else {
    $('.bch-p2sh-address').prop('hidden', true);
  }

  $('.p2sh-address').prop('value', details.p2sh_address);
  $('.p2sh-address').text(details.p2sh_address);
  $('.select-swap-address').data(
    details.p2sh_address,
    details.p2sh_output_script
  );

  if (!!details.p2sh_p2wsh_address && !!details.p2sh_p2wsh_output_script) {
    $('.p2sh-p2wsh-address').prop('value', details.p2sh_p2wsh_address);
    $('.p2sh-p2wsh-address').text(details.p2sh_p2wsh_address);
    $('.select-swap-address').data(
      details.p2sh_p2wsh_address,
      details.p2sh_p2wsh_output_script
    );
  } else {
    $('.p2sh-p2wsh-address').prop('hidden', true);
  }

  if (!!details.p2wsh_address && !!details.p2wsh_output_script) {
    $('.p2wsh-address').prop('value', details.p2wsh_address);
    $('.p2wsh-address').text(details.p2wsh_address);
    $('.select-swap-address').data(
      details.p2wsh_address,
      details.p2wsh_output_script
    );
  } else {
    $('.p2wsh-address').prop('hidden', true);
  }

  return;
};
/** Check on a swap

  Polls the server for the on-chain status of a swap and updates the
  presented quote UI: shows failure on a gone swap, shows confirmation
  progress while the deposit confirms, and presents the completed swap
  once a payment secret is returned.

  {
  [button]: <Checking Button Dom Object>
  id: <Payment Hash String>
  quote: <Quote Object>
  }
*/
App.checkSwap = ({button, id, quote}) => {
  const {network} = App.swaps[id];
  return App.getSwap({
    network,
    invoice: App.swaps[id].invoice,
    redeem_script: App.swaps[id].redeem_script,
    swap_key_index: App.swaps[id].swap_key_index,
  },
  (err, res) => {
    // A 'Gone' error means the swap no longer exists server-side
    if (!!err && err.message === 'Gone') {
      quote.find('.failure').collapse('show');
      return;
    }
    // Nothing left to do for a swap already presented as complete
    if (!!App.swaps[id].is_completed) {
      return;
    }
    // Reset the check swap button title to its normal state
    if (!!button) {
      $(button).text($(button).prop('title'));
    }
    // Other errors are silently ignored; the next poll retries
    if (!!err) {
      return;
    }
    // A deposit was found: lock the quote against deletion and fill in
    // the refund/transaction details needed for a potential refund.
    quote.find('.chain-link').addClass('disabled');
    const invoice = App.swaps[id].invoice;
    const sentAmount = (res.output_tokens / 1e8).toFixed(8);
    let txUrl = '#';
    quote.find('.delete-swap').prop('disabled', true).addClass('disabled');
    quote.find('.download-file .label').text('Save Full Refund Details');
    quote.find('.refund-output-index').val(res.output_index);
    quote.find('.refund-tokens-total').val(sentAmount);
    quote.find('.swap-transaction-id').val(res.transaction_id);
    App.swaps[id].transaction_id = res.transaction_id;
    App.swaps[id].transaction_output_index = res.output_index;
    // Map the chain network to a block explorer URL for the deposit tx
    switch (network) {
    case 'bch':
      txUrl = `https://www.blocktrail.com/BCC/tx/${res.transaction_id}`;
      break;
    case 'bchtestnet':
      txUrl = `https://www.blocktrail.com/tBCC/tx/${res.transaction_id}`;
      break;
    case 'bitcoin':
      txUrl = `https://smartbit.com.au/tx/${res.transaction_id}`;
      break;
    case 'testnet':
      txUrl = `https://testnet.smartbit.com.au/tx/${res.transaction_id}`;
      break;
    case 'ltc':
      txUrl = `https://chain.so/tx/LTC/${res.transaction_id}`;
      break;
    case 'ltctestnet':
      txUrl = `https://chain.so/tx/LTCTEST/${res.transaction_id}`;
      break;
    default:
      console.log([0, 'ExpectedTxUrl']);
      break;
    }
    // Exit early when the deposit is found but more confs are needed
    if (!res.payment_secret) {
      // Display min 1 block waiting, since 0 blocks means swap is happening
      const confCount = res.conf_wait_count || 1;
      const isPluralConfs = confCount !== 1;
      if (!!res.attempts) {
        // The swap is confirmed and payment routes are being attempted
        quote.find('.attempting-sends').prop('hidden', false);
        quote.find('.waiting-for-confirmations').prop('hidden', true);
        quote.find('.routes-attempted').text(res.attempts);
        quote.find('.needed-confirmations-count').text('');
      } else {
        quote.find('.waiting-for-confirmations').prop('hidden', false);
        quote.find('.needed-confirmations-count').text(confCount);
      }
      quote.find('.found-waiting').collapse('show');
      quote.find('.deposit-transaction-id').prop('href', txUrl);
      quote.find('.plural-confirmation').prop('hidden', !isPluralConfs);
      quote.find('.tx-found').collapse('show');
      quote.find('.waiting-label').collapse('hide');
      quote.find('.qr-code').hide()
      quote.find('.waiting-notification')
        .removeClass('alert-secondary')
        .addClass('alert-primary');
      quote.find('.swap-payment-details').hide();
      return;
    }
    // A payment secret is present: the swap completed successfully
    App.swaps[id].is_completed = true;
    return App.presentCompletedSwap({
      invoice,
      network: App.swaps[id].network,
      payment_secret: res.payment_secret,
      presented_quote: quote,
      swap_amount: App.swaps[id].swap_amount,
      swap_fee: App.swaps[id].swap_fee,
      transaction_id: res.transaction_id,
    });
  });
};
/** Clicked check swap button

  Flips the button into its pending state and polls the swap status.
*/
App.clickedCheckSwap = function(event) {
  event.preventDefault();
  const checkButton = $(this);
  const quoteEl = checkButton.closest('.swap-quote');
  const paymentHash = quoteEl.data().payment_hash;
  checkButton.text(checkButton.data().pending_title);
  return App.checkSwap({button: checkButton, id: paymentHash, quote: quoteEl});
};
/** Clicked delete swap

  Stops any in-flight polling, removes the quote element, and re-opens
  the swap creation form.
*/
App.clickedDeleteSwap = function(event) {
  event.preventDefault();
  // Cancel the background status poll for this swap, if running
  if (App.check_for_swap) {
    clearInterval(App.check_for_swap);
  }
  $(this).closest('.swap-quote').remove();
  $('.create-swap-quote').collapse('show');
  return;
};
/** Clicked new swap button

  Opens the swap creation form and clears any presented success panes.
*/
App.clickedNewSwap = function(event) {
  event.preventDefault();
  const newSwapButton = $(this);
  // Ignore clicks while the button is disabled
  if (newSwapButton.is('.disabled')) {
    return;
  }
  $('.create-swap-quote').collapse('show');
  $('.new-swap').addClass('disabled');
  $('.presented.swap-success').remove();
  return;
};
/** Clicked show refund pane

  Marks the refund toggle active and swaps the payment pane out for the
  refund details pane.
*/
App.clickedShowRefund = function(event) {
  event.preventDefault();
  const quoteEl = $(this).closest('.swap-quote');
  quoteEl.find('.toggle-refund').removeClass('active').removeClass('inactive');
  $(this).addClass('active');
  quoteEl.find('.send-to-swap').collapse('hide');
  quoteEl.find('.refund-details').collapse('show');
  return;
};
/** Clicked show swap

  Marks the payment toggle active and swaps the refund pane out for the
  send-to-swap pane. Mirror image of clickedShowRefund.
*/
App.clickedShowSwap = function(event) {
  event.preventDefault();
  const quoteEl = $(this).closest('.swap-quote');
  quoteEl.find('.toggle-refund').removeClass('active').removeClass('inactive');
  $(this).addClass('active');
  quoteEl.find('.send-to-swap').collapse('show');
  quoteEl.find('.refund-details').collapse('hide');
  return;
};
/** Create swap

  Posts a swap creation request and validates the server response
  before handing it back to the caller.

  {
  invoice: <Bolt 11 Invoice String>
  network: <Network Name String>
  refund: <Refund Address String>
  }

  @returns via cbk
  {
  destination_public_key: <Destination Public Key Hex String>
  fee_tokens_per_vbyte: <Fee Rate Tokens Per Virtual Byte Number>
  invoice: <Lightning Invoice String>
  payment_hash: <Payment Hash Hex String>
  redeem_script: <Redeem Script Hex String>
  refund_address: <Refund Address String>
  refund_public_key_hash: <Refund Public Key Hash Hex String>
  swap_amount: <Swap Amount Number>
  swap_fee: <Swap Fee Tokens Number>
  swap_key_index: <Swap Key Index Number>
  swap_p2sh_address: <Swap Chain Legacy P2SH Base58 Address String>
  swap_p2sh_p2wsh_address: <Swap Chain P2SH Nested SegWit Address String>
  swap_p2wsh_address: <Swap Chain P2WSH Bech32 Address String>
  timeout_block_height: <Swap Expiration Date Number>
  }
*/
App.createSwap = ({invoice, network, refund}, cbk) => {
  if (!invoice) {
    return cbk([0, 'ExpectedInvoice']);
  }
  if (!network) {
    return cbk([0, 'ExpectedNetworkForSwapCreation']);
  }
  if (!refund) {
    return cbk([0, 'ExpectedRefundAddress']);
  }
  // Attributes the server response must contain, with the error thrown
  // when an attribute is absent. Checked in this order.
  const requiredAttributes = [
    ['destination_public_key', 'ExpectedDestinationPublicKey'],
    ['fee_tokens_per_vbyte', 'ExpectedFeeTokensPerVirtualByte'],
    ['invoice', 'ExpectedInvoice'],
    ['payment_hash', 'ExpectedPaymentHash'],
    ['refund_address', 'ExpectedRefundAddress'],
    ['refund_public_key_hash', 'ExpectedRefundPublicKeyHash'],
    ['redeem_script', 'ExpectedRedeemScript'],
    ['swap_amount', 'ExpectedSwapAmount'],
    ['swap_fee', 'ExpectedSwapFee'],
    ['swap_key_index', 'ExpectedSwapKeyIndex'],
    ['swap_p2sh_address', 'ExpectedSwapP2shAddress'],
    ['timeout_block_height', 'ExpectedTimeoutBlockHeight'],
  ];
  App.makeRequest({post: {invoice, network, refund}, api: 'swaps/'})
  .then(details => {
    // The invoice must still be known locally when the quote arrives
    if (!App.invoice_details[invoice]) {
      throw new Error('ExpectedInvoiceDetails');
    }
    requiredAttributes.forEach(([attribute, errMessage]) => {
      if (!details[attribute]) {
        throw new Error(errMessage);
      }
    });
    return details;
  })
  .then(details => cbk(null, details))
  .catch(err => cbk(err));
  return;
};
/** Format tokens as a display string

  Converts a token count into a big-unit decimal string with eight
  decimal places (e.g. 150000000 -> "1.50000000").

  {
  tokens: <Tokens Number>
  }
*/
App.format = ({tokens}) => {
  const decimalPlaces = 8;
  const tokensPerUnit = 1e8;
  return (tokens / tokensPerUnit).toFixed(decimalPlaces);
};
/** Get address details

  Fetches details for a chain address and rejects any address that is
  not a public key hash variant.

  {
  address: <Chain Address String>
  network: <Network Name String>
  }

  @returns via cbk
  {
  type: <Address Type String>
  }
*/
App.getAddressDetails = ({address, network}, cbk) => {
  App.makeRequest({api: `address_details/${network}/${address}`})
  .then(details => {
    // Only p2pkh and p2wpkh addresses are acceptable refund targets
    const isPublicKeyHash = details.type === 'p2pkh' || details.type === 'p2wpkh';
    if (!isPublicKeyHash) {
      throw new Error('ExpectedPublicKeyHash');
    }
    return details;
  })
  .then(details => cbk(null, details))
  .catch(err => cbk(err));
  return;
};
/** Get invoice details in the context of a swap

  Fetches and validates the decoded invoice from the API, rejecting
  expired invoices and unknown destination networks.

  {
  invoice: <Bolt 11 Invoice String>
  network: <Swapping with Network Name String>
  }

  @returns via cbk
  {
  created_at: <Created At ISO 8601 Date String>
  description: <Payment Description String>
  destination_public_key: <Destination Public Key Hex String>
  [expires_at]: <Expires At ISO 8601 Date String>
  fee: <Fee Tokens Number>
  [fee_fiat_value]: <Fee Fiat Value in Cents Number>
  [fiat_currency_code]: <Fiat Currency Code String>
  [fiat_value]: <Fiat Value in Cents Number>
  id: <Invoice Id String>
  network: <Network Name String>
  tokens: <Tokens to Send Number>
  }
*/
App.getInvoiceDetails = ({invoice, network}, cbk) => {
  return fetch(`/api/v0/invoice_details/${network}/${invoice}`)
  .then(r => {
    // Treat anything but a 200 as a failure carrying the response
    switch (r.status) {
    case 200:
      return Promise.resolve(r);
    default:
      return Promise.reject(r);
    }
  })
  .then(r => r.json())
  .then(details => {
    if (!details.created_at) {
      throw new Error('ExpectedCreatedAt');
    }
    if (!details.destination_public_key) {
      throw new Error('ExpectedDestinationPublicKey');
    }
    if (!details.expires_at) {
      throw new Error('ExpectedExpiresAt');
    }
    // Zero is a valid fee, so only undefined is rejected
    if (details.fee === undefined) {
      throw new Error('ExpectedFeeTokensAmount');
    }
    if (!details.id) {
      throw new Error('ExpectedId');
    }
    // Must be explicitly not-expired; a missing flag is also rejected
    if (details.is_expired !== false) {
      throw new Error('ExpectedUnexpiredInvoice');
    }
    // Only these Lightning destination networks are supported
    switch (details.network) {
    case 'bitcoin':
    case 'ltc':
    case 'testnet':
      break;
    default:
      throw new Error('UnrecognizedDestinationNetwork');
    }
    if (!details.tokens) {
      throw new Error('ExpectedTokens');
    }
    return details;
  })
  .then(details => cbk(null, {
    created_at: details.created_at,
    destination_public_key: details.destination_public_key,
    expires_at: details.expires_at,
    fee: details.fee,
    fee_fiat_value: details.fee_fiat_value || null,
    fiat_currency_code: details.fiat_currency_code,
    fiat_value: details.fiat_value || null,
    id: details.id,
    is_expired: details.is_expired,
    network: details.network,
    tokens: details.tokens,
  }))
  .catch(err => {
    // A rejected Response object exposes .text(); read the body for the
    // error message, otherwise fall back to the status text.
    if (!!err.text) {
      return err.text().then(text => cbk([err.status, text]));
    }
    return cbk([500, err.statusText]);
  });
};
/** Get the status of a swap

  {
  invoice: <Invoice BOLT 11 String>
  network: <Network Name String>
  redeem_script: <Redeem Script String>
  }

  @returns via cbk
  {
  [conf_wait_count]: <Confirmations to Wait Number>
  [payment_secret]: <Payment Secret Hex String>
  [transaction_id]: <Transaction Id Hex String>
  }
*/
App.getSwap = (args, cbk) => {
  if (!args.invoice) {
    return cbk([0, 'ExpectedInvoice']);
  }
  if (!args.redeem_script) {
    return cbk([0, 'ExpectedRedeemScript']);
  }
  const {invoice, network, redeem_script} = args;
  App.makeRequest({api: 'swaps/check', post: {invoice, network, redeem_script}})
  .then(details => cbk(null, details))
  .catch(err => cbk(err));
  return;
};
/** Init App

  Wires up all form/input event handlers and kicks off the initial
  fetches for active chains and exchange rates.
*/
App.init = args => {
  // Form submissions
  $('.create-swap-quote').submit(App.submitCreateSwapQuote);
  $('.enter-refund-details').submit(App.submitRefundRecovery);
  $('.new-swap').click(App.clickedNewSwap);
  // Input change handlers
  $('.pay-to-lightning-invoice').on(App.change_events, App.changedInvoice);
  $('.refund-address').on(App.change_events, App.changedRefundAddress);
  $('.sign-with-refund-details').submit(App.submitSignWithRefundDetails);
  $('.refund-details-script').on(App.change_events, App.changedRefundScript);
  $('.create-swap-quote .select-currency').change(App.changedCurrencySelection);
  $('#use-paper-wallet').change(App.changedRefundPreference);
  // Populate the currency selector from the server's active networks
  App.initActiveChains({}, err => !!err ? console.log(err) : null);
  // Cache exchange rates, then refresh the displayed swap details
  App.initExchangeRates({}, (err, res) => {
    if (!!err) {
      return console.log(err);
    }
    res.rates.forEach(({cents, fees, network}) => {
      return App.rates[network] = {cents, fees};
    });
    App.updatedSwapDetails({swap: $('.create-swap-quote')});
    return;
  });
  return;
};
/** Initialize active chains

  Fetches the list of active networks, reveals their UI elements, and
  fills the currency selector with human readable network names.

  {}
*/
App.initActiveChains = ({}, cbk) => {
  // Display names for each supported network; unknown networks are
  // omitted from the currency selector.
  const chainDisplayNames = {
    bch: 'Bcash',
    bchtestnet: 'Bcash Testnet',
    bitcoin: 'Bitcoin',
    ltc: 'Litecoin',
    ltctestnet: 'Litecoin Testnet',
    testnet: 'Bitcoin Testnet',
  };
  App.makeRequest({api: 'networks/'})
  .then(res => {
    if (!res || !Array.isArray(res.networks)) {
      throw new Error('ExpectedActiveNetworks');
    }
    $('.pay-to-lightning-invoice').prop('readonly', false);
    $('.select-currency').prop('disabled', false);
    res.networks.forEach(n => $(`.${n}-chain`).prop('hidden', false));
    res.networks.forEach(network => {
      const displayName = chainDisplayNames[network];
      // Skip networks without a known display name
      if (!displayName) {
        return;
      }
      const option = $('<option></option>');
      option.addClass(`${network}-chain`);
      option.attr('value', network);
      option.text(displayName);
      $('.create-swap-quote .select-currency').append(option);
      return;
    });
    // Select a currency
    if (!$('.select-currency').val()) {
      ['testnet', 'bitcoin']
        .filter(n => res.networks.indexOf(n) !== -1)
        .forEach(n => $('.select-currency').val(n));
    }
    App.initFromQueryParams({});
    return cbk();
  })
  .catch(err => cbk(err));
  return;
};
/** Initialize exchange rates

  {}

  @returns via cbk
  {
  rates: [{
  cents: <Cents Per Token Number>
  network: <Network Name String>
  }]
  }
*/
App.initExchangeRates = ({}, cbk) => {
  App.makeRequest({api: 'exchange_rates/'})
  .then(res => {
    // The response must contain an array of rates
    if (!res || !Array.isArray(res.rates)) {
      throw new Error('ExpectedExchangeRates');
    }
    return cbk(null, {rates: res.rates});
  })
  .catch(err => cbk(err));
  return;
};
/** Init state using query parameters

  Reads `invoice` and `network` from the page URL, strips the query
  string from history, and pre-fills the swap form.

  {}
*/
App.initFromQueryParams = ({}) => {
  let invoice;
  let network;
  try {
    const params = new URL(location.href).searchParams;
    invoice = params.get('invoice');
    network = params.get('network');
    if (!invoice) {
      return;
    }
    // Remove the query string so a refresh doesn't re-trigger
    history.replaceState({}, 'some title', '/');
  } catch (err) {
    // URL parsing or history APIs unavailable: nothing to initialize
    return;
  }
  // Set the currency to use
  $('.select-currency').val(network).trigger('change');
  // Add in the LN invoice
  $('.pay-to-lightning-invoice').val(invoice).trigger('change');
  return;
};
/** Make a request

  Small fetch wrapper for the app's JSON API: GET without a body, POST
  with a JSON body, resolving to parsed JSON on a 200 response.

  {
  api: <API Path String>
  [post]: <Post JSON Object>
  }

  @returns
  <Fetch Promise Object>
*/
App.makeRequest = ({api, post}) => {
  const options = {
    body: post ? JSON.stringify(post) : null,
    headers: {'content-type': 'application/json'},
    method: post ? 'POST' : 'GET',
  };
  return fetch(`/api/v0/${api}`, options)
  .then(r => {
    if (r.status !== 200) {
      return Promise.reject(new Error(r.statusText));
    }
    return Promise.resolve(r);
  })
  .then(r => r.json());
};
/** Present completed swap

  Replaces the quote pane with a cloned success pane showing amounts,
  the payment secret, and a block explorer link for the chain tx.

  {
  invoice: <Bolt 11 Invoice String>
  network: <Chain Network Name String>
  payment_secret: <Payment Secret String>
  presented_quote: <Presented Quote DOM Object>
  swap_amount: <On-Chain Swap Amount Tokens Number>
  swap_fee: <Swap Fee Tokens Number>
  transaction_id: <Transaction Id String>
  }
*/
App.presentCompletedSwap = args => {
  // The swap finished; stop any background polling
  if (App.check_for_swap) {
    clearInterval(App.check_for_swap);
  }
  args.presented_quote.remove();
  const txId = args.transaction_id;
  // Currency ticker and block explorer URL for each known network
  const explorers = {
    bch: ['BCH', `https://www.blocktrail.com/BCC/tx/${txId}`],
    bchtestnet: ['tBCH', `https://www.blocktrail.com/tBCC/tx/${txId}`],
    bitcoin: ['BTC', `https://smartbit.com.au/tx/${txId}`],
    testnet: ['tBTC', `https://testnet.smartbit.com.au/tx/${txId}`],
    ltc: ['LTC', `https://chain.so/tx/LTC/${txId}`],
    ltctestnet: ['tLTC', `https://chain.so/tx/LTCTEST/${txId}`],
  };
  let href = '#';
  let onChainCurrency = '';
  if (explorers[args.network]) {
    [onChainCurrency, href] = explorers[args.network];
  } else {
    console.log([0, 'UnexpectedNetworkForHref']);
  }
  const swap = $('.swap-success').clone();
  swap.addClass('presented').removeClass('template');
  swap.find('.chain-amount').text(App.format({tokens: args.swap_amount}));
  swap.find('.payment-secret').text(args.payment_secret);
  swap.find('.send-network-code').text(onChainCurrency);
  swap.find('.swap-date').text(new Intl.DateTimeFormat().format(new Date()));
  swap.find('.swap-fee').text(App.format({tokens: args.swap_fee}));
  swap.find('.transaction-id').text(args.transaction_id);
  swap.find('.transaction-id').prop('href', href);
  $('.quotes').prepend(swap);
  swap.collapse('show');
  App.showInvoice({swap, invoice: args.invoice});
  // Allow starting another swap
  $('.new-swap').removeClass('disabled');
  return;
};
/** Get qr code

  Builds a payment-URI QR code image element, styled for display.

  {
  address: <Address String>
  amount: <Amount String>
  scheme: <Scheme String>
  }

  @returns
  <QR Code Img Object>
*/
App.qrCode = ({address, amount, scheme}) => {
  const paymentUri = `${scheme}:${address}?amount=${amount}`;
  const qr = kjua({
    back: 'rgb(250, 250, 250)',
    rounded: 100,
    size: 300,
    text: paymentUri,
  });
  $(qr).css({height: 'auto', 'max-width': '160px', width: '100%'});
  return qr;
};
/** Show invoice

  Fills in and reveals the invoice details pane of a swap: destination,
  fiat value, token amount, and network labels.

  {
  invoice: <Lightning Invoice String>
  swap: <Swap DOM Object>
  }
*/
App.showInvoice = args => {
  const cents = 100;
  const details = args.swap.find('.invoice-details');
  const invoice = App.invoice_details[args.invoice];
  const currencyFormatter = new Intl.NumberFormat('en-US', {
    currency: invoice.fiat_currency_code,
    style: 'currency',
  });
  const fiat = invoice.fiat_value / cents;
  const hasDestinationUrl = !!invoice.destination_url;
  let symbolForFiat;
  let symbolForNetwork;
  switch (invoice.network) {
  case 'bitcoin':
    symbolForFiat = 'USD';
    symbolForNetwork = 'Lightning BTC';
    break;
  case 'ltc':
    symbolForFiat = 'USD';
    symbolForNetwork = 'Lightning LTC';
    break;
  case 'testnet':
    symbolForFiat = 'tUSD';
    symbolForNetwork = 'Lightning tBTC';
    break;
  default:
    // Fix: previously assigned symbolForNetwork twice and never set
    // symbolForFiat, leaving it undefined and rendering "undefined".
    symbolForFiat = '';
    symbolForNetwork = '';
    break;
  }
  // Show either the destination's URL label or its raw public key
  details.find('.destination-url').prop('href', invoice.destination_url);
  details.find('.destination-url').text(invoice.destination_label);
  details.find('.destination-url').prop('hidden', !hasDestinationUrl);
  details.find('.payment-public-key').text(invoice.destination_public_key);
  details.find('.payment-public-key').prop('hidden', !!hasDestinationUrl);
  details.find('.fiat-currency-code').text(symbolForFiat);
  details.find('.fiat-send-amount').text(currencyFormatter.format(fiat));
  details.find('.send-amount').text(App.format({tokens: invoice.tokens}));
  details.find('.send-currency-code').text(symbolForNetwork);
  details.collapse('show');
  return;
};
/** Submit the create swap quote form

  Validates the entered invoice and refund address (or paper wallet
  preference), requests a swap quote from the server, then renders the
  quote pane with payment address, QR code, and refund download.
*/
App.submitCreateSwapQuote = function(event) {
  event.preventDefault();
  const swap = $(this).closest('.create-swap-quote');
  const addressInput = swap.find('.refund-address');
  const invoiceInput = swap.find('.pay-to-lightning-invoice');
  const isPaperWallet = !!swap.find('#use-paper-wallet').is(':checked');
  const address = addressInput.val().trim();
  const invoice = invoiceInput.val().trim();
  const network = swap.find('.select-currency').val();
  // The invoice must have been validated already
  if (!App.invoice_details[invoice]) {
    return;
  }
  // A refund address is required unless using a paper wallet key
  if (!isPaperWallet && !App.address_details[address]) {
    return;
  }
  if (!network) {
    return console.log([500, 'ExpectedNetworkValue']);
  }
  swap.collapse('hide');
  const quote = $('.swap-quote').clone();
  quote.addClass('presented').removeClass('template');
  quote.find('.check-swap').click(App.clickedCheckSwap);
  quote.find('.delete-swap').click(App.clickedDeleteSwap);
  quote.find('.show-payment').click(App.clickedShowSwap);
  quote.find('.show-refund').click(App.clickedShowRefund);
  $('.quotes').prepend(quote);
  const refundKey = App.invoice_refund_keypairs[invoice];
  const refund = !isPaperWallet ? address : refundKey.p2pkh_address;
  return App.createSwap({invoice, network, refund}, (err, details) => {
    if (!!err) {
      return console.log('CREATE SWAP FAILURE', err);
    }
    swap.find('.invoice-details').collapse('hide');
    addressInput.removeClass('is-valid').val('');
    invoiceInput.removeClass('is-valid').val('');
    // Remember the swap state so it can be polled and refunded later
    App.swaps[details.payment_hash] = {
      network,
      destination_public_key: details.destination_public_key,
      fee_tokens_per_vbyte: details.fee_tokens_per_vbyte,
      invoice: details.invoice,
      payment_hash: details.payment_hash,
      redeem_script: details.redeem_script,
      refund_address: details.refund_address,
      refund_public_key_hash: details.refund_public_key_hash,
      swap_amount: details.swap_amount,
      swap_fee: details.swap_fee,
      swap_key_index: details.swap_key_index,
      swap_p2sh_address: details.swap_p2sh_address,
      swap_p2sh_p2wsh_address: details.swap_p2sh_p2wsh_address,
      swap_p2wsh_address: details.swap_p2wsh_address,
      timeout_block_height: details.timeout_block_height,
    };
    const redeemInfoJsonSpacing = 2;
    let swapAddress = details.swap_p2sh_p2wsh_address;
    const swapAmount = App.format({tokens: details.swap_amount});
    const qs = $.param({amount: swapAmount, message: details.redeem_script});
    let scheme;
    // Pick the payment URI scheme; Bcash requires the legacy p2sh address
    switch (network) {
    case 'bch':
    case 'bchtestnet':
      scheme = 'bitcoincash';
      swapAddress = details.swap_p2sh_address;
      break;
    case 'bitcoin':
    case 'testnet':
      scheme = 'bitcoin';
      break;
    case 'ltc':
    case 'ltctestnet':
      scheme = 'litecoin';
      break;
    default:
      // NOTE(review): scheme remains undefined here and the built URI
      // would contain "undefined:" — unreachable for supported networks
      console.log([0, 'UnexpectedNetworkForScheme']);
    }
    const addr = `${scheme}:${swapAddress}?${qs}`;
    const qrCode = App.qrCode({
      scheme,
      address: swapAddress,
      amount: swapAmount,
    });
    quote.data({payment_hash: details.payment_hash});
    quote.find('.chain-link').prop('href', addr);
    quote.find('.qr-code').append(qrCode);
    quote.find('.redeem-script').val(details.redeem_script);
    quote.find('.swap-address').val(swapAddress);
    quote.find('.swap-amount').val(swapAmount);
    quote.find('.timeout-block-height').val(details.timeout_block_height);
    // Download a refund-details JSON file when the save button clicks
    quote.find('.save-redeem-script').click(e => {
      const anchor = document.createElement('a');
      const encoding = 'data:text/plain;charset=utf-8';
      const txDetails = App.swaps[details.payment_hash] || {};
      const refundData = {
        network,
        private_key: !isPaperWallet ? undefined : refundKey.private_key,
        redeem_script: details.redeem_script,
        refund_address: address,
        refund_fee_tokens_per_vbyte: details.fee_tokens_per_vbyte,
        refund_after: details.timeout_block_height,
        swap_address: swapAddress,
        swap_amount: swapAmount,
        swap_quote_received_at: new Date().toISOString(),
        transaction_id: txDetails.transaction_id,
        transaction_output_index: txDetails.transaction_output_index,
      };
      const text = JSON.stringify(
        refundData,
        null,
        redeemInfoJsonSpacing
      );
      anchor.setAttribute('download', `${swapAddress}.redeem_script.json`);
      anchor.setAttribute('href', `${encoding},${encodeURIComponent(text)}`);
      // Trigger the download, with a fallback for older browsers
      if (!!document.createEvent) {
        const event = document.createEvent('MouseEvents');
        event.initEvent('click', true, true);
        anchor.dispatchEvent(event);
      } else {
        anchor.click();
      }
      // Only reveal payment once the refund details are saved
      quote.find('.make-payment').collapse('show');
      quote.find('.chain-link').removeClass('disabled');
      return;
    });
    App.showInvoice({invoice, swap: quote});
    quote.collapse('show');
    // Poll the swap status on an interval
    App.check_for_swap = setInterval(() => {
      return App.checkSwap({quote, id: details.payment_hash});
    },
    App.check_for_swap_interval_ms);
    return;
  });
};
/** Submit refund details derivation

  Parses pasted refund-details JSON, populates the refund form fields,
  and looks up the funding transaction on a per-chain public block
  explorer so the tx id and output index can be pre-filled.
*/
App.submitRefundRecovery = function(event) {
  event.preventDefault();
  // Reset all state from any previous recovery attempt
  $('.claimed-balance').removeClass('show').addClass('hide');
  $('.no-balance').removeClass('show').addClass('hide');
  $('.refund-details-not-found').collapse('hide');
  $('.refund-tx-failure').collapse('hide');
  const clearFields = [
    '.refund-key',
    '.sign-with-refund-details input',
    '.sign-with-refund-details textarea',
    '.signed-refund-transaction',
  ];
  clearFields.forEach(n => $(n).val(''));
  const refundDetails = ($('.refund-details').val() || '').trim();
  let details;
  try {
    details = JSON.parse(refundDetails);
  } catch (err) {
    return console.log([400, 'FailedToParseRefundDetails']);
  }
  if (!!details.network) {
    $('.select-currency').val(details.network);
  }
  if (!!details.redeem_script) {
    $('.refund-details-script').val(details.redeem_script);
    $('.refund-details-script').trigger('change');
  }
  if (!!details.swap_address) {
    $('.select-swap-address').val(details.swap_address);
    const address = details.swap_address;
    // Per-network explorer lookup for the funding UTXO; results are
    // best-effort and never overwrite fields the user already filled.
    switch (details.network) {
    case 'bch':
    case 'bchtestnet':
      const bchNet = details.network === 'bch' ? 'bch' : 'test-bch';
      fetch(`https://${bchNet}-insight.bitpay.com/api/addrs/${address}/utxo`)
      .then(r => r.json())
      .then(transactions => {
        if (!Array.isArray(transactions) || !transactions.length) {
          return $('.no-balance').collapse('show');
        }
        const [tx] = transactions;
        if (!!tx && !!tx.txid && !$('.refund-transaction-id').val()) {
          $('.refund-transaction-id').val(tx.txid);
        }
        if (!!tx && tx.vout !== undefined && !$('.refund-tx-vout').val()) {
          $('.refund-tx-vout').val(tx.vout);
        }
        return;
      })
      .catch(err => {
        console.log([503, 'FailedToFetchAddressDetails']);
        return;
      });
      break;
    case 'bitcoin':
    case 'testnet':
      const bNet = details.network === 'bitcoin' ? 'main' : 'test3';
      fetch(`https://api.blockcypher.com/v1/btc/${bNet}/addrs/${address}/full`)
      .then(r => r.json())
      .then(details => {
        if (!details || !Array.isArray(details.txs) || !details.txs.length) {
          return $('.no-balance').collapse('show');
        }
        // A zero balance means the swap output was already spent
        if (!details.balance) {
          return $('.claimed-balance').collapse('show');
        }
        // Map each output address to its funding tx hash and vout
        let payouts = {};
        details.txs.forEach(({hash, outputs}) => {
          return outputs.forEach(({addresses}, vout) => {
            if (!Array.isArray(addresses)) {
              return;
            }
            return addresses.forEach(addr => payouts[addr] = {hash, vout});
          });
        });
        const tx = payouts[address];
        if (!!tx && !!tx.hash && !$('.refund-transaction-id').val()) {
          $('.refund-transaction-id').val(tx.hash);
        }
        if (!!tx && tx.vout !== undefined && !$('.refund-tx-vout').val()) {
          $('.refund-tx-vout').val(tx.vout);
        }
        return;
      })
      .catch(err => {
        console.log([503, 'FailedToFetchAddressDetails', err]);
        return;
      });
      break;
    case 'ltc':
    case 'ltctestnet':
      const lNet = details.network === 'ltc' ? 'LTC' : 'LTCTEST';
      fetch(`https://chain.so/api/v2/get_tx_received/${lNet}/${address}`)
      .then(r => r.json())
      .then(details => {
        if (!details || !details.data || !Array.isArray(details.data.txs)) {
          return;
        }
        if (!details.data.txs.length) {
          return;
        }
        const [tx] = details.data.txs;
        if (!tx) {
          return;
        }
        if (!!tx.txid && !$('.refund-transaction-id').val()) {
          $('.refund-transaction-id').val(tx.txid);
        }
        if (tx.output_no !== undefined && !$('.refund-tx-vout').val()) {
          $('.refund-tx-vout').val(tx.output_no);
        }
        return;
      })
      .catch(err => {
        console.log([503, 'FailedToFetchAddressDetails']);
        return;
      });
      // NOTE(review): missing break here; execution falls into the empty
      // default case, which is harmless but an explicit break is clearer
    default:
      break;
    }
  }
  // Direct transaction details from the pasted JSON take precedence
  if (!!details.transaction_id) {
    $('.refund-transaction-id').val(details.transaction_id);
  }
  if (details.transaction_output_index !== undefined) {
    $('.refund-tx-vout').val(details.transaction_output_index);
  }
  if (!!details.swap_amount) {
    $('.tokens-total').val(details.swap_amount);
  }
  if (!!details.refund_fee_tokens_per_vbyte) {
    $('.refund-fee').val(details.refund_fee_tokens_per_vbyte);
  } else {
    // Default to a minimal 1 token/vbyte fee when unspecified
    $('.refund-fee').val(1);
  }
  if (!!details.private_key) {
    $('.refund-key').val(details.private_key);
  }
  if (!!details.refund_address) {
    $('.refund-address').val(details.refund_address);
  }
  // Switch to the refund tab with result panes hidden
  $('#tx-details-refund-tab').tab('show');
  $('.generic.refund-tx-failure').collapse('hide');
  $('.refund-tx-success').collapse('hide');
  return;
};
/** Submit sign refund transaction with details form

  Derives swap details from the redeem script, signs a refund
  transaction with the provided key, and broadcasts it via the API.
*/
App.submitSignWithRefundDetails = function(e) {
  e.preventDefault();
  const redeemScript = $(this).find('.refund-details-script').val().trim();
  if (!redeemScript) {
    return console.log([0, 'ExpectedRedeemScript']);
  }
  let swapDetails;
  try {
    swapDetails = blockchain.swapScriptDetails({
      network: $('.select-currency').val(),
      script: redeemScript,
    });
  } catch (err) {
    return console.log([0, 'FailedToDeriveSwapDetails'], err);
  }
  const network = $('.select-currency').val();
  const refundAddress = $('.refund-address').val().trim();
  const refundFee = parseInt($('.refund-fee').val().trim(), 10);
  const refundKey = $('.refund-key').val().trim();
  const refundAmount = $('.tokens-total').val().trim();
  const refundTxId = $('.refund-transaction-id').val().trim();
  const refundTxVout = parseInt($('.refund-tx-vout').val().trim(), 10);
  const swapAddress = $('.select-swap-address').val();
  // A private key is required to sign the refund
  if (!refundKey) {
    $('.signed-refund-transaction').val('');
    $('.generic.refund-tx-failure').collapse('show');
    return;
  }
  // Convert the big-unit decimal amount into integer tokens
  const refundTokens = parseInt(
    (parseFloat(refundAmount) * 1e8).toFixed(),
    10
  );
  let refund;
  try {
    refund = blockchain.refundTransaction({
      network,
      destination: refundAddress,
      fee_tokens_per_vbyte: refundFee,
      is_public_key_hash_refund: true,
      private_key: refundKey,
      timelock_block_height: swapDetails.timelock_block_height,
      utxos: [{
        redeem: redeemScript,
        script: $('.select-swap-address').data()[swapAddress],
        tokens: refundTokens,
        transaction_id: refundTxId,
        vout: refundTxVout,
      }],
    });
  } catch (err) {
    $('.signed-refund-transaction').val('');
    console.log([400, 'ERROR', err]);
    // Fix: previously switched on e.message (the submit event), so the
    // RefundValueTooSmall branch could never trigger; use the caught
    // error's message instead.
    switch (err.message) {
    case 'RefundValueTooSmall':
      $('.output-too-small.refund-tx-failure').collapse('show');
      break;
    default:
      $('.generic.refund-tx-failure').collapse('show');
      break;
    }
    return;
  }
  $('.refund-tx-failure').collapse('hide');
  $('.signed-refund-transaction').val(refund.transaction);
  $('.refund-tx-success').collapse('hide');
  // Broadcast the signed refund transaction
  App.makeRequest({
    post: {network, transaction: refund.transaction},
    api: 'transactions/',
  })
  .then(details => {
    if (!details.id) {
      return;
    }
    let txUrl;
    // Block explorer link for the broadcast refund tx
    switch (network) {
    case 'bch':
      txUrl = `https://www.blocktrail.com/BCC/tx/${details.id}`;
      break;
    case 'bchtestnet':
      txUrl = `https://www.blocktrail.com/tBCC/tx/${details.id}`;
      break;
    case 'bitcoin':
      txUrl = `https://smartbit.com.au/tx/${details.id}`;
      break;
    case 'testnet':
      txUrl = `https://testnet.smartbit.com.au/tx/${details.id}`;
      break;
    case 'ltc':
      txUrl = `https://chain.so/tx/LTC/${details.id}`;
      break;
    case 'ltctestnet':
      txUrl = `https://chain.so/tx/LTCTEST/${details.id}`;
      break;
    }
    $('.refund-tx-success').collapse('show');
    $('.refund-tx-success .refund-address').text(refundAddress);
    $('.refund-tx-success .link-to-refund').text(details.id);
    $('.refund-tx-success .link-to-refund').prop('href', txUrl);
    return;
  })
  .catch(err => {
    switch (err.message) {
    case 'ChainHeightNotReached':
      // Refund timelock has not expired yet
      $('.height-not-reached.refund-tx-failure').collapse('show');
      break;
    default:
      console.log([503, 'FailedToBroadcastTransaction', err]);
      break;
    }
    return;
  });
  return;
};
/** Update the swap details

  Refreshes the create-swap form: validates the refund address and
  invoice inputs, generates a fresh paper-wallet keypair for the
  invoice, and displays network-specific fee and price information.

  {
  swap: <Swap DOM Object>
  }
*/
App.updatedSwapDetails = ({swap}) => {
  if (!swap.find('.pay-to-lightning-invoice').length) {
    return;
  }
  const invoice = swap.find('.pay-to-lightning-invoice').val().trim();
  const network = swap.find('.select-currency').val();
  if (!invoice.length || !network) {
    return;
  }
  const address = swap.find('.refund-address').val().trim();
  const hasAddress = !!App.address_details[address];
  const hasInvoiceDetails = !!App.invoice_details[invoice];
  // A fresh keypair is generated on every refresh and stored below;
  // NOTE(review): this overwrites any prior keypair for the invoice
  const keyPair = blockchain.generateKeyPair({network});
  // NOTE(review): .find() always returns an object, so this condition
  // is always truthy — presumably intended to check element presence
  if (!!hasInvoiceDetails && !!swap.find('.refund-address-entry.hide')) {
    swap.find('.refund-address-entry').collapse('show');
  }
  const isPaperRefund = !!swap.find('#use-paper-wallet').is(':checked');
  if (!!hasAddress && !isPaperRefund) {
    swap.find('.refund-address')
      .addClass('is-valid')
      .removeClass('is-invalid');
  }
  swap.find('.pay-to-lightning-invoice')
    .toggleClass('is-valid', !!hasInvoiceDetails)
    .toggleClass('is-invalid', !hasInvoiceDetails && !!invoice);
  // Ready when the invoice is valid and a refund destination exists
  const isReady = (!!hasAddress || !!isPaperRefund) && !!hasInvoiceDetails;
  App.invoice_refund_keypairs[invoice] = keyPair;
  let baseFee = 0;
  let feePercentage = '';
  let fiatPrice = '';
  let networkAddressName = '';
  let totalFee = 0;
  // NOTE(review): conversionRate is assigned below but never read
  let conversionRate = 1;
  // Pull per-network fee and price figures from the cached rates
  switch (network) {
  case 'bch':
    if (!App.rates['bch']) {
      break;
    }
    baseFee = App.rates['bch'].fees[0].base;
    conversionRate = App.rates['bitcoin'].cents / App.rates['bch'].cents;
    feePercentage = App.rates['bch'].fees[0].rate / 1e6 * 100;
    fiatPrice = (App.rates['bch'].cents) * 1e8 / 100;
    networkAddressName = 'Bcash';
    break;
  case 'bchtestnet':
    if (!App.rates['bchtestnet']) {
      break;
    }
    baseFee = App.rates['bchtestnet'].fees[0].base;
    conversionRate = App.rates['testnet'].cents / App.rates['bchtestnet'].cents;
    feePercentage = App.rates['bchtestnet'].fees[0].rate / 1e6 * 100;
    fiatPrice = (App.rates['bchtestnet'].cents) * 1e8 / 100;
    networkAddressName = 'Bcash testnet';
    break;
  case 'bitcoin':
    if (!App.rates['bitcoin']) {
      break;
    }
    baseFee = App.rates['bitcoin'].fees[0].base;
    feePercentage = App.rates['bitcoin'].fees[0].rate / 1e6 * 100;
    fiatPrice = (App.rates['bitcoin'].cents) * 1e8 / 100;
    networkAddressName = 'Bitcoin';
    break;
  case 'ltc':
    if (!App.rates['ltc']) {
      break;
    }
    baseFee = App.rates['ltc'].fees[0].base;
    conversionRate = App.rates['bitcoin'].cents / App.rates['ltc'].cents;
    feePercentage = App.rates['ltc'].fees[0].rate / 1e6 * 100;
    fiatPrice = (App.rates['ltc'].cents) * 1e8 / 100;
    networkAddressName = 'Litecoin';
    break;
  case 'ltctestnet':
    if (!App.rates['ltctestnet']) {
      break;
    }
    baseFee = App.rates['ltctestnet'].fees[0].base;
    conversionRate = App.rates['testnet'].cents / App.rates['ltctestnet'].cents;
    feePercentage = App.rates['ltctestnet'].fees[0].rate / 1e6 * 100;
    fiatPrice = (App.rates['ltctestnet'].cents) * 1e8 / 100;
    networkAddressName = 'Litecoin testnet';
    break;
  case 'testnet':
    if (!App.rates['testnet']) {
      break;
    }
    baseFee = App.rates['testnet'].fees[0].base;
    feePercentage = App.rates['testnet'].fees[0].rate / 1e6 * 100;
    fiatPrice = (App.rates['testnet'].cents) * 1e8 / 100;
    networkAddressName = 'Bitcoin testnet';
    break;
  default:
    return console.log([0, 'UnexpectedNetworkName']);
    // NOTE(review): unreachable break after return
    break;
  }
  if (!!App.invoice_details[invoice]) {
    // NOTE(review): tokens is destructured but never used here
    const {tokens} = App.invoice_details[invoice];
    totalFee = App.invoice_details[invoice].fee_fiat_value / 100;
  }
  swap.find('.address-currency-label').text(networkAddressName);
  swap.find('.fee-percentage').text(feePercentage.toFixed(2));
  swap.find('.fiat-fee-total').text(totalFee.toFixed(2));
  swap.find('.final-fee').prop('hidden', !totalFee);
  swap.find('.make').toggleClass('disabled', !isReady);
  if (!!fiatPrice) {
    swap.find('.current-fiat-price').text(fiatPrice.toFixed(2));
  }
  return;
};
/** Update invoice details

  Parses the pay-to invoice input, fetches details for it from the swap
  server, and refreshes the quote display. Debounced by caching the last
  (network, invoice) pair on the swap element's data.

  {
    swap: <Create Swap Quote Swap DOM Object>
  }
*/
App.updateInvoiceDetails = ({swap}) => {
    const input = swap.find('.pay-to-lightning-invoice');
    const detailsDisplay = swap.find('.invoice-details');
    const invoice = input.val().trim();
    const network = swap.find('.select-currency').val();
    // Cache key: one lookup per (network, invoice) pair.
    const quoteFor = [network, invoice].join('/');
    // Exit early when the invoice has not changed
    if (swap.data().quote_for === quoteFor) {
        return;
    }
    swap.data({invoice, quote_for: quoteFor});
    detailsDisplay.collapse('hide');
    // NOTE(review): the indicator selectors below are page-global rather
    // than scoped via `swap.find` like the others — presumably only one
    // swap panel exists at a time; confirm before scoping them.
    $('.has-invoice-problem').prop('hidden', true);
    // Exit early when the invoice has been removed
    if (!invoice) {
        App.updatedSwapDetails({swap});
        $('.looking-up-invoice').prop('hidden', true);
        $('.not-looking-up-invoice').prop('hidden', false);
        $('.has-invoice-problem').prop('hidden', true);
        return input.removeClass('is-invalid').removeClass('is-valid');
    }
    // Show the "looking up" spinner while the async fetch runs.
    $('.looking-up-invoice').prop('hidden', false);
    $('.not-looking-up-invoice').prop('hidden', true);
    return App.getInvoiceDetails({invoice, network}, (err, details) => {
        // Exit early when input has changed while the fetch was happening.
        if (input.val().trim() !== invoice) {
            return;
        }
        $('.looking-up-invoice').prop('hidden', true);
        $('.not-looking-up-invoice').prop('hidden', false);
        $('.has-invoice-problem').prop('hidden', true);
        if (!!err) {
            const [errCode, errMessage] = err;
            detailsDisplay.collapse('hide');
            $('.has-invoice-problem').prop('hidden', false);
            $('.not-looking-up-invoice').prop('hidden', true);
            input.addClass('is-invalid');
            // Map known server/client error codes to user-facing text.
            let text;
            switch (errMessage) {
            case 'ExpectedBaseFeeRate':
                text = 'Using this currency to pay this invoice is not yet supported.';
                break;
            case 'ChainFeesTooHighToSwap':
                text = 'Value too low for a chain swap. Use a higher value invoice?';
                break;
            case 'DecodeInvoiceFailure':
                text = 'Couldn\'t read this invoice. Try a different one?';
                break;
            case 'Failed to fetch':
                text = `Couldn\'t connect to swap server. Try again?`;
                break;
            case 'FoundExistingFundingForInvoice':
                text = 'Existing swap found for this invoice. Try a new invoice?';
                break;
            case 'InsufficientCapacityForSwap':
                text = 'No route found to execute this swap. Try a smaller amount?';
                break;
            case 'InvoiceExpiresTooSoon':
                text = 'This invoice expires too soon, get a fresh invoice?';
                break;
            case 'NoCapacityToDestination':
                text = 'Can\'t send to this destination, establishing connectivity...';
                break;
            case 'PendingChannelToDestination':
                text = 'Channel to destination is still opening, try again later...';
                break;
            default:
                console.log('ERR', err);
                text = 'Unexpected error :( try again or with a different invoice?';
                break;
            }
            swap.find('.invoice-issue').text(text);
            return;
        }
        // Success: cache the details and refresh the rendered quote.
        App.invoice_details[invoice] = details;
        App.showInvoice({invoice, swap});
        App.updatedSwapDetails({swap});
        return;
    });
};
// Bootstrap the application once the DOM is ready.
$(() => App.init({}));
|
<filename>modules/caas/backend/src/main/java/io/cattle/platform/process/secret/SecretRemove.java
package io.cattle.platform.process.secret;
import io.cattle.platform.core.model.Secret;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.handler.ProcessHandler;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.framework.secret.SecretsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class SecretRemove implements ProcessHandler {
private static final Logger log = LoggerFactory.getLogger(SecretRemove.class);
SecretsService secretsService;
public SecretRemove(SecretsService secretsService) {
this.secretsService = secretsService;
}
@Override
public HandlerResult handle(ProcessState state, ProcessInstance process) {
Secret secret = (Secret)state.getResource();
try {
secretsService.delete(secret.getValue());
} catch (IOException e) {
log.error("Failed to delete secret from storage [{}]",
secret.getId(), e);
throw new IllegalStateException(e);
}
return null;
}
} |
<gh_stars>1-10
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const parser_1 = require("@babel/parser");
const babel_walk_1 = require("babel-walk");
const t = __importStar(require("@babel/types"));
const globals_1 = __importDefault(require("./globals"));
const parseOptions = {
allowReturnOutsideFunction: true,
allowImportExportEverywhere: true,
};
/**
 * Mimic `with` as far as possible but at compile time
 *
 * Rewrites `src` so that each free variable is looked up as a property of
 * `obj` first, falling back to an identically-named variable in the
 * enclosing scope, and finally to `undefined`.
 *
 * @param obj The object part of a with expression
 * @param src The body of the with expression
 * @param exclude A list of variable names to explicitly exclude
 */
function addWith(obj, src, exclude = []) {
    // tslint:disable-next-line: no-parameter-reassignment
    obj = obj + '';
    // tslint:disable-next-line: no-parameter-reassignment
    src = src + '';
    let ast;
    try {
        ast = parser_1.parse(src, parseOptions);
    }
    catch (e) {
        // `component` tells the caller which of the two inputs failed.
        throw Object.assign(new Error('Error parsing body of the with expression'), {
            component: 'src',
            babylonError: e,
        });
    }
    let objAst;
    try {
        objAst = parser_1.parse(obj, parseOptions);
    }
    catch (e) {
        throw Object.assign(new Error('Error parsing object part of the with expression'), {
            component: 'obj',
            babylonError: e,
        });
    }
    // Names that must NOT be redirected through `obj`: keywords/literals,
    // caller-supplied exclusions, and any name the object expression
    // itself references (redirecting those would be circular).
    const excludeSet = new Set([
        'undefined',
        'this',
        ...exclude,
        ...globals_1.default(objAst).map((g) => g.name),
    ]);
    // Free variables of `src` that should resolve against `obj`.
    const vars = new Set(globals_1.default(ast)
        .map((global) => global.name)
        .filter((v) => !excludeSet.has(v)));
    // Nothing to rewrite; return the body unchanged.
    if (vars.size === 0)
        return src;
    let declareLocal = '';
    let local = 'locals_for_with';
    let result = 'result_of_with';
    if (t.isValidIdentifier(obj)) {
        // The object expression is already a bare identifier: use it directly.
        local = obj;
    }
    else {
        // Grow the temp name with underscores until it collides with nothing.
        while (vars.has(local) || excludeSet.has(local)) {
            local += '_';
        }
        declareLocal = `var ${local} = (${obj});`;
    }
    while (vars.has(result) || excludeSet.has(result)) {
        result += '_';
    }
    // One argument per free variable: prefer the property on `local`,
    // then a same-named outer variable, then undefined.
    const args = [
        'this',
        ...Array.from(vars).map((v) => `${JSON.stringify(v)} in ${local} ?
${local}.${v} :
typeof ${v} !== 'undefined' ? ${v} : undefined`),
    ];
    const unwrapped = unwrapReturns(ast, src, result);
    return `;
${declareLocal}
${unwrapped.before}
(function (${Array.from(vars).join(', ')}) {
${unwrapped.body}
}.call(${args.join(', ')}));
${unwrapped.after};`;
}
exports.default = addWith;
// AST walker that rewrites the body's own `return` statements; returns
// inside nested functions are intentionally left untouched.
const unwrapReturnsVisitors = babel_walk_1.recursive({
    Function(_node, _state, _c) {
        // returns in these functions are not applicable
    },
    ReturnStatement(node, state) {
        // Replace `return X` with `return {value: (X)};` so the wrapper
        // can distinguish an explicit return from falling off the end.
        state.hasReturn = true;
        let value = '';
        if (node.argument) {
            value = `value: (${state.source(node.argument)})`;
        }
        state.replace(node, `return {${value}};`);
    },
});
/**
 * Take a self calling function, and unwrap it such that return inside the function
 * results in return outside the function
 *
 * @param src Some JavaScript code representing a self-calling function
 * @param result A temporary variable to store the result in
 */
function unwrapReturns(ast, src, result) {
    // Edit the source as a per-character array so the AST's start/end
    // offsets remain valid while text is being replaced.
    const charArray = src.split('');
    const state = {
        hasReturn: false,
        source(node) {
            return src.slice(node.start, node.end);
        },
        replace(node, str) {
            // Blank out the node's original characters, then place the
            // replacement text in the first slot of the blanked range.
            charArray.fill('', node.start, node.end);
            charArray[node.start] = str;
        },
    };
    unwrapReturnsVisitors(ast, state);
    return {
        before: state.hasReturn ? `var ${result} = ` : '',
        body: charArray.join(''),
        after: state.hasReturn ? `;if (${result}) return ${result}.value` : '',
    };
}
module.exports = addWith;
module.exports.default = addWith;
//# sourceMappingURL=index.js.map
<reponame>nokok/Draft
import junit.framework.Test;
import junit.framework.TestCase;
import net.nokok.draft.Injector;
import net.nokok.testdata.JSRModule;
import org.atinject.tck.Tck;
import org.atinject.tck.auto.Car;

/**
 * Runs the JSR-330 (javax.inject) Technology Compatibility Kit against
 * a {@link Car} built by the Draft injector.
 */
public class JSR330Tck extends TestCase {

    /**
     * @return the TCK suite for the injected car; static/private tests
     *         are configured off and on respectively (false, true).
     */
    public static Test suite() {
        final Car candidate = Injector.fromModule(JSRModule.class).getInstance(Car.class);
        return Tck.testsFor(candidate, false, true);
    }
}
|
<reponame>johannalee/react-vtree
import {mount, ReactWrapper} from 'enzyme';
import * as React from 'react';
import {FixedSizeList} from 'react-window';
import {Row} from '../src';
import FixedSizeTree, {
FixedSizeNodeComponentProps,
FixedSizeNodeData,
FixedSizeTreeProps,
FixedSizeTreeState,
} from '../src/FixedSizeTree';
type DataNode = {
children?: DataNode[];
id: string;
name: string;
};
type StackElement = {
nestingLevel: number;
node: DataNode;
};
type ExtendedData = {
readonly name: string;
readonly nestingLevel: number;
};
describe('FixedSizeTree', () => {
const Node: React.FunctionComponent<FixedSizeNodeComponentProps<
ExtendedData
>> = () => null;
let component: ReactWrapper<
FixedSizeTreeProps<ExtendedData>,
FixedSizeTreeState<ExtendedData>,
FixedSizeTree<ExtendedData>
>;
let tree: DataNode;
let treeWalkerSpy: jest.Mock;
let isOpenByDefault: boolean;
function* treeWalker(
refresh: boolean,
): Generator<
FixedSizeNodeData<ExtendedData> | string | symbol,
void,
boolean
> {
const stack: StackElement[] = [];
stack.push({
nestingLevel: 0,
node: tree,
});
while (stack.length !== 0) {
const {node, nestingLevel} = stack.pop()!;
const id = node.id.toString();
const childrenCount = node.children ? node.children.length : 0;
const isOpened = yield refresh
? {
id,
isOpenByDefault,
name: node.name,
nestingLevel,
}
: id;
if (childrenCount && isOpened) {
// tslint:disable-next-line:increment-decrement
for (let i = childrenCount - 1; i >= 0; i--) {
stack.push({
nestingLevel: nestingLevel + 1,
node: node.children![i],
});
}
}
}
}
beforeEach(() => {
tree = {
children: [
{id: 'foo-2', name: 'Foo #2'},
{id: 'foo-3', name: 'Foo #3'},
],
id: 'foo-1',
name: 'Foo #1',
};
isOpenByDefault = true;
treeWalkerSpy = jest.fn(treeWalker);
component = mount(
<FixedSizeTree<ExtendedData>
itemSize={30}
treeWalker={treeWalkerSpy}
height={500}
width={500}
>
{Node}
</FixedSizeTree>,
);
});
it('renders a component', () => {
expect(component).not.toBeUndefined();
});
it('contains a FixedSizeList component', () => {
const list = component.find(FixedSizeList);
expect(list).toHaveLength(1);
expect(list.props()).toMatchObject({
children: Row,
itemCount: 3,
itemData: {
component: Node,
order: ['foo-1', 'foo-2', 'foo-3'],
records: {
'foo-1': {
data: {
id: 'foo-1',
isOpenByDefault: true,
name: 'Foo #1',
nestingLevel: 0,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-2': {
data: {
id: 'foo-2',
isOpenByDefault: true,
name: 'Foo #2',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-3': {
data: {
id: 'foo-3',
isOpenByDefault: true,
name: 'Foo #3',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
},
},
itemSize: 30,
});
});
it('allows providing custom row component', () => {
const rowComponent = () => null;
component = mount(
<FixedSizeTree {...component.props()} rowComponent={rowComponent} />,
);
expect(component.find(FixedSizeList).prop('children')).toBe(rowComponent);
});
it('recomputes on new treeWalker', () => {
treeWalkerSpy = jest.fn(treeWalker);
component.setProps({
treeWalker: treeWalkerSpy,
});
expect(treeWalkerSpy).toHaveBeenCalledWith(true);
});
it('does not recompute if treeWalker is the same', () => {
treeWalkerSpy.mockClear();
component.setProps({
treeWalker: treeWalkerSpy,
});
expect(treeWalkerSpy).not.toHaveBeenCalled();
});
describe('component instance', () => {
let treeInstance: FixedSizeTree<ExtendedData>;
beforeEach(() => {
treeInstance = component.instance();
});
describe('scrollTo', () => {
let listInstance: FixedSizeList;
beforeEach(() => {
listInstance = component
.find(FixedSizeList)
.instance() as FixedSizeList;
});
it('can scroll to a specific offset', () => {
const scrollToSpy = spyOn(listInstance, 'scrollTo');
treeInstance.scrollTo(200);
expect(scrollToSpy).toHaveBeenCalledWith(200);
});
it('can scroll to an item', () => {
const scrollToItemSpy = spyOn(listInstance, 'scrollToItem');
treeInstance.scrollToItem('foo-3', 'auto');
expect(scrollToItemSpy).toHaveBeenCalledWith(2, 'auto');
});
});
describe('recomputeTree', () => {
it('updates tree order', async () => {
tree = {
children: [
{id: 'foo-3', name: 'Foo #3'},
{id: 'foo-2', name: 'Foo #2'},
],
id: 'foo-1',
name: 'Foo #1',
};
await treeInstance.recomputeTree();
component.update(); // update the wrapper to get the latest changes
expect(component.find(FixedSizeList).prop('itemData')).toMatchObject({
component: Node,
order: ['foo-1', 'foo-3', 'foo-2'],
records: {
'foo-1': {
data: {
id: 'foo-1',
isOpenByDefault: true,
name: 'Foo #1',
nestingLevel: 0,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-2': {
data: {
id: 'foo-2',
isOpenByDefault: true,
name: 'Foo #2',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-3': {
data: {
id: 'foo-3',
isOpenByDefault: true,
name: 'Foo #3',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
},
treeData: undefined,
});
});
it('updates tree nodes metadata', async () => {
tree = {
children: [
{id: 'foo-3', name: 'Foo #3 Bar'},
{id: 'foo-2', name: 'Foo #2 Bar'},
],
id: 'foo-1',
name: 'Foo #1 Bar',
};
await treeInstance.recomputeTree({refreshNodes: true});
component.update(); // update the wrapper to get the latest changes
expect(component.find(FixedSizeList).prop('itemData')).toMatchObject({
component: Node,
order: ['foo-1', 'foo-3', 'foo-2'],
records: {
'foo-1': {
data: {
id: 'foo-1',
isOpenByDefault: true,
name: 'Foo #1 Bar',
nestingLevel: 0,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-2': {
data: {
id: 'foo-2',
isOpenByDefault: true,
name: 'Foo #2 Bar',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-3': {
data: {
id: 'foo-3',
isOpenByDefault: true,
name: 'Foo #3 Bar',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
},
treeData: undefined,
});
});
it('resets current openness to default', async () => {
const records = component.state('records');
for (const id in records) {
records[id].isOpen = false;
}
// Imitate closing the foo-1 node
component.setState({
order: ['foo-1'],
records,
});
await treeInstance.recomputeTree({useDefaultOpenness: true});
component.update(); // update the wrapper to get the latest changes
// foo-1 node is open again
expect(component.find(FixedSizeList).prop('itemData')).toMatchObject({
component: Node,
order: ['foo-1', 'foo-2', 'foo-3'],
records: {
'foo-1': {
data: {
id: 'foo-1',
isOpenByDefault: true,
name: 'Foo #1',
nestingLevel: 0,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-2': {
data: {
id: 'foo-2',
isOpenByDefault: true,
name: 'Foo #2',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
'foo-3': {
data: {
id: 'foo-3',
isOpenByDefault: true,
name: 'Foo #3',
nestingLevel: 1,
},
isOpen: true,
toggle: expect.any(Function),
},
},
treeData: undefined,
});
});
it('resets current openness to the new default provided by the node refreshing', async () => {
  // Re-running the walker with refreshNodes reports the new default
  // (closed); useDefaultOpenness applies it to the current state.
  isOpenByDefault = false;
  await treeInstance.recomputeTree({
    refreshNodes: true,
    useDefaultOpenness: true,
  });
  component.update(); // update the wrapper to get the latest changes
  expect(component.find(FixedSizeList).prop('itemData')).toMatchObject({
    component: Node,
    order: ['foo-1'],
    records: {
      'foo-1': {
        data: {
          id: 'foo-1',
          isOpenByDefault: false,
          name: 'Foo #1',
          nestingLevel: 0,
        },
        isOpen: false,
        toggle: expect.any(Function),
      },
      // Child nodes of the closed one are omitted
      'foo-2': {
        data: {
          id: 'foo-2',
          isOpenByDefault: true,
          name: 'Foo #2',
          nestingLevel: 1,
        },
        isOpen: true,
        toggle: expect.any(Function),
      },
      'foo-3': {
        data: {
          id: 'foo-3',
          isOpenByDefault: true,
          // Fixed: was the corrupted literal '<NAME>'; the fixture node
          // foo-3 is named 'Foo #3' everywhere else in this file.
          name: 'Foo #3',
          nestingLevel: 1,
        },
        isOpen: true,
        toggle: expect.any(Function),
      },
    },
    treeData: undefined,
  });
});
it('provides a toggle function that changes openness state of the specific node', async () => {
const foo1 = component.state('records')['foo-1'];
treeWalkerSpy.mockClear();
// Imitate the behavior of Node component where toggle is sent without
// context
const {toggle} = foo1;
await toggle();
expect(treeWalkerSpy).toHaveBeenCalledWith(false);
expect(foo1.isOpen).toBeFalsy();
});
});
});
});
|
package io.opensphere.mantle.plugin.queryregion.impl;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import org.easymock.EasyMock;
import org.easymock.IArgumentMatcher;
import org.junit.Assert;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import io.opensphere.core.datafilter.DataFilter;
import io.opensphere.core.geometry.PolygonGeometry;
import io.opensphere.core.geometry.constraint.Constraints;
import io.opensphere.core.geometry.renderproperties.DefaultPolygonRenderProperties;
import io.opensphere.core.geometry.renderproperties.PolygonRenderProperties;
import io.opensphere.core.geometry.renderproperties.ZOrderRenderProperties;
import io.opensphere.core.model.Altitude.ReferenceLevel;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.GeographicPositionArrayList;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.modulestate.ModuleStateController;
import io.opensphere.core.modulestate.StateXML;
import io.opensphere.core.units.duration.Days;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.collections.petrifyable.PetrifyableTDoubleArrayList;
import io.opensphere.core.util.xml.MutableNamespaceContext;
import io.opensphere.mantle.plugin.queryregion.QueryRegion;
import io.opensphere.mantle.plugin.queryregion.QueryRegionManager;
/** Tests for {@link QueryRegionStateController}. */
public class QueryRegionStateControllerTest
{
/**
* Test for
* {@link QueryRegionStateController#activateState(String, String, Collection, Node)}
* and {@link QueryRegionStateController#deactivateState(String, Node)}.
*
* @throws ParserConfigurationException If the test fails.
*/
@Test
public void testActivateDeactivate() throws ParserConfigurationException
{
double[] vertices = getTestVertices1();
String coordinates = getTestKmlCoordinates(vertices);
Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
Node queryAreasNode = doc.appendChild(StateXML.createElement(doc, ModuleStateController.STATE_NAME))
.appendChild(StateXML.createElement(doc, "queryAreas"));
Node queryAreaNode = queryAreasNode.appendChild(StateXML.createElement(doc, "queryArea"));
queryAreaNode.appendChild(createKmlElement(doc, "Polygon")).appendChild(createKmlElement(doc, "outerBoundaryIs"))
.appendChild(createKmlElement(doc, "LinearRing")).appendChild(createKmlElement(doc, "coordinates"))
.setTextContent(coordinates);
final String layer1 = "layer1";
queryAreaNode.appendChild(StateXML.createElement(doc, "layer")).setTextContent(layer1);
final String layer2 = "layer2";
queryAreaNode.appendChild(StateXML.createElement(doc, "layer")).setTextContent(layer2);
Collection<GeographicPosition> positions = getTestPositions(vertices);
Collection<? extends TimeSpan> validTimes = Collections.singleton(TimeSpan.TIMELESS);
QueryRegionManager queryRegionManager = EasyMock.createMock(QueryRegionManager.class);
QueryRegion region = EasyMock.createMock(QueryRegion.class);
Map<String, DataFilter> dataTypeToFilterMap = New.map();
Arrays.asList(layer1 + "!!stateId", layer2 + "!!stateId").stream().forEach(k -> dataTypeToFilterMap.put(k, null));
EasyMock.expect(
queryRegionManager.addQueryRegion(matches(positions), EasyMock.eq(validTimes), EasyMock.eq(dataTypeToFilterMap)))
.andReturn(region);
EasyMock.replay(queryRegionManager, region);
QueryRegionStateController controller = new QueryRegionStateController(queryRegionManager, null);
String stateId = "stateId";
controller.activateState(stateId, null, null, doc.getDocumentElement());
EasyMock.verify(queryRegionManager, region);
EasyMock.reset(queryRegionManager);
queryRegionManager.removeQueryRegions(Collections.singletonList(region));
EasyMock.replay(queryRegionManager);
controller.deactivateState(stateId, queryAreaNode);
EasyMock.verify(queryRegionManager, region);
}
/**
* Test for {@link QueryRegionStateController#canActivateState(Node)}.
*
* @throws ParserConfigurationException If the test fails.
* @throws XPathExpressionException If the test fails.
*/
@Test
public void testCanActivateState() throws XPathExpressionException, ParserConfigurationException
{
Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
QueryRegionStateController controller = new QueryRegionStateController(null, null);
Assert.assertFalse(controller.canActivateState(doc));
doc.appendChild(StateXML.createElement(doc, ModuleStateController.STATE_NAME))
.appendChild(StateXML.createElement(doc, "queryAreas"));
Assert.assertTrue(controller.canActivateState(doc));
}
/**
* Test for {@link QueryRegionStateController#saveState(Node)}.
*
* @throws ParserConfigurationException If the test fails.
* @throws XPathExpressionException If the test fails.
*/
@Test
public void testSaveState() throws ParserConfigurationException, XPathExpressionException
{
PolygonGeometry.Builder<GeographicPosition> builder = new PolygonGeometry.Builder<>();
double[] vertices1 = getTestVertices1();
double[] vertices2 = getTestVertices2();
PolygonRenderProperties renderProperties = new DefaultPolygonRenderProperties(ZOrderRenderProperties.TOP_Z, true, true);
builder.setVertices(getTestPositions(vertices1));
PolygonGeometry geom1 = new PolygonGeometry(builder, renderProperties, (Constraints)null);
QueryRegion region1 = EasyMock.createMock(QueryRegion.class);
region1.getGeometries();
EasyMock.expectLastCall().andReturn(Collections.singletonList(geom1));
region1.getValidTimes();
EasyMock.expectLastCall().andReturn(Collections.singletonList(TimeSpan.TIMELESS)).anyTimes();
List<String> typeKeys1 = Arrays.asList("type1", "type2");
region1.getTypeKeys();
EasyMock.expectLastCall().andReturn(typeKeys1);
Map<String, DataFilter> filterMap = New.map();
filterMap.put("type1", null);
filterMap.put("type2", null);
region1.getTypeKeyToFilterMap();
EasyMock.expectLastCall().andReturn(filterMap);
builder.setVertices(getTestPositions(vertices2));
PolygonGeometry geom2 = new PolygonGeometry(builder, renderProperties, (Constraints)null);
QueryRegion region2 = EasyMock.createMock(QueryRegion.class);
region2.getGeometries();
EasyMock.expectLastCall().andReturn(Collections.singletonList(geom2));
region2.getValidTimes();
Calendar cal = Calendar.getInstance();
cal.set(Calendar.MILLISECOND, 0);
TimeSpan validTime = TimeSpan.get(cal.getTime(), Days.ONE);
EasyMock.expectLastCall().andReturn(Collections.singletonList(validTime)).anyTimes();
region2.getTypeKeys();
List<String> typeKeys2 = Arrays.asList("type1", "type2");
EasyMock.expectLastCall().andReturn(typeKeys2);
region2.getTypeKeyToFilterMap();
EasyMock.expectLastCall().andReturn(filterMap);
List<QueryRegion> regions = New.list();
regions.add(region1);
regions.add(region2);
QueryRegionManager queryRegionManager = EasyMock.createMock(QueryRegionManager.class);
queryRegionManager.getQueryRegions();
EasyMock.expectLastCall().andReturn(regions);
EasyMock.replay(region1, region2, queryRegionManager);
Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
Node stateNode = doc.appendChild(StateXML.createElement(doc, ModuleStateController.STATE_NAME));
QueryRegionStateController controller = new QueryRegionStateController(queryRegionManager, null);
controller.saveState(stateNode);
EasyMock.verify(region1, region2, queryRegionManager);
XPath xpath = StateXML.newXPath();
((MutableNamespaceContext)xpath.getNamespaceContext()).addNamespace("kml", "http://www.opengis.net/kml/2.2");
Assert.assertEquals(getTestKmlCoordinates(vertices1),
xpath.evaluate(
"/" + ModuleStateController.STATE_QNAME
+ "/:queryAreas/:queryArea/kml:Polygon/kml:outerBoundaryIs/kml:LinearRing/kml:coordinates",
stateNode));
Assert.assertEquals("",
xpath.evaluate("/" + ModuleStateController.STATE_QNAME + "/:queryAreas/:queryArea[1]/:validTime", stateNode));
Assert.assertEquals(validTime.toISO8601String(),
xpath.evaluate("/" + ModuleStateController.STATE_QNAME + "/:queryAreas/:queryArea[2]/:validTime", stateNode));
Assert.assertEquals(typeKeys1.get(0),
xpath.evaluate("/" + ModuleStateController.STATE_QNAME + "/:queryAreas/:queryArea[1]/:layer[1]", stateNode));
Assert.assertEquals(typeKeys1.get(1),
xpath.evaluate("/" + ModuleStateController.STATE_QNAME + "/:queryAreas/:queryArea[1]/:layer[2]", stateNode));
Assert.assertEquals(typeKeys2.get(0),
xpath.evaluate("/" + ModuleStateController.STATE_QNAME + "/:queryAreas/:queryArea[2]/:layer[1]", stateNode));
Assert.assertEquals(typeKeys2.get(1),
xpath.evaluate("/" + ModuleStateController.STATE_QNAME + "/:queryAreas/:queryArea[2]/:layer[2]", stateNode));
}
/**
* Create a KML element with the proper namespace.
*
* @param doc The owner of the element.
* @param name The name of the element.
* @return The element.
*/
private Element createKmlElement(Document doc, String name)
{
Element polygonElement = doc.createElementNS("http://www.opengis.net/kml/2.2", name);
polygonElement.setPrefix("kml");
return polygonElement;
}
/**
 * Convert the input array of vertices (lon/lat/alt) to a KML coordinate
 * string.
 *
 * @param vertices The vertices, as repeated lon/lat/alt triples.
 * @return Space-separated KML tuples of the form "lon,lat" or
 *         "lon,lat,alt" (altitude included only when positive).
 */
private String getTestKmlCoordinates(double[] vertices)
{
    StringBuilder sb = new StringBuilder();
    for (int index = 0; index < vertices.length;)
    {
        sb.append(vertices[index++]).append(',').append(vertices[index++]);
        double alt = vertices[index++];
        if (alt > 0.)
        {
            // Append the altitude itself. The previous code appended
            // vertices[index++] here, which emitted the NEXT vertex's
            // component instead of the altitude and skipped an element
            // whenever a non-zero altitude was present (as in
            // getTestVertices2()).
            sb.append(',').append(alt);
        }
        sb.append(' ');
    }
    // Drop the trailing separator space.
    sb.setLength(sb.length() - 1);
    return sb.toString();
}
/**
* Convert the input array of vertices (lon/lat/alt) to geographic
* positions.
*
* @param vertices The vertices.
* @return The positions.
*/
private List<GeographicPosition> getTestPositions(double[] vertices)
{
// Remove the duplicate coordinate and switch lon/lat to lat/lon.
PetrifyableTDoubleArrayList data = new PetrifyableTDoubleArrayList();
for (int index = 0; index < vertices.length - 3; index += 3)
{
data.add(vertices[index + 1]);
data.add(vertices[index]);
data.add(vertices[index + 2]);
}
List<GeographicPosition> positions = GeographicPositionArrayList.createFromDegreesMeters(data, ReferenceLevel.TERRAIN);
return positions;
}
/**
* Get the test vertices in lon/lat/alt order.
*
* @return The vertices.
*/
private double[] getTestVertices1()
{
return new double[] { 10., 10., 0., 10., 11., 0., 11., 11., 0., 11., 10., 0., 10., 10., 0. };
}
/**
* Get the test vertices in lon/lat/alt order.
*
* @return The vertices.
*/
private double[] getTestVertices2()
{
return new double[] { 15., 15., 1., 15., 16., 2., 16., 16., 2., 16., 15., 1., 15., 15., 1. };
}
/**
* Set up an EasyMock matcher for the polygon vertices.
*
* @param positions The vertices.
* @return {@code null}
*/
private Collection<? extends PolygonGeometry> matches(final Collection<GeographicPosition> positions)
{
EasyMock.reportMatcher(new IArgumentMatcher()
{
@Override
public void appendTo(StringBuffer buffer)
{
buffer.append("matches(Collection<GeographicPosition>)");
}
@Override
public boolean matches(Object argument)
{
if (argument instanceof Iterable)
{
for (Object obj : (Iterable<?>)argument)
{
if (obj instanceof PolygonGeometry)
{
return ((PolygonGeometry)obj).getVertices().equals(positions);
}
return false;
}
return true;
}
return false;
}
});
return null;
}
}
|
import React from 'react';
import { connect } from 'react-redux';
import { saveDelay } from 'config';
import { clear, selectBookmark } from 'redux/modules/inspector';
import { resetEditState, edit } from 'redux/modules/list';
import { SidebarListViewerUI } from 'components/controls';
/**
 * Map the immutable app state onto props for SidebarListViewerUI.
 * `activeBookmark` is the index of the bookmark whose id matches the
 * active bookmark id in controls, or -1 when none is active.
 */
const mapStateToProps = ({ app }) => {
  const activeBookmarkId = app.getIn(['controls', 'activeBookmarkId']);
  const bookmarks = app.getIn(['list', 'bookmarks']);

  const activeBookmark = activeBookmarkId
    ? bookmarks.findIndex((bookmark) => bookmark.get('id') === activeBookmarkId)
    : -1;

  return {
    activeBookmark,
    bookmarks,
    collection: app.get('collection'),
    list: app.get('list'),
    listEdited: app.getIn(['list', 'edited']),
    timestamp: app.getIn(['controls', 'timestamp']),
    url: app.getIn(['controls', 'url'])
  };
};
/**
 * Bind inspector/list actions to dispatch. `editList` saves the list
 * and, on success, clears the "edited" flag after `saveDelay`; save
 * failures are intentionally ignored (best effort).
 */
const mapDispatchToProps = (dispatch) => ({
  clearInspector: () => dispatch(clear()),
  editList: (user, coll, listId, data) => {
    const scheduleEditReset = () => setTimeout(() => dispatch(resetEditState()), saveDelay);
    dispatch(edit(user, coll, listId, data)).then(scheduleEditReset, () => {});
  },
  setInspector: (bk) => dispatch(selectBookmark(bk)),
  dispatch
});
// Wire the sidebar list viewer UI into the redux store.
export default connect(
  mapStateToProps,
  mapDispatchToProps
)(SidebarListViewerUI);
|
<gh_stars>100-1000
"""Packaging script for the ``best`` library."""

try:
    # Prefer setuptools: distutils is deprecated (PEP 632) and removed
    # from the standard library in Python 3.12+.
    from setuptools import setup
except ImportError:  # very old environments only
    from distutils.core import setup

setup(
    name='best',
    description='Bayesian estimation supersedes the t test',
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/strawlab/best',
    version='1.0',
    packages=['best'],
    license='MIT',
)
|
#!/usr/bin/env bash
# Copyright 2020 Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Trace commands; abort on errors, unset variables, and pipeline failures.
set -x
set -o errexit
set -o nounset
set -o pipefail

# Positional arguments: repository name, git clone URL, and tag to build.
REPO="$1"
CLONE_URL="$2"
TAG="$3"

# Output layout: binaries land under _output/bin/<repo>/<os>-<arch>.
BIN_ROOT="_output/bin"
BIN_PATH=$BIN_ROOT/$REPO
GOLANG_VERSION="1.14"

# Target platforms as GOOS/GOARCH pairs.
readonly SUPPORTED_PLATFORMS=(
  linux/amd64
  linux/arm64
)

# Absolute path of this script's parent directory; pulls in shared helpers
# (build::common::use_go_version, build::gather_licenses).
MAKE_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd -P)"
source "${MAKE_ROOT}/../../../build/lib/common.sh"
# Clone the repo at the requested tag, cross-compile it for every supported
# platform, gather license files, and clean up the working tree.
# All parameter expansions are quoted so paths containing spaces or glob
# characters cannot be word-split (the originals were unquoted).
function build::etcd::binaries(){
  mkdir -p "$BIN_PATH"
  git clone "$CLONE_URL" "$REPO"
  cd "$REPO"
  git checkout "$TAG"
  build::common::use_go_version "$GOLANG_VERSION"
  go mod vendor
  for platform in "${SUPPORTED_PLATFORMS[@]}";
  do
    OS="$(cut -d '/' -f1 <<< "${platform}")"
    ARCH="$(cut -d '/' -f2 <<< "${platform}")"
    # Strip symbols and build ids for smaller, reproducible binaries.
    GOOS=$OS GOARCH=$ARCH GO_LDFLAGS="-s -w -buildid=''" ./build
    mkdir -p "../${BIN_PATH}/${OS}-${ARCH}"
    mv bin/* "../${BIN_PATH}/${OS}-${ARCH}"
    make clean
  done
  build::gather_licenses ./ "$MAKE_ROOT/LICENSES"
  cd ..
  rm -rf "$REPO"
}
build::etcd::binaries
|
package com.honyum.elevatorMan.net;

import com.honyum.elevatorMan.data.BranchInfo;
import com.honyum.elevatorMan.net.base.Response;

import java.util.List;

/**
 * Response carrying the list of branches returned by the server.
 */
public class GetBranchResponse extends Response {

    /** Branch records contained in the response body. */
    private List<BranchInfo> body;

    public List<BranchInfo> getBody() {
        return body;
    }

    public void setBody(List<BranchInfo> body) {
        this.body = body;
    }

    /**
     * Builds a response object from its JSON representation.
     *
     * @param json the serialized response
     * @return the parsed {@link GetBranchResponse}
     */
    public static GetBranchResponse getGetBranchResponse(String json) {
        return (GetBranchResponse) parseFromJson(GetBranchResponse.class, json);
    }
}
|
package com.mblinn.mbfpp.oo.fi.personexpanded
object ClosureExample {
  /** Immutable value holding the three parts of a person's name. */
  case class Person(firstName: String, middleName: String, lastName: String)

  /** Builds a composite comparator: applies the given comparisons in order
    * and yields the first non-zero result, or 0 when all of them tie. */
  def makeComposedComparison(comparisons: (Person, Person) => Int*) =
    (p1: Person, p2: Person) =>
      comparisons.foldLeft(0) { (acc, cmp) =>
        if (acc != 0) acc else cmp(p1, p2)
      }

  /** Orders by first name only. */
  def firstNameComparison(p1: Person, p2: Person) =
    p1.firstName.compareTo(p2.firstName)

  /** Orders by last name only. */
  def lastNameComparison(p1: Person, p2: Person) =
    p1.lastName.compareTo(p2.lastName)

  /** First name, then last name as a tie-breaker. */
  val firstAndLastNameComparison = makeComposedComparison(
    firstNameComparison, lastNameComparison
  )

  val p1 = Person("John", "", "Adams")
  val p2 = Person("John", "Quincy", "Adams")
}
# Generated by Django 2.2.3 on 2019-08-06 10:51
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.3; keep operations in sync with models.py
    # rather than hand-editing the migration history.

    dependencies = [
        ('antiqueProjectApp', '0003_auto_20190806_1052'),
    ]

    operations = [
        # Re-declare Antique.AntiqueType as a many-to-many relation with
        # updated help text.
        migrations.AlterField(
            model_name='antique',
            name='AntiqueType',
            field=models.ManyToManyField(help_text='Select a type for this antique', to='antiqueProjectApp.AntiqueType'),
        ),
    ]
|
<reponame>bcgov/citz-imb-sp-vdr
import React from 'react'
export const SelectColumnFilter = (props) => {
const {
column: { filterValue, setFilter, preFilteredRows, id },
} = props
// Calculate the options for filtering
// using the preFilteredRows
const options = React.useMemo(() => {
const options = new Set()
preFilteredRows.forEach((row) => {
if(row.values[id] === null) return
options.add(row.values[id])
})
return [...options.values()]
}, [id, preFilteredRows])
// Render a multi-select box
return (
<select
value={filterValue}
onChange={(e) => {
setFilter(e.target.value || undefined)
}}>
<option value=''>All</option>
{options.map((option, i) => (
<option key={i} value={option}>
{option}
</option>
))}
</select>
)
}
|
<!-- Legacy IE support: filter:alpha(opacity=100) and zoom:1 (hasLayout hack)
     only affect IE < 9; modern browsers use background-color alone. -->
<div class="myclass" style="background-color:#000000; filter:alpha(opacity=100); zoom:1;">
<span>My Text</span>
</div>
#!/usr/bin/env bash
# Smoke-test a PHP 7.4 Docker image: both the CLI and the FPM binaries
# must report a 7.4.x version. grep exits non-zero on a mismatch, which
# fails the script because of set -e / pipefail.
set -e
set -u
set -o pipefail

# Image tag under test, passed as the first CLI argument.
IMAGE="${1}"

docker run --rm --entrypoint=php "${IMAGE}" -v | grep -E '^PHP 7\.4'
docker run --rm --entrypoint=php-fpm "${IMAGE}" -v | grep -E '^PHP 7\.4'
|
<gh_stars>0
package ua.artcode.market.interfaces;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import ua.artcode.market.controllers.IAppDbImpl;
import ua.artcode.market.controllers.ITerminalControllerImpl;
import ua.artcode.market.exclude.exception.*;
import ua.artcode.market.models.AbstractProduct;
import ua.artcode.market.models.Bill;
import ua.artcode.market.models.employee.Salesman;
import static org.junit.Assert.*;
public class IAppDbTest {
private IAppDb iAppDb;
private ITerminalController iTerminalController;
@Before
public void setUp() throws Exception {
iAppDb = new IAppDbImpl();
iTerminalController = new ITerminalControllerImpl(iAppDb);
}
@After
public void tearDown() throws Exception {
iAppDb = null;
iTerminalController = null;
}
@Test
public void findBillById() throws Exception {
iTerminalController.createBill(new Salesman());
iTerminalController.createBill(new Salesman());
Bill bill3 = iTerminalController.createBill(new Salesman());
iTerminalController.createBill(new Salesman());
// System.out.println(bill1.toString() + bill2.toString() +
// bill4.toString());
Bill bill = iAppDb.findBillById(3);
// System.out.println(bill);
assertTrue(bill3.getId() == bill.getId());
}
@Test
public void findBillByIdNeg() throws Exception {
Bill bill = null;
try {
bill = iAppDb.findBillById(1);
} catch (BillNotFoundException ignored) {
/*System.out.println("Bill not found");
ignored.printStackTrace();*/
assertNull(bill);
}
}
@Test
public void findProductByIdNeg() throws Exception {
AbstractProduct expected = null;
AbstractProduct actual = null;
try {
actual = iAppDb.findProductById(10);
} catch (ProductNotFoundException e) {
assertEquals(expected, actual);
}
}
} |
<reponame>hyqLeonardo/dsa
#include <stdio.h>
#include "graph_adj.h"
/* Resets the graph to an empty state (no vertices, no edges). */
void init_graph(DirGraph *graph) {
    graph->vertex_num = 0;
    graph->edge_num = 0;
}
/* Vertex visit callback: prints the vertex value when print is non-zero. */
void visit_vertex(VertexNode *v, int print) {
    if (print) printf("%d ", v->data);
}
/* Edge visit callback: prints "(src -> dst)" when print is non-zero. */
void visit_edge(VertexNode *v, EdgeNode *e, int print) {
    if (print) printf("(%d -> %d) ", v->data, e->vertex->data);
}
/* Prints every vertex value in storage order. */
void print_graph_vertex(DirGraph *graph) {
    int i;
    for (i = 0; i < graph->vertex_num; i++) /* traverse vertex */
        visit_vertex(graph->vertices[i], 1);
}
/* Prints every directed edge by walking each vertex's adjacency list. */
void print_graph_edge(DirGraph *graph) {
    int i;
    VertexNode *vertex = NULL;
    for (i = 0; i < graph->vertex_num; i++) { /* traverse vertex */
        vertex = graph->vertices[i];
        EdgeNode *edge = vertex->first;
        while (edge) { /* traverse edge */
            visit_edge(vertex, edge, 1);
            edge = edge->next;
        }
    }
}
/*
 * Linear scan for the vertex holding value v.
 * return value : vertex index in vertices array, -1 if not found
 */
int vertex_index(DirGraph *graph, int v) {
    int i;
    for (i = 0; i < graph->vertex_num; i++) {
        if (graph->vertices[i]->data == v)
            return i; /* found */
    }
    return -1; /* no vertex stores v */
}
/*
 * Checks whether the directed edge (v1, v2) exists.
 * return value : TRUE (1) if found, -1 if not
 * NOTE(review): returns TRUE/-1 rather than TRUE/FALSE; callers compare
 * against -1, so the convention is kept.
 */
int has_edge(DirGraph *graph, int v1, int v2) {
    /* set vertex point to v1 */
    int v1_index = vertex_index(graph, v1);
    if (v1_index == -1) return -1; /* v1 does not exist */
    VertexNode *vertex = graph->vertices[v1_index];
    /* find edge in v1's adjacency list */
    EdgeNode *edge = vertex->first; /* first neighbor */
    while (edge) {
        if (edge->vertex->data == v2) /* edge (v1, v2) found */
            return TRUE;
        edge = edge->next;
    }
    return -1;
}
/*
 * Looks up the first adjacency-list neighbor of vertex v.
 * return value : first neighbor's value if found, -1 if not
 */
int first_neigh(DirGraph *graph, int v) {
    /* set vertex point to v */
    int v_index = vertex_index(graph, v);
    if (v_index == -1) return -1; /* v does not exist */
    VertexNode *vertex = graph->vertices[v_index];
    if (vertex->first) /* has at least one neighbor */
        return vertex->first->vertex->data; /* return data of first neighbour */
    return -1; /* no outgoing edges */
}
/*
 * Returns the neighbor of v that follows current neighbor c in v's
 * adjacency list.
 * return value : next neighbor's data if found, -1 if not
 */
int next_neigh(DirGraph *graph, int v, int c) { /* c means current */
    /* set vertex point to v */
    int v_index = vertex_index(graph, v);
    if (v_index == -1) return -1; /* v does not exist */
    VertexNode *vertex = graph->vertices[v_index];
    EdgeNode *edge = vertex->first; /* first neighbor */
    while (edge) {
        if (edge->vertex->data == c && edge->next) /* c is not last neighbor */
            return edge->next->vertex->data;
        /* Bug fix: the original never advanced the pointer, looping
         * forever whenever the first neighbor did not match. */
        edge = edge->next;
    }
    return -1; /* c not found, or c was the last neighbor */
}
/*
 * Adds a vertex storing v.
 * return value : new vertex's index if succeed, -1 on duplicate or OOM
 */
int add_vertex(DirGraph *graph, int v) {
    /* Check for a duplicate BEFORE allocating: the original malloc'd
     * first and leaked the node when v already existed. */
    if (vertex_index(graph, v) != -1)
        return -1;
    VertexNode *node = (VertexNode *)malloc(sizeof(VertexNode));
    if (node == NULL) return -1; /* allocation failed */
    node->data = v;
    node->first = NULL;
    /* Add vertex AND update vertex count;
     * careful, vertices are NOT indexed by their data. */
    graph->vertices[graph->vertex_num++] = node;
    return graph->vertex_num - 1; /* index of the new vertex */
}
/*
 * Adds the directed edge (v1, v2), creating missing vertices on demand.
 * return value : TRUE on success, -1 if the edge already exists or on OOM
 */
int add_edge(DirGraph *graph, int v1, int v2) {
    if (has_edge(graph, v1, v2) != -1) /* edge (v1, v2) already exists */
        return -1;
    /* add vertices if they don't exist yet */
    int v1_index = vertex_index(graph, v1);
    int v2_index = vertex_index(graph, v2);
    if (v1_index == -1) v1_index = add_vertex(graph, v1);
    if (v2_index == -1) v2_index = add_vertex(graph, v2);
    /* create edge node v1 -> v2 (with an OOM check the original lacked) */
    EdgeNode *edge_v1 = (EdgeNode *)malloc(sizeof(EdgeNode));
    if (edge_v1 == NULL) return -1;
    edge_v1->vertex = graph->vertices[v2_index];
    edge_v1->next = NULL;
    VertexNode *vertex1 = graph->vertices[v1_index];
    /* append to v1's adjacency list; the original duplicated the
     * bookkeeping in both branches and ended with unreachable code */
    if (vertex1->first == NULL) { /* v1 has no out edge yet */
        vertex1->first = edge_v1;
    } else { /* walk to the tail and link */
        EdgeNode *edge = vertex1->first;
        while (edge->next) edge = edge->next;
        edge->next = edge_v1;
    }
    graph->edge_num++;
    return TRUE;
}
/*
* Copyright (c) Microsoft Corporation
* SPDX-License-Identifier: MIT
*/
#include "ebpf_platform.h"
#include "ebpf_program_types_c.c"
// Serializes program_information into a MIDL-encoded buffer allocated by the
// RPC runtime (via MIDL_user_allocate). On success *buffer/*buffer_size
// describe the encoded data; the caller owns the buffer.
ebpf_error_code_t
ebpf_program_information_encode(
    const ebpf_program_information_t* program_information, uint8_t** buffer, unsigned long* buffer_size)
{
    handle_t handle = NULL;
    // The MIDL encode API takes a non-const pointer; cast away const here.
    ebpf_program_information_pointer_t local_program_information = (ebpf_program_information_t*)program_information;
    RPC_STATUS status = MesEncodeDynBufferHandleCreate((char**)buffer, buffer_size, &handle);
    if (status != RPC_S_OK)
        return EBPF_ERROR_OUT_OF_RESOURCES;

    // Serialization failures surface as SEH exceptions, not return codes;
    // capture the exception code into status.
    RpcTryExcept { ebpf_program_information_pointer_t_Encode(handle, &local_program_information); }
    RpcExcept(RpcExceptionFilter(RpcExceptionCode())) { status = RpcExceptionCode(); }
    RpcEndExcept;

    if (handle)
        MesHandleFree(handle);

    return status == RPC_S_OK ? EBPF_ERROR_SUCCESS : EBPF_ERROR_INVALID_PARAMETER;
}
// Deserializes a MIDL-encoded buffer into a newly allocated
// ebpf_program_information_t. On success the caller owns
// *program_information (allocated via MIDL_user_allocate).
ebpf_error_code_t
ebpf_program_information_decode(
    ebpf_program_information_t** program_information, const uint8_t* buffer, unsigned long buffer_size)
{
    handle_t handle = NULL;
    ebpf_program_information_pointer_t local_program_information = NULL;
    RPC_STATUS status = MesDecodeBufferHandleCreate((char*)buffer, buffer_size, &handle);
    if (status != RPC_S_OK)
        return EBPF_ERROR_OUT_OF_RESOURCES;

    // Deserialization failures surface as SEH exceptions; capture the code.
    RpcTryExcept { ebpf_program_information_pointer_t_Decode(handle, &local_program_information); }
    RpcExcept(RpcExceptionFilter(RpcExceptionCode())) { status = RpcExceptionCode(); }
    RpcEndExcept;

    if (handle)
        MesHandleFree(handle);

    if (status != RPC_S_OK)
        return EBPF_ERROR_INVALID_PARAMETER;

    // Transfer ownership only on success.
    *program_information = local_program_information;
    return EBPF_ERROR_SUCCESS;
}
// RPC runtime allocation hook: routes MIDL allocations through the eBPF
// platform allocator, requesting non-executable memory.
void* __RPC_USER
MIDL_user_allocate(size_t size)
{
    return ebpf_allocate(size, EBPF_MEMORY_NO_EXECUTE);
}
// RPC runtime free hook: releases memory obtained via MIDL_user_allocate.
void __RPC_USER
MIDL_user_free(void* p)
{
    ebpf_free(p);
}
|
# Launches BERT+CRF sequence-labelling training on the cner NER task,
# with a separate (higher) learning rate for the CRF layer.
CURRENT_DIR=`pwd`
export BERT_BASE_DIR=$CURRENT_DIR/pretrained_models/bert-base
export DATA_DIR=$CURRENT_DIR/dataset
# Fixed typo: OUTPUR_DIR -> OUTPUT_DIR (defined and consumed only in this
# script, so the rename is self-contained).
export OUTPUT_DIR=$CURRENT_DIR/outputs
export TASK_NAME=cner

python task_sequence_labeling_layer_lr_ner_crf.py \
  --model_type=bert \
  --model_path=$BERT_BASE_DIR \
  --model_name=bert-crf-layer-lr \
  --task_name=${TASK_NAME} \
  --do_train \
  --gpu=0,1 \
  --use_crf \
  --do_lower_case \
  --monitor=eval_f1 \
  --data_dir=$DATA_DIR/${TASK_NAME}/ \
  --train_max_seq_length=128 \
  --eval_max_seq_length=512 \
  --per_gpu_train_batch_size=24 \
  --per_gpu_eval_batch_size=24 \
  --learning_rate=3e-5 \
  --crf_learning_rate=1e-3 \
  --num_train_epochs=4.0 \
  --logging_steps=80 \
  --save_steps=80 \
  --output_dir=$OUTPUT_DIR/${TASK_NAME}_output/ \
  --overwrite_output_dir \
  --seed=42
|
def remove_duplicates(head):
    """Collapse runs of consecutive equal values in a linked list.

    Assumes nodes expose ``data`` and ``next`` attributes (a sorted list
    makes this a full de-duplication). Operates in place.

    :param head: first node of the list, or None for an empty list.
    :return: the (unchanged) head node.
    """
    node = head
    while node is not None:
        successor = node.next
        if successor is None:
            break
        if node.data == successor.data:
            # Unlink the duplicate neighbour and stay on the current node.
            node.next = successor.next
        else:
            node = successor
    return head
return head |
#!/bin/bash
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# Prepares a Ubuntu Server 2x.xx guest operating system.
# NOTE(review): expects BUILD_USERNAME, BUILD_KEY, ANSIBLE_USERNAME and
# ANSIBLE_KEY to be provided by the caller (e.g. a Packer provisioner) —
# confirm against the build pipeline.

### Set the environmental variables. ###
export BUILD_USERNAME
export BUILD_KEY
export ANSIBLE_USERNAME
export ANSIBLE_KEY

### Update the default local user. ###
echo '> Updating the default local user ...'
echo '> Adding authorized_keys for the default local user ...'
sudo mkdir -p /home/"$BUILD_USERNAME"/.ssh
# Unquoted EOF: $BUILD_KEY expands now, writing the literal key material.
sudo tee /home/"$BUILD_USERNAME"/.ssh/authorized_keys << EOF
$BUILD_KEY
EOF
sudo chown -R "$BUILD_USERNAME" /home/"$BUILD_USERNAME"/.ssh
sudo chmod 700 /home/"$BUILD_USERNAME"/.ssh
sudo chmod 644 /home/"$BUILD_USERNAME"/.ssh/authorized_keys
echo '> Adding the default local user to passwordless sudoers...'
sudo bash -c "echo \"""$BUILD_USERNAME"" ALL=(ALL) NOPASSWD:ALL\" >> /etc/sudoers"

### Create a local user for Ansible. ###
echo '> Creating a local user for Ansible ...'
sudo groupadd "$ANSIBLE_USERNAME"
sudo useradd -g "$ANSIBLE_USERNAME" -m -s /bin/bash "$ANSIBLE_USERNAME"
sudo usermod -aG sudo "$ANSIBLE_USERNAME"
# Random throwaway password: interactive login is via SSH key only.
echo "$ANSIBLE_USERNAME":"$(openssl rand -base64 14)" | sudo chpasswd
echo '> Adding authorized_keys for local Ansible user ...'
sudo mkdir /home/"$ANSIBLE_USERNAME"/.ssh
sudo tee /home/"$ANSIBLE_USERNAME"/.ssh/authorized_keys << EOF
$ANSIBLE_KEY
EOF
sudo chown -R "$ANSIBLE_USERNAME":"$ANSIBLE_USERNAME" /home/"$ANSIBLE_USERNAME"/.ssh
sudo chmod 700 /home/"$ANSIBLE_USERNAME"/.ssh
sudo chmod 600 /home/"$ANSIBLE_USERNAME"/.ssh/authorized_keys
echo '> Adding local Ansible user to passwordless sudoers...'
sudo bash -c "echo \"""$ANSIBLE_USERNAME"" ALL=(ALL) NOPASSWD:ALL\" >> /etc/sudoers"

### Configure SSH for Public Key Authentication. ###
echo '> Configuring SSH for Public Key Authentication ...'
sudo sed -i 's/.*PubkeyAuthentication.*/PubkeyAuthentication yes/' /etc/ssh/sshd_config
### Uncomment the line below to disable Password Authentication and enforce _only_ Public Key Authentication. ###
### sudo sed -i 's/#PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config

### Restart the SSH daemon. ###
echo '> Restarting the SSH daemon. ...'
sudo systemctl restart sshd

### Disable and clean tmp. ###
echo '> Disabling and clean tmp ...'
# Comments out the clean-on-boot ("D") directives in tmp.conf.
sudo sed -i 's/D/#&/' /usr/lib/tmpfiles.d/tmp.conf

### Create the clean script. ###
echo '> Creating the clean script ...'
# The heredoc below is written verbatim into clean.sh (no $-expansions in
# its body); it is executed once now and can be re-run before templating.
sudo tee /home/"$BUILD_USERNAME"/clean.sh << EOF
#!/bin/bash
### Cleans all audit logs. ###
echo '> Cleaning all audit logs ...'
if [ -f /var/log/audit/audit.log ]; then
cat /dev/null > /var/log/audit/audit.log
fi
if [ -f /var/log/wtmp ]; then
cat /dev/null > /var/log/wtmp
fi
if [ -f /var/log/lastlog ]; then
cat /dev/null > /var/log/lastlog
fi
### Cleans persistent udev rules. ###
echo '> Cleaning persistent udev rules ...'
if [ -f /etc/udev/rules.d/70-persistent-net.rules ]; then
rm /etc/udev/rules.d/70-persistent-net.rules
fi
### Clean the /tmp directories. ###
echo '> Cleaning /tmp directories ...'
rm -rf /tmp/*
rm -rf /var/tmp/*
### Clean the SSH keys. ###
echo '> Cleaning the SSH keys ...'
rm -f /etc/ssh/ssh_host_*
### Set the hostname to localhost. ###
echo '> Setting the hostname to localhost ...'
cat /dev/null > /etc/hostname
hostnamectl set-hostname localhost
### Clean apt cache. ###
echo '> Cleaning apt cache ...'
apt-get autoremove
apt-get clean
### Clean the machine-id. ###
echo '> Cleaning the machine-id ...'
truncate -s 0 /etc/machine-id
rm /var/lib/dbus/machine-id
ln -s /etc/machine-id /var/lib/dbus/machine-id
### Clean the shell history. ###
echo '> Cleaning the shell history ...'
unset HISTFILE
history -cw
echo > ~/.bash_history
rm -fr /root/.bash_history
### Prepare cloud-init ###
echo '> Preparing cloud-init ...'
rm -rf /etc/cloud/cloud.cfg.d/subiquity-disable-cloudinit-networking.cfg
rm -rf /etc/cloud/cloud.cfg.d/99-installer.cfg
rm -rf /etc/netplan/00-installer-config.yaml
echo "disable_vmware_customization: false" >> /etc/cloud/cloud.cfg
echo "datasource_list: [ VMware, OVF, None ]" > /etc/cloud/cloud.cfg.d/90_dpkg.cfg
# Uncomment below if guest customization will be performed by VMware Tools.
# touch /etc/cloud/cloud.cfg.d/99.disable-network-config.cfg
# echo "network: {config: disabled}" >> /etc/cloud/cloud.cfg.d/99.disable-network-config.cfg
### Modify GRUB ###
echo '> Modifying GRUB ...'
sed -i -e "s/GRUB_CMDLINE_LINUX_DEFAULT=\"\(.*\)\"/GRUB_CMDLINE_LINUX_DEFAULT=\"\"/" /etc/default/grub
update-grub
EOF

### Change the permissions on /home/"$BUILD_USERNAME"/clean.sh . ###
echo "> Changing the permissions on /home/""$BUILD_USERNAME""/clean.sh ..."
sudo chmod +x /home/"$BUILD_USERNAME"/clean.sh

### Run the clean script. ###
echo '> Running the clean script ...'
sudo /home/"$BUILD_USERNAME"/clean.sh

### Set check for ssh keys on reboot; regenerate on reboot if necessary. ###
echo '> Setting check for ssh keys on reboot; will regenerate on reboot if neccessary. ...'
sudo tee /etc/rc.local << EOF
#!/bin/bash
test -f /etc/ssh/ssh_host_dsa_key || dpkg-reconfigure openssh-server
exit 0
EOF
sudo chmod +x /etc/rc.local

### Done. ###
echo '> Done.'
|
<reponame>dhyanaswain/weather_card
// Production environment configuration for the weather_card app.
export const environment = {
	production: true,
	// OpenWeatherMap application ID (client-side key, shipped to browsers).
	appID: '8e4edaddb94e480ae1d3c04b8761b3e1',
	// Google API key — blank here; NOTE(review): confirm where it is injected.
	gKey: '',
	// Firebase project configuration.
	// NOTE(review): apiKey is a redacted placeholder ("<KEY>") and must be
	// replaced with the real value before deployment.
	config: {
		apiKey: "<KEY>",
		authDomain: "weather-22167.firebaseapp.com",
		databaseURL: "https://weather-22167.firebaseio.com",
		projectId: "weather-22167",
		storageBucket: "weather-22167.appspot.com",
		messagingSenderId: "37243553012",
		appId: "1:37243553012:web:fae9f10c56f8bca98d8aa1",
		measurementId: "G-Z2P7NFXP4M"
	}
};
|
<reponame>mh-cbon/mdl-go-components<gh_stars>1-10
package components
import (
mgc "github.com/mh-cbon/mdl-go-components"
base "github.com/mh-cbon/mdl-go-components/components_common"
)
// InputConfirm is an MDL form component made of two paired inputs
// (e.g. a value plus its confirmation) rendered from one template block.
type InputConfirm struct {
	mgc.ViewComponent
	base.NodeType
	base.NodePlaceholder
	base.NodeSingleError
	Attr       base.AttrList
	Classes    base.ClassList
	InputLeft  *PartialInputConfirm // primary input
	InputRight *PartialInputConfirm // confirmation input
}
// NewInputConfirm builds an InputConfirm bound to the
// "mgc/form_input_confirm" template block with both partials allocated.
func NewInputConfirm() *InputConfirm {
	ret := &InputConfirm{}
	ret.SetBlock("mgc/form_input_confirm")
	ret.InputLeft = &PartialInputConfirm{}
	ret.InputRight = &PartialInputConfirm{}
	return ret
}
// NewInputConfirmText returns an InputConfirm whose inputs are type="text".
func NewInputConfirmText() *InputConfirm {
	ret := NewInputConfirm()
	ret.SetType("text")
	return ret
}
// NewInputConfirmPassword returns an InputConfirm whose inputs are
// type="password".
func NewInputConfirmPassword() *InputConfirm {
	ret := NewInputConfirm()
	ret.SetType("password")
	return ret
}
// Render assigns derived ids ("left-"/"right-" plus the render-context id)
// to any partial that does not have one yet, then renders the template.
func (view *InputConfirm) Render(args ...interface{}) (string, error) {
	if view.InputLeft.GetId() == "" {
		view.InputLeft.SetId("left-" + view.GetRenderContext().GetId())
	}
	if view.InputRight.GetId() == "" {
		view.InputRight.SetId("right-" + view.GetRenderContext().GetId())
	}
	return view.GetRenderContext().RenderComponent(view, args)
}
// Translate localizes both partial inputs with the given translator.
func (view *InputConfirm) Translate(t base.Translator) {
	view.InputLeft.Translate(t)
	view.InputRight.Translate(t)
}
// SetErrors copies the error registered under the left input's name onto
// the component. NOTE(review): the right (confirmation) input's name is
// not consulted — confirm this is intentional.
func (view *InputConfirm) SetErrors(p base.ErrorProvider) {
	err := p.GetError(view.InputLeft.GetName())
	if err != nil {
		view.SetError(err)
	}
}
// SetName names the left input b and the right input "Confirm"+b.
func (view *InputConfirm) SetName(b string) {
	view.InputLeft.SetName(b)
	view.InputRight.SetName("Confirm" + b)
}
// GetName returns the left input's name, the component's canonical field name.
func (view *InputConfirm) GetName() string {
	return view.InputLeft.GetName()
}
// PartialInputConfirm is one half of an InputConfirm: a single labelled
// input whose name and id live in InputAttr.
type PartialInputConfirm struct {
	base.Node
	base.NodeLabel
	base.NodeSingleValue
	InputAttr    base.AttrList
	InputClasses base.ClassList
}
// SetName stores b as the input's "name" attribute.
func (view *PartialInputConfirm) SetName(b string) {
	view.InputAttr.Set("name", b)
}
// GetName returns the input's "name" attribute.
func (view *PartialInputConfirm) GetName() string {
	return view.InputAttr.GetValue("name")
}
// SetId stores b as the input's "id" attribute.
func (view *PartialInputConfirm) SetId(b string) {
	view.InputAttr.Set("id", b)
}
// GetId returns the input's "id" attribute.
func (view *PartialInputConfirm) GetId() string {
	return view.InputAttr.GetValue("id")
}
|
#!/bin/bash
# Activates the tali conda environment, syncs the repo and dependencies,
# then launches a resumable single-GPU training run.
# NOTE(review): relies on CODE_DIR being set in the sourced shell config —
# confirm against the container setup.
export HOME=/root/
source $HOME/.bashrc
source $HOME/conda/bin/activate
conda activate tali

cd $CODE_DIR
git pull
pip install -r $CODE_DIR/requirements.txt

source $CODE_DIR/setup_scripts/setup_base_experiment_disk.sh
source $CODE_DIR/setup_scripts/setup_wandb_credentials.sh

cd $CODE_DIR
# Kill any stale processes holding the GPUs, then start the run.
fuser -k /dev/nvidia*; \
python $CODE_DIR/run.py \
hydra.verbose=True \
trainer=default \
resume=True \
batch_size=2 \
trainer.gpus=1 \
trainer.auto_scale_batch_size=True \
datamodule.dataset_config.rescan_paths=True \
datamodule.prefetch_factor=3 \
datamodule.num_workers=12 \
model=deci_modus_prime_vi-transformer16 \
datamodule.dataset_config.dataset_size_identifier=milli \
datamodule.dataset_config.modality_config.image=True \
datamodule.dataset_config.modality_config.text=True \
datamodule.dataset_config.modality_config.audio=True \
datamodule.dataset_config.modality_config.video=True
|
<gh_stars>10-100
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Function to sum frequency spectra."""
from astropy import log
from .io import save_pds, get_file_type
from .io import HEN_FILE_EXTENSION
from .base import _assign_value_if_none
from .fspec import average_periodograms
def sum_fspec(files, outname=None):
    """Take a bunch of (C)PDSs and sum them.

    Parameters
    ----------
    files : list of str
        Paths of the (C)PDS files to sum; all must be of the same kind.
    outname : str, default None
        Output file name; defaults to ``tot_<ftype>`` + HEN_FILE_EXTENSION.

    Returns
    -------
    tot_contents
        The averaged periodogram object that was saved to ``outname``.
    """
    # Read first file to establish the kind and default output name.
    ftype0, contents = get_file_type(files[0])
    pdstype = ftype0.replace("reb", "")
    outname = _assign_value_if_none(
        outname, "tot_" + ftype0 + HEN_FILE_EXTENSION
    )

    def check_and_distribute_files(files):
        # Generator: validate each file's kind against the first, then yield
        # its contents with fftlen filled in.
        for i, f in enumerate(files):
            # Bug fix: read the CURRENT file. The original called
            # get_file_type(files[0]) every iteration, so it summed N
            # copies of the first spectrum and never failed the kind check.
            ftype, contents = get_file_type(f)
            if i == 0:
                ftype0 = ftype
            else:
                assert ftype == ftype0, "Files must all be of the same kind"
            contents.fftlen = contents.segment_size
            yield contents

    tot_contents = average_periodograms(check_and_distribute_files(files))

    log.info("Saving %s to %s" % (pdstype, outname))
    save_pds(tot_contents, outname)
    return tot_contents
def main(args=None):
    """Main function called by the `HENsumfspec` command line script."""
    import argparse

    parser = argparse.ArgumentParser(
        description="Sum (C)PDSs contained in different files"
    )
    parser.add_argument("files", help="List of light curve files", nargs="+")
    outname_help = (
        "Output file name for summed (C)PDS. Default:"
        + " tot_(c)pds"
        + HEN_FILE_EXTENSION
    )
    parser.add_argument(
        "-o", "--outname", type=str, default=None, help=outname_help
    )

    parsed = parser.parse_args(args)
    sum_fspec(parsed.files, parsed.outname)
|
#!/usr/bin/env bash
# Regenerates index.js so it exports every contract artifact in ./artifacts.
# Fix: `set -o pipefail` is a bashism, so the shebang must be bash (the
# original "#!/bin/sh" fails on dash-based systems).
set -o errexit -o nounset -o pipefail

# -f: do not abort under errexit when index.js does not exist yet.
rm -f index.js

cat <<EOF >> index.js
let contracts = {
EOF

for file in ./artifacts/*.json; do
echo " '$(basename $file .json)': require('$file'),"
done >> index.js

cat <<EOF >> index.js
};

module.exports = contracts;
EOF
|
# Resolve the directory this script lives in so relative paths work
# regardless of the caller's working directory.
script_dir=$(dirname "$(readlink -f "$0")")
export KB_DEPLOYMENT_CONFIG=$script_dir/../deploy.cfg

# KBase job working directory: must contain the auth token and input.json.
WD=/kb/module/work

if [ -f $WD/token ]; then
    # xargs appends the token as the final argument after the input/output paths.
    cat $WD/token | xargs sh $script_dir/../bin/run_pavel_sdk_test_java_async_job.sh $WD/input.json $WD/output.json
else
    echo "File $WD/token doesn't exist, aborting."
    exit 1
fi
|
# Optional camera driver (uncomment when running on a Raspberry Pi):
# sudo modprobe bcm2835-v4l2
# The auxiliary services below are currently disabled; only runner.py starts.
#python3 controller.py > logs/controller.txt &
#python3 autopilot_web_server.py > logs/autopilot_web_server.txt &
#python3 recorder.py > logs/recorder.txt &
#cd training_data;
#python3 -m http.server > ../logs/recorder.txt &
# Launch the main runner in the background, logging to logs/LOGS.txt.
python3 runner.py > logs/LOGS.txt &
/*
* This Java program provides a brief overview of the project.
*
* This project is a web application that allows users to manage their daily
 * tasks. It provides an easy-to-use user interface that lets users create,
 * update, delete, and view their tasks. The application also offers a calendar
 * view, so users can see all their tasks in one place.
*
* The application is written in Java and uses the Spring Boot framework for the backend
* layer and the React library for the frontend layer. The database used is MySQL.
*
* It also has features such as user authentication, user roles, and support for multiple
* languages.
*/ |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.