text stringlengths 1 1.05M |
|---|
<reponame>lydxwj/da-cli<filename>lib/site/add.js
const chalk = require('chalk')
const inquirer = require('inquirer')
const fs = require('fs')
const configJson = require(`${__dirname}/../../config/config`)
const sites = configJson.sites
/**
 * Interactively adds a new template-repository "site" to the CLI config.
 * Prompts for a display name, a unique key and a base URL, persists the
 * updated sites map to config/config.json, then prints the full site list.
 */
function siteAdd() {
  const questions = [
    {
      name: 'siteName',
      type: 'input',
      message: '请输入仓库地址类型名',
      validate (val) {
        if (val === '') {
          return 'siteName is required!'
        }
        return true
      }
    },
    {
      name: 'siteKey',
      type: 'input',
      message: '请输入存储仓库地址类型的键(key)',
      validate (val) {
        if (val === '') return 'siteKey is required!'
        // Keys must be unique within the existing sites map.
        if (sites[val]) return 'siteKey has already existed!'
        if (!/^[a-zA-Z]+$/.test(val)) {
          return 'It can only contain English letters!'
        }
        return true
      }
    },
    {
      name: 'siteUrl',
      type: 'input',
      message: '请输入仓库地址URL',
      suffix: '例如:https://my.sitecode.com',
      validate (val) {
        if (val === '') return 'The siteUrl is required!'
        // Bug fix: the pattern was unanchored, so any string merely
        // *containing* a URL (e.g. "foo http://a.b bar") passed validation.
        if (!/^(http|https):\/\/[\w\-_]+(\.[\w\-_]+)+([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?$/.test(val)) return 'Please enter the correct URL!'
        return true
      }
    }
  ]
  inquirer.prompt(questions).then(answers => {
    const { siteName, siteKey, siteUrl } = answers
    // "direct:" URL with {tplOwner}/{tplName} placeholders — presumably
    // consumed by a template downloader elsewhere in the CLI; confirm format.
    const newSite = {
      name: siteName,
      value: siteKey,
      short: siteName,
      url: 'direct:' + siteUrl + '/{tplOwner}/{tplName}.git'
    }
    sites[siteKey] = newSite
    // Persist the merged config; `sites` is the same object referenced by
    // configJson, spread keeps any other top-level config keys intact.
    fs.writeFile(`${__dirname}/../../config/config.json`, JSON.stringify({ ...configJson, sites }, null, 2), 'utf-8', err => {
      if (err) {
        console.log(chalk.red('Error\n'))
        console.log(err)
        return
      }
      console.log('\n')
      console.log(chalk.green('Added successfully!\n'))
      console.log(chalk.grey('The latest sites list is: \n'))
      Object.keys(sites).forEach(item => {
        console.log(chalk.green(' ' + item + ': \n'))
        console.log(' 仓库地址类型名:' + sites[item].name + ' \n')
        console.log(' 仓库地址URL:' + sites[item].url + ' \n')
      })
      console.log('\n')
    })
  })
}
module.exports = siteAdd |
package main
import (
"context"
_ "github.com/lib/pq"
"github.com/the-gigi/delinkcious/pkg/db_util"
"github.com/the-gigi/delinkcious/pkg/social_graph_client"
. "github.com/the-gigi/delinkcious/pkg/test_util"
"log"
)
// check aborts the program by panicking when err is non-nil; it is a no-op
// for a nil error.
func check(err error) {
	if err == nil {
		return
	}
	panic(err)
}
// initDB provisions the local "social_graph_manager" database and clears the
// social_graph table so each run starts from a known-empty graph.
func initDB() {
	db, err := db_util.RunLocalDB("social_graph_manager")
	check(err)
	// Ignore if table doesn't exist (will be created by service)
	err = db_util.DeleteFromTableIfExist(db, "social_graph")
	check(err)
}
// main wipes the local social-graph table, launches the social_graph_service,
// and exercises its client end-to-end: read the (empty) graph, create a small
// follow graph, then read it back in both directions.
func main() {
	initDB()
	ctx := context.Background()
	// Ensure the service is shut down even if a check() panics.
	defer StopService(ctx)
	RunService(ctx, ".", "social_graph_service")
	// Run some tests with the client
	cli, err := social_graph_client.NewClient("localhost:9090")
	check(err)
	// Initial reads — expected empty after initDB().
	following, err := cli.GetFollowing("gigi")
	check(err)
	log.Print("gigi is following:", following)
	followers, err := cli.GetFollowers("gigi")
	check(err)
	log.Print("gigi is followed by:", followers)
	// Build a small follow graph.
	err = cli.Follow("gigi", "liat")
	check(err)
	err = cli.Follow("gigi", "guy")
	check(err)
	err = cli.Follow("guy", "gigi")
	check(err)
	err = cli.Follow("saar", "gigi")
	check(err)
	err = cli.Follow("saar", "ophir")
	check(err)
	// Verify the graph is reflected in both directions.
	following, err = cli.GetFollowing("gigi")
	check(err)
	log.Print("gigi is following:", following)
	followers, err = cli.GetFollowers("gigi")
	check(err)
	log.Print("gigi is followed by:", followers)
}
|
# Load our own completion functions
fpath=(~/.zsh/completion $fpath)
# Completion; use cache if updated within 24h
autoload -Uz compinit
# Glob qualifiers: (#q…) enables qualifiers, N = null glob (expands to empty
# when no match), mh+24 = modified more than 24 hours ago.  So: rebuild the
# dump when it is older than a day, otherwise trust the cached dump (-C).
if [[ -n $HOME/.zcompdump(#qN.mh+24) ]]; then
compinit -d $HOME/.zcompdump
else
compinit -C
fi
# Menus & auto-complete setup
# Case-insensitive (all),partial-word and then substring completion
zstyle ':completion:*' matcher-list 'm:{a-zA-Z}={A-Za-z}' 'r:|[._-]=* r:|=*' 'l:|=* r:|=*'
# Pasting with tabs doesn't perform completion
zstyle ':completion:*' insert-tab pending
# Colorize the auto-complete menu
zstyle ':completion:*' list-colors ''
# Highlight the currently selected item in the auto-complete menu
# and make the menu navigable by arrow keys
zstyle ':completion:*:*:*:*:*' menu select
# Colorize kill completion: highlight the PID column in process listings.
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#) ([0-9a-z-]#)*=01;34=0=01'
|
<gh_stars>0
package com.github.yupc.admin.repository.authority.service;
import com.github.yupc.admin.entity.authority.po.AdminRole;
import com.github.yupc.admin.repository.authority.example.AdminRoleExample;
import com.github.yupc.base.service.BaseService;
import java.util.List;
/**
 * Service interface for administering roles and their grants.
 *
 * @author yupc
 * @createTime 2017-12-15 10:45
 */
public interface AdminRoleService extends BaseService<Long, AdminRole, AdminRoleExample> {
    /**
     * Checks whether a role code already exists.
     *
     * @param appId appId
     * @param code  role code to check
     * @return true when the code exists, false otherwise
     */
    boolean check(String appId, String code);

    /**
     * Grants applications to a role.
     *
     * @param appId             appId
     * @param roleId            role id
     * @param applicationIdList ids of the applications to grant
     */
    void authorityAdmin(String appId, Long roleId, List<Long> applicationIdList);

    /**
     * Grants resources and menus to a role.
     *
     * @param appId         appId
     * @param menuGroupCode menu group code
     * @param roleId        role id
     * @param elementIdList ids of the resource elements to grant
     */
    void authorityResources(String appId, String menuGroupCode, Long roleId, List<Long> elementIdList);

    /**
     * Finds the roles within an application.
     * NOTE(review): the original doc said "find roles owned by userId", but
     * the parameters are appId/applicationId — confirm the intended contract.
     *
     * @param appId         appId
     * @param applicationId application id
     * @return the roles associated with the given application
     */
    List<AdminRole> findRole(String appId, Long applicationId);
}
|
// Menu.js
import React from "react";
class Menu extends React.Component {
constructor() {
super();
this.state = {
selectedOption: ""
};
}
handleChange = e => {
this.setState({
selectedOption: e.target.value
});
};
render() {
return (
<div>
{/* Render the options */}
{this.props.options.map((option, i) => (
<div key={i}>
<input
type="radio"
name="option"
value={option}
onChange={e => this.handleChange(e)}
/>
{option}
</div>
))}
{/* Render the selected option */}
{this.state.selectedOption ? (
<p>Selected option: {this.state.selectedOption}</p>
) : (
<p>Please select an option</p>
)}
</div>
);
}
}
export default Menu;
// App.js
import React from "react";
import Menu from "./Menu";
class App extends React.Component {
render() {
return (
<div>
<Menu options={["Apples", "Oranges", "Bananas"]} />
</div>
);
}
}
export default App; |
#!/bin/bash
# Instantiate panorama_template.osp by substituting placeholders, writing the
# result to <workdir>/<processedData>/media/panorama.osp.
# $0 - path to script
# $1 - path to data folder
# $2 - path to processed data folder
# $3 - panorama run time
scriptPath=$0
scriptPath=${scriptPath%/*}
workPath=$(pwd)
# Bug fix: the original wrapped the sed expressions in empty '' pairs, leaving
# the substitutions unquoted (broken by spaces), and used '/' as delimiter,
# which breaks as soon as an argument contains a path.  Use '|' as delimiter,
# quote the expressions, and drop the useless `cat |`.
sed -e "s|#data#|$1|g" \
    -e "s|#processedData#|$2|g" \
    -e "s|#panoramaTime#|$3|g" \
    "$scriptPath/panorama_template.osp" > "$workPath/$2/media/panorama.osp"
|
#!/bin/bash
# Provision an Ubuntu host with the Splunk attack_range toolchain:
# system packages, Terraform 0.12.28, the attack_range repo and a
# Python 3 virtualenv with its requirements.
# Bug fix: abort on the first failed step instead of continuing in an
# inconsistent state (e.g. running pip install in the wrong directory
# after a failed clone).
set -e
sudo apt-get update
sudo apt-get install -y python3-dev git python-dev unzip python-pip awscli python-virtualenv
wget https://releases.hashicorp.com/terraform/0.12.28/terraform_0.12.28_linux_amd64.zip
unzip terraform_0.12.28_linux_amd64.zip
sudo mv terraform /usr/local/bin/
git clone https://github.com/splunk/attack_range && cd attack_range
# Initialise the AWS Terraform workspace, then return to the repo root.
cd terraform/aws
terraform init
cd ../..
virtualenv -p python3 venv
source venv/bin/activate
pip install -r requirements.txt
cp attack_range.conf.template attack_range.conf
<reponame>nartc/tnc
import Card from "@material-ui/core/Card";
import CardActionArea from "@material-ui/core/CardActionArea";
import CardContent from "@material-ui/core/CardContent";
import CardMedia from "@material-ui/core/CardMedia";
import Grid from "@material-ui/core/Grid";
import { makeStyles } from "@material-ui/core/styles";
import Typography from "@material-ui/core/Typography";
import { NavigateFn } from "@reach/router";
import React, { FC, memo, useCallback } from "react";
import {
ImageSharp,
ImageSharpFluid,
MarkdownRemarkEdge,
MarkdownRemarkFrontmatter,
} from "../../graph-types";
import BlogChipList from "./blog-chip-list";
import BlogTimeToRead from "./blog-time-to-read";
// Props for a single blog-list card.
type BlogListItemProps = {
  // GraphQL edge holding the post's frontmatter, fields and excerpt.
  item: MarkdownRemarkEdge;
  // Reach Router navigate function invoked when the card is clicked.
  navigate: NavigateFn;
};
// Theme-aware styles for the blog card.  Fixed min-heights keep cards in a
// grid row visually aligned regardless of title/excerpt length.
const useStyles = makeStyles(theme => ({
  cardRoot: {
    borderRadius: theme.shape.borderRadius,
  },
  excerptRoot: {
    marginTop: theme.spacing(1),
    marginBottom: theme.spacing(1),
    minHeight: 70,
  },
  titleRoot: {
    minHeight: 80,
  },
}));
// Card for one blog post in the list view.  Shows cover image, title, tag
// chips, excerpt, read time and language chips; clicking navigates to the
// post (with a /vi/ prefix for non-English posts).
const BlogListItem: FC<BlogListItemProps> = memo(({ item, navigate }) => {
  const classes = useStyles();
  const frontmatter = item.node.frontmatter as MarkdownRemarkFrontmatter;
  const slug = item.node.fields?.slug;
  const langKey = item.node.fields?.langKey;
  const tags = frontmatter.tags as string[];
  const langs = frontmatter.langs as string[];
  const cover = frontmatter.cover;
  // Fall back to a placeholder image when the post has no cover.
  const coverImg = !!cover
    ? (((cover.childImageSharp as ImageSharp).fluid as ImageSharpFluid)
        .src as string)
    : "http://lorempixel.com/600/480/";
  const onItemClicked = useCallback(() => {
    // NOTE(review): replace("/", "") only strips the FIRST slash of the slug
    // (e.g. "/post/" -> "post/") — confirm slugs only carry a leading slash.
    const path = `/blogs/${(slug as string).replace("/", "")}`;
    // Vietnamese (non-"en") posts live under /blogs/vi/.
    navigate(langKey === "en" ? path : path.replace("/blogs/", "/blogs/vi/"));
  }, [slug, langKey, navigate]);
  return (
    <Card elevation={4} raised classes={{ root: classes.cardRoot }}>
      <CardActionArea onClick={onItemClicked}>
        <CardMedia
          component={"img"}
          alt={"blog image"}
          height={250}
          image={coverImg}
        />
        <CardContent>
          <Typography
            gutterBottom
            variant="h5"
            component="h2"
            classes={{ root: classes.titleRoot }}
          >
            {frontmatter.title}
          </Typography>
          {/* Tag chips only when the post has tags */}
          {!!tags.length && <BlogChipList chips={tags} />}
          <Typography
            variant="body2"
            color="textSecondary"
            component="p"
            classes={{ root: classes.excerptRoot }}
          >
            {item.node.excerpt}
          </Typography>
          <Grid container justify={"space-between"} alignItems={"center"}>
            <BlogTimeToRead timeToRead={item.node.timeToRead as number} />
            {/* Outlined chips showing available translations */}
            {langs && (
              <BlogChipList
                chips={langs.map(lang => lang.toUpperCase())}
                isOutline
              />
            )}
          </Grid>
        </CardContent>
      </CardActionArea>
    </Card>
  );
});
export default BlogListItem;
|
<gh_stars>100-1000
// +k8s:conversion-gen=github.com/openshift/openshift-apiserver/pkg/image/apis/image
// +k8s:conversion-gen-external-types=github.com/openshift/api/image/dockerpre012
// +k8s:defaulter-gen=TypeMeta
// +k8s:defaulter-gen-input=../../../../../../../../github.com/openshift/api/image/dockerpre012
// +groupName=image.openshift.io
// Package dockerpre012 is the dockerpre012 version of the API.
package dockerpre012
|
#!/bin/bash
# Environment setup for running Spark-on-YARN benchmarks.
# Bug fix: the shebang was /bin/sh, but the script relies on the bash-only
# [[ ]] conditional syntax below.
export SOL_BENCH_DIR=$SOLUTIONS_BENCH_DIR/Spark
export SOL_SBIN_DIR=$SOLUTION_HOME/sbin
export SOL_CONF_DIR_SRC=$SOLUTION_HOME/conf
export SOL_CONF_DIR=$SOLUTION_REPORT_DIR/conf/spark
export SOL_LOG_DIR=$SOLUTION_REPORT_DIR/logs/spark
export SOL_TEMPLATE_DIR=$TEMPLATES_DIR/Spark
export MASTERFILE=$SOL_CONF_DIR/masters
export SLAVESFILE=$SOL_CONF_DIR/slaves
#SPARK
export SPARK_HOME=$SOLUTION_HOME
export SPARK_CONF_DIR=$SOL_CONF_DIR
export SPARK_SBIN_DIR=$SOL_SBIN_DIR
export SPARK_LOG_DIR=$SOL_LOG_DIR
export SPARK_WORKER_DIR=$SOLUTION_REPORT_DIR/logs/spark/work
# Executor count and parallelism are derived from the cluster size.
export SPARK_EXECUTORS=$(($SPARK_YARN_EXECUTORS_PER_NODE * $SLAVES_NUMBER))
export SPARK_DEFAULT_PARALLELISM=$(($SPARK_EXECUTORS * $SPARK_YARN_CORES_PER_EXECUTOR))
export PATH=$SPARK_HOME/bin:$PATH
# Drop the last version component (x.y.z -> x.y); $() replaces legacy backticks.
export SPARK_MAJOR_VERSION=$(echo $SOLUTION_VERSION | awk 'BEGIN{FS=OFS="."} NF--')
export SPARK_SERIES=$(echo ${SPARK_MAJOR_VERSION} | cut -d '.' -f 1)
#YARN environment variables
export HADOOP_HOME=$SPARK_HADOOP_HOME
export HADOOP_CONF_DIR_SRC=$HADOOP_HOME/etc/hadoop
export HADOOP_CONF_DIR=$SOLUTION_REPORT_DIR/conf/hadoop
export HADOOP_LOG_DIR=$SOLUTION_REPORT_DIR/logs/hadoop
export YARN_CONF_DIR=$HADOOP_CONF_DIR
export YARN_LOG_DIR=$SOLUTION_REPORT_DIR/logs/hadoop
export PATH=$HADOOP_HOME/bin:$PATH
export HADOOP_SERIES=$(echo ${HADOOP_HOME##*/} | cut -d '.' -f 1)
# Hadoop 3 renamed the 'slaves' file to 'workers' and moved daemon scripts.
if [[ $HADOOP_SERIES == "3" ]]
then
    export HADOOP_TEMPLATE_DIR=$TEMPLATES_DIR/Hadoop-YARN-3
    export HADOOP_SGE_DAEMONS_DIR=$SGE_DAEMONS_DIR/Hadoop-YARN-3
    export HADOOP_STD_DAEMONS_DIR=$STD_DAEMONS_DIR/Hadoop-YARN-3
    export HADOOP_SBIN_DIR=$HADOOP_HOME/libexec
    export HADOOP_SLAVESFILE=$HADOOP_CONF_DIR/workers
    if [[ "$SGE_ENV" == "true" ]]
    then
        export HADOOP_SSH_OPTS=" "
    fi
else
    export HADOOP_TEMPLATE_DIR=$TEMPLATES_DIR/Hadoop-YARN
    export HADOOP_SGE_DAEMONS_DIR=$SGE_DAEMONS_DIR/Hadoop-YARN
    export HADOOP_STD_DAEMONS_DIR=$STD_DAEMONS_DIR/Hadoop-YARN
    export HADOOP_SBIN_DIR=$HADOOP_HOME/sbin
    export HADOOP_SLAVESFILE=$HADOOP_CONF_DIR/slaves
fi
export HADOOP_MR_VERSION="YARN"
#Configuracion
export OLD_GEN_CONFIG_SCRIPT=$GEN_CONFIG_SCRIPT
export GEN_CONFIG_SCRIPT=$SOLUTION_DIR/bin/gen-config.sh
#Deploy mode
export FINISH_YARN="false"
export DEPLOY_ARGS="--master yarn --deploy-mode client \
--conf spark.hadoop.yarn.timeline-service.enabled=false"
# Propagate derived values into the benchmark's configuration system.
add_conf_param "spark_conf_dir" $SPARK_CONF_DIR
add_conf_param "spark_log_dir" $SPARK_LOG_DIR
add_conf_param "spark_worker_dir" $SPARK_WORKER_DIR
add_conf_param "spark_executor_instances" $SPARK_EXECUTORS
add_conf_param "spark_default_parallelism" $SPARK_DEFAULT_PARALLELISM
add_conf_param "spark_sql_shuffle_partitions" $SPARK_DEFAULT_PARALLELISM
add_conf_param "spark_executor_memory" $SPARK_YARN_EXECUTOR_HEAPSIZE
add_conf_param "spark_yarn_executor_memory" $SPARK_YARN_EXECUTOR_HEAPSIZE
add_conf_param "spark_executor_cores" $SPARK_YARN_CORES_PER_EXECUTOR
|
import datetime
def find_closest_start_date_employees(data, start_date):
    """Return names of employees with the earliest start date on/after start_date.

    Args:
        data: rows of [name, id, department, 'YYYY-MM-DD' start date].
        start_date: threshold date string in 'YYYY-MM-DD' format.

    Returns:
        Names (row[0]) of all employees sharing the earliest qualifying start
        date, in input order; an empty list when no row qualifies.
    """
    threshold = datetime.datetime.strptime(start_date, '%Y-%m-%d')
    # Bug fix: the original seeded min_date with today's date, silently
    # discarding any qualifying start date later than "today".  Use a None
    # sentinel so the first qualifying row always becomes the minimum.
    min_date = None
    min_date_employees = []
    for row in data:
        row_date = datetime.datetime.strptime(row[3], '%Y-%m-%d')
        if row_date < threshold:
            continue  # before the requested window
        if min_date is None or row_date < min_date:
            min_date = row_date
            min_date_employees = [row[0]]
        elif row_date == min_date:
            min_date_employees.append(row[0])
    return min_date_employees
# Test the function with sample data
data = [
    ["John Doe", 101, "Sales", "2022-01-15"],
    ["Jane Smith", 102, "Marketing", "2022-01-10"],
    ["Bob Johnson", 103, "HR", "2022-01-10"],
    ["Alice Brown", 104, "Sales", "2022-01-20"]
]
start_date = "2022-01-12"
# Only rows on/after 2022-01-12 qualify (John 01-15, Alice 01-20), and John's
# date is earlier, so the output is ["John Doe"].  (The previous comment
# claiming ["Jane Smith", "Bob Johnson"] was wrong: those rows are *before*
# the start date and are skipped.)
print(find_closest_start_date_employees(data, start_date))  # Output: ["John Doe"]
from typing import List
def find_invalid_function_names(function_names: List[str]) -> List[str]:
    """Return names that are not all-lowercase or that begin with 'without'.

    Input order is preserved; the original list is not modified.
    """
    return [
        name
        for name in function_names
        if not name.islower() or name.startswith("without")
    ]
<gh_stars>1000+
/*
* Copyright 2014-2021 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.agrona.io;
import org.agrona.DirectBuffer;
import org.agrona.concurrent.UnsafeBuffer;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Tests for DirectBufferInputStream: unsigned byte-to-int conversion and
 * end-of-stream behaviour.
 */
public class DirectBufferInputStreamTest
{
    private static final int END_OF_STREAM_MARKER = -1;

    /**
     * read() must return unsigned values (0-255): byte -1 maps to 255.
     */
    @Test
    public void shouldCorrectlyConvertBytesToPositiveIntegers()
    {
        final byte[] data = { (byte)-1, 0 };
        final DirectBuffer buffer = new UnsafeBuffer(data);
        final DirectBufferInputStream inputStream = new DirectBufferInputStream(buffer);

        // Bug fix: assertEquals takes (expected, actual) — the arguments were
        // swapped, which produces misleading failure messages.
        assertEquals(255, inputStream.read());
    }

    /**
     * read() must return -1 once the buffer is exhausted.
     */
    @Test
    public void shouldReturnMinusOneOnEndOfStream()
    {
        final byte[] data = { 1, 2 };
        final DirectBuffer buffer = new UnsafeBuffer(data);
        final DirectBufferInputStream inputStream = new DirectBufferInputStream(buffer);

        assertEquals(1, inputStream.read());
        assertEquals(2, inputStream.read());
        assertEquals(END_OF_STREAM_MARKER, inputStream.read());
    }
}
|
<reponame>alicegiandjian/calendarCL-<filename>calendarCL.py
# Make this available for Python 2 & 3
from __future__ import print_function
from apiclient import discovery
from httplib2 import Http
from oauth2client import file, client, tools
# OAuth command-line flags are only available when argparse is importable.
try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None
# This chunk of code basically gives us access to the data in the google calendar if succesful
SCOPES = 'https://www.googleapis.com/auth/calendar'
store = file.Storage('credentials.json')
creds = store.get()
if not creds or creds.invalid:
    flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
    creds = tools.run_flow(flow, store, flags) \
        if flags else tools.run(flow, store)
# With permissions now granted, we can create a point to enter the calendar
CAL = discovery.build('calendar', 'v3', http=creds.authorize(Http()))  # Requests our sign with the creditionals
GMT_OFF = '-07:00'  # PDT/MST/GMT-7
# Bug fix: the event previously ended (09-15) twelve days BEFORE it started
# (09-28); the Calendar API requires end >= start.
EVENT = {
    'summary': 'Dinner with friends',
    'start': {'dateTime': '2020-09-28T19:00:00%s' % GMT_OFF},
    'end': {'dateTime': '2020-09-28T22:00:00%s' % GMT_OFF},
}
# Insert the information into the calendar
e = CAL.events().insert(calendarId='primary',
                        sendNotifications=True, body=EVENT).execute()
# Confirm the calendar event was created successfully, checking the return value.
# Bug fix: the response key is 'dateTime', not 'dataTime' (was a KeyError).
print('''*** %r event added:
Start: %s
End %s''' % (e['summary'].encode('utf-8'),
             e['start']['dateTime'], e['end']['dateTime']))
#!/usr/bin/env bash
# This program and the accompanying materials are made available under the
# terms of the MIT license (X11 license) which accompanies this distribution.
# author: C. Bürger

# Validates a set of measurement tables by re-running record.bash in check
# mode (-x) on each, against a mandatory profiling configuration (-c).
set -e
set -o pipefail
shopt -s inherit_errexit
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
################################################################################################################ Parse arguments:
# Remember everything after the first '--' so it can be compared against the
# positional parameters left over after getopts (unknown-argument check below).
arguments="$* --"
arguments="${arguments#*--}"
# No arguments at all: show the usage screen and forward its exit status.
if [ $# -eq 0 ]
then
	"$script_dir/check-tables.bash" -h
	exit $?
fi
while getopts c:h opt
do
	case $opt in
		c)
			# -c may be given at most once; ${var+x} detects "already set".
			if [ -z ${profiling_configuration+x} ]
			then
				profiling_configuration="$OPTARG"
			else
				echo " !!! ERROR: Several profiling configurations selected via -c parameter !!!" >&2
				exit 2
			fi;;
		h|?)
			echo "Usage: -c Profiling configuration (mandatory parameter)." >&2
			echo " -- List of measurements tables to check (mandatory parameter)." >&2
			echo " Must be non-empty." >&2
			exit 2;;
	esac
done
shift $(( OPTIND - 1 ))
# Anything left over that does not match the captured '--' tail is unknown.
if [ $# -ge 1 ] && [ " $* --" != "$arguments" ]
then
	echo " !!! ERROR: Unknown [$*] command line arguments !!!" >&2
	exit 2
fi
measurements_tables=( "$@" )
shift ${#measurements_tables[@]}
# Every listed table must be an existing regular file.
for t in "${measurements_tables[@]}"
do
	if [ ! -f "$t" ]
	then
		echo " !!! ERROR: Non-existing measurements table [$t] specified via '--' argument list !!!" >&2
		exit 2
	fi
done
# At least one table is required.
if (( ${#measurements_tables[@]} == 0 ))
then
	echo " !!! ERROR: No measurements table specified via '--' argument list !!!" >&2
	exit 2
fi
###################################################################################################### Check measurements tables:
# record.bash -x verifies each table against the profiling configuration;
# set -e aborts this loop on the first failing table.
for t in "${measurements_tables[@]}"
do
	"$script_dir/record.bash" -c "$profiling_configuration" -t "$t" -x
done
exit 0
|
import com.google.common.collect.Lists;
import org.cyclops.integrateddynamics.api.part.aspect.IAspect;
import org.cyclops.integrateddynamics.core.part.aspect.AspectRegistry;
import org.cyclops.integrateddynamics.part.aspect.Aspects;
import org.cyclops.integratedtunnels.GeneralConfig;
import org.cyclops.integratedtunnels.core.part.PartTypeTunnelAspectsWorld;
import java.util.List;
public class AspectRetriever {
public List<IAspect> retrieveAspects(AspectRegistry aspectRegistry) {
List<IAspect> aspects = Lists.newArrayList();
for (IAspect aspect : aspectRegistry.getAspects()) {
aspects.add(aspect);
}
return aspects;
}
} |
package com.deskbill.tools;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import javax.sql.DataSource;
import org.apache.commons.dbcp.BasicDataSource;
/**
 * JDBC connection-pool bootstrap backed by Apache Commons DBCP.  Exposes a
 * statically configured pooled {@link DataSource} for the local MySQL
 * "bill" database.
 */
public class JDBCUtils {
    public static final String DRIVER_CLASS_NAME = "com.mysql.jdbc.Driver";
    public static String URL = "jdbc:mysql://localhost:3306/bill?characterEncoding=utf-8&useSSL=false&serverTimezone=UTC&autoReconnect=true";
    public static String USERNAME = "root";
    // NOTE(review): placeholder credential — supply via external configuration.
    public static String PASSWORD = "<PASSWORD>";

    // Pool sizing.
    private static final int MAX_IDLE = 3;      // max idle connections kept pooled
    private static final long MAX_WAIT = 5000;  // max ms to wait for a free connection
    private static final int MAX_ACTIVE = 5;    // max simultaneously active connections
    private static final int INITIAL_SIZE = 3;  // connections opened up-front

    private static BasicDataSource dataSource = new BasicDataSource();

    static {
        /* String path = System.getProperty("user.dir");
        Properties properties = new Properties();
        try {
            InputStream inputStream = new FileInputStream(path+"resource\\config.properties");
            properties.load(inputStream);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (!StringUtils.isNull(properties.getProperty("jdbc.url"))) {
            URL = properties.getProperty("jdbc.url");
        }
        if (!StringUtils.isNull(properties.getProperty("jdbc.username"))) {
            USERNAME = properties.getProperty("jdbc.username");
        }
        if (!StringUtils.isNull(properties.getProperty("jdbc.url"))) {
            PASSWORD = properties.getProperty("jdbc.password");
        }*/
        dataSource.setDriverClassName(DRIVER_CLASS_NAME);
        dataSource.setUrl(URL);
        dataSource.setUsername(USERNAME);
        dataSource.setPassword(PASSWORD);
        // Bug fix: MAX_IDLE was passed to setMaxActive (and then immediately
        // overwritten two lines later); it belongs to setMaxIdle.
        dataSource.setMaxIdle(MAX_IDLE);
        dataSource.setMaxWait(MAX_WAIT);
        dataSource.setMaxActive(MAX_ACTIVE);
        dataSource.setInitialSize(INITIAL_SIZE);
    }

    /**
     * @return the shared pooled data source
     */
    public static DataSource getDataSource() {
        return dataSource;
    }
}
|
<filename>db.go<gh_stars>0
package main
import (
"fmt"
"time"
"github.com/go-redis/redis"
)
// DB abstracts the storage backend used by the URL shortener: slug
// reservation/creation, destination records, user accounts and sessions.
type DB interface {
	// DBType names the backing implementation (e.g. "redis").
	DBType() string
	// ReserveSlug attempts to reserve val for the client fingerprint fp.
	ReserveSlug(fp string, val string) bool
	// SlugReserved reports whether val is reserved by fingerprint fp.
	SlugReserved(fp string, val string) bool
	// DestCreate stores a destination, returning its new id and success.
	DestCreate(dest *Dest) (string, bool)
	// SlugCreate promotes slug to a live redirect to dest, expiring after
	// `expire` minutes, and releases fp's reservation bookkeeping.
	SlugCreate(slug, dest string, expire int, fp string) bool
	// SlugFollow resolves slug to its destination URL.
	SlugFollow(slug string) (string, error)
	// UserCreate registers a user with a password digest.
	UserCreate(username, digest string) error
	// UserGetDigest returns the stored password digest for username.
	UserGetDigest(username string) (string, error)
	// SessionCreate stores a session token for username, returning the token.
	SessionCreate(username, token string) string
	// SessionLookup returns (tokenValid, username, userExists) for token.
	SessionLookup(token string) (bool, string, bool)
	// BuildDestIndex lists a user's destinations with their slugs.
	BuildDestIndex(username string) []*DestListing
}
// RedisDB implements the DB interface on top of a go-redis client.
type RedisDB struct {
	Client *redis.Client
}
func (rdb *RedisDB) Init(addr, pass string, dbnum int) {
rdb.Client = redis.NewClient(&redis.Options{
Addr: addr,
Password: <PASSWORD>,
DB: dbnum,
})
}
// DBType reports the backing store implementation name.
func (rdb *RedisDB) DBType() string {
	return "redis"
}
// Key builders: each namespaces a value under a fixed prefix so the different
// record types cannot collide in the shared redis keyspace.
func fingerprintKey(fp string) string { return "fp:" + fp }
func reserveKey(slug string) string { return "reserve:" + slug }
func slugKey(slug string) string { return "slug:" + slug }
func destKey(destUUID string) string { return "dest:" + destUUID }
func userKey(username string) string { return "user:" + username }
func sessionKey(token string) string { return "session:" + token }
func userdestsKey(username string) string { return "userdests:" + username }
func destslugsKey(dest string) string { return "destslugs:" + dest }
// ReserveSlug reserves slug for the client identified by fingerprint fp for
// 15 minutes, first releasing any reservation that fingerprint already held.
// Returns false when the slug is already live, or reserved by someone else.
func (rdb *RedisDB) ReserveSlug(fp string, slug string) bool {
	fpKey := fingerprintKey(fp)
	rKey := reserveKey(slug)
	sKey := slugKey(slug)
	// TODO: fix race conditions
	// check if key exists or is reserved
	slugExists := rdb.Client.Exists(sKey).Val() > 0
	if slugExists {
		return false
	}
	isReserved := rdb.Client.Exists(rKey).Val() > 0
	// Get currently reserved slug
	reservedByMe, _ := rdb.Client.HGet(fpKey, "reserve").Result()
	if isReserved && reservedByMe != rKey {
		return false // It's already reserved but by someone else
	}
	// Clear previously reserved slug
	if reservedByMe != "" {
		rdb.Client.Del(reservedByMe) // Unreserve the previously reserved slug
	}
	// Set the new one as reserved: the fingerprint hash remembers which slug
	// it holds, and the reserve key itself expires after 15 minutes.
	rdb.Client.HSet(fpKey, "reserve", rKey)
	rdb.Client.Set(rKey, fp, 15*time.Minute)
	return true
}
// SlugReserved reports whether slug is currently reserved by the client with
// fingerprint fp and has not yet been promoted to a live slug.
func (rdb *RedisDB) SlugReserved(fp, slug string) bool {
	if rdb.Client.Exists(slugKey(slug)).Val() > 0 {
		// Already live — a reservation no longer matters.
		return false
	}
	if rdb.Client.Exists(reserveKey(slug)).Val() > 0 {
		holder, _ := rdb.Client.HGet(fingerprintKey(fp), "reserve").Result()
		return holder == reserveKey(slug)
	}
	return false
}
// DestCreate persists dest under a fresh random id and records it in the
// owner's destination index, scored by creation time for ordering.
// Returns the new id and whether the hash write succeeded.
// NOTE(review): the ZAdd result is unchecked — confirm best-effort is intended.
func (rdb *RedisDB) DestCreate(dest *Dest) (string, bool) {
	destUUID := GetRandString(24)
	destKey := destKey(destUUID)
	err := rdb.Client.HMSet(destKey, *dest.ToMap()).Err()
	// add to userdests sorted set
	owner := dest.Owner
	rdb.Client.ZAdd(userdestsKey(owner), redis.Z{Score: float64(time.Now().Unix()), Member: destUUID})
	return destUUID, err == nil
}
// SlugCreate promotes a reserved slug into a live redirect pointing at
// destUUID, expiring after `expire` minutes, and clears fp's reservation
// bookkeeping.  Returns false when the redirect could not be stored.
func (rdb *RedisDB) SlugCreate(slug, destUUID string, expire int, fp string) bool {
	// Bug fix: the original computed .Err() for the Set/HDel calls but
	// discarded the results and returned true unconditionally, so storage
	// failures were silently reported as success.
	if err := rdb.Client.Set(slugKey(slug), destKey(destUUID), time.Duration(expire)*time.Minute).Err(); err != nil {
		return false
	}
	// Best-effort cleanup of the reservation bookkeeping.
	rdb.Client.HDel(fingerprintKey(fp), "reserve")
	rdb.Client.Del(reserveKey(slug))
	// Track the slug on its destination so it appears in the dest index.
	rdb.Client.LPush(destslugsKey(destUUID), slug)
	return true
}
// SlugFollow resolves slug to its destination URL by following the
// slug -> dest-key indirection and reading the "Dest" field of the
// destination hash.
func (rdb *RedisDB) SlugFollow(slug string) (string, error) {
	destRef, err := rdb.Client.Get(slugKey(slug)).Result()
	if err != nil {
		return "", err
	}
	return rdb.Client.HGet(destRef, "Dest").Result()
}
// UserCreate registers a new user with the given password digest, failing
// when the username is already taken.
func (rdb *RedisDB) UserCreate(username, digest string) error {
	uKey := userKey(username)
	if rdb.Client.Exists(uKey).Val() > 0 {
		return fmt.Errorf("User already exists")
	}
	// Bug fix: the HSet error was previously dropped, so a storage failure
	// looked like a successful registration to the caller.
	return rdb.Client.HSet(uKey, "digest", digest).Err()
}
// UserGetDigest returns the stored password digest for username.
func (rdb *RedisDB) UserGetDigest(username string) (string, error) {
	return rdb.Client.HGet(userKey(username), "digest").Result()
}
// SessionCreate stores a session token mapping to username with a 30-day
// expiry and echoes the token back to the caller.
func (rdb *RedisDB) SessionCreate(username, token string) string {
	const sessionTTL = 30 * 24 * time.Hour
	rdb.Client.Set(sessionKey(token), username, sessionTTL)
	return token
}
// SessionLookup returns whether the session token is valid, the username it
// maps to, and whether that user record still exists.
func (rdb *RedisDB) SessionLookup(token string) (bool, string, bool) {
	username, err := rdb.Client.Get(sessionKey(token)).Result()
	if err != nil {
		return false, "", false
	}
	userExists := rdb.Client.Exists(userKey(username)).Val() > 0
	return true, username, userExists
}
// DestListing is a view of one destination plus its active slugs, used to
// render a user's destination index.
type DestListing struct {
	UUID            string
	Dest            string
	Description     string
	EnableAnalytics bool
	CreatedAt       string // RFC3339 creation timestamp
	Slugs           []SlugListing
}

// SlugListing pairs a slug with its absolute expiry time (RFC3339; empty
// when the slug has no remaining TTL).
type SlugListing struct {
	Slug    string
	Expires string
}
// BuildDestIndex assembles the destination listing for username, newest
// first (the userdests sorted set is scored by creation unix time), with
// each destination carrying its slugs and their expiry times.
func (rdb *RedisDB) BuildDestIndex(username string) []*DestListing {
	var destIndex []*DestListing
	userDestsZ := rdb.Client.ZRevRangeWithScores(userdestsKey(username), 0, -1).Val()
	for _, Z := range userDestsZ {
		// The sorted-set score is the creation timestamp.
		createdAt := time.Unix(int64(Z.Score), 0).Format(time.RFC3339)
		ud := Z.Member.(string)
		destMap := rdb.Client.HGetAll(destKey(ud)).Val()
		destSlugs := rdb.Client.LRange(destslugsKey(ud), 0, -1).Val()
		var slugIndex []SlugListing
		for _, ds := range destSlugs {
			ttl := rdb.Client.TTL(slugKey(ds)).Val()
			var expires string
			if ttl > 0 {
				// Convert the remaining TTL into an absolute expiry time.
				expires = time.Now().Add(ttl).Format(time.RFC3339)
			} else {
				// Expired (or TTL-less) slugs get an empty expiry string.
				expires = ""
			}
			slugIndex = append(slugIndex, SlugListing{
				Slug: ds,
				Expires: expires,
			})
		}
		dl := DestListing{
			UUID: ud,
			Dest: destMap["Dest"],
			Description: destMap["Description"],
			EnableAnalytics: destMap["EnableAnalytics"] == "1",
			CreatedAt: createdAt,
			Slugs: slugIndex,
		}
		destIndex = append(destIndex, &dl)
	}
	return destIndex
}
|
# User defined aliases
# dir: long listing (-l), all entries (-a), human-readable sizes (-h), colored.
alias dir='ls -alh --color'
|
import statistics

# Median of an odd-length list is the middle element after sorting;
# sorted: [-4, -2, 3, 4, 7, 8, 10, 11, 12] -> middle value is 7.
sequence = [3, -2, 4, 7, 10, 12, 11, -4, 8]
median = statistics.median(sequence)
print(median)  # Output: 7
import numpy as np
from scipy import ndimage
from skimage.morphology import binary_closing, binary_fill_holes
def process_mask_image(input_img, mask_img, iterations_closing, return_inverse):
    """Morphologically clean a binary mask: close gaps, fill holes, optionally invert.

    Args:
        input_img: 2D or 3D array whose dimensionality selects the structuring
            element (mask_img is assumed to have the same rank).
        mask_img: binary mask to process.
        iterations_closing: number of binary-closing iterations to apply.
        return_inverse: when True, return the logical NOT of the cleaned mask.

    Returns:
        Boolean array containing the processed (optionally inverted) mask.

    Raises:
        ValueError: if input_img is not 2D or 3D.
    """
    if len(input_img.shape) == 3:
        se = ndimage.generate_binary_structure(3, 6)
    elif len(input_img.shape) == 2:
        se = ndimage.generate_binary_structure(2, 4)
    else:
        raise ValueError('Image must be 2D or 3D')
    # Bug fix: binary_fill_holes lives in scipy.ndimage, not
    # skimage.morphology (that import raises ImportError), and only
    # ndimage.binary_closing accepts the `iterations` keyword used here —
    # so both operations are called through ndimage.
    mask_img = ndimage.binary_closing(mask_img, se, iterations=iterations_closing)
    mask_img = ndimage.binary_fill_holes(mask_img, se)
    if return_inverse:
        mask_img = np.logical_not(mask_img)
    return mask_img
#!/bin/bash
# Clone the demo-puppet repository into $HOME and apply its site manifest.
set -e
# Make sure only root can run this script
if [ "$(id -u)" != "0" ]; then
    echo "This script must be run as root" 1>&2
    exit 1
fi
# NOTE(review): under sudo, $HOME is typically /root — confirm that is the
# intended installation location.
INSTALLATION_FOLDER="${HOME}/demo-puppet"
BRANCH="master"
# Start each run from a clean checkout.
if [ -d "${INSTALLATION_FOLDER}" ];
then
    rm -r "${INSTALLATION_FOLDER}";
fi
git clone --branch "${BRANCH}" --single-branch 'https://github.com/emilybache/demo-puppet.git' "${INSTALLATION_FOLDER}"
cd "${INSTALLATION_FOLDER}"
# Apply the manifest using the repo's bundled modules.
puppet apply --modulepath=modules/ manifests/site.pp
// import { take, call, put, select, fork, cancel } from 'redux-saga/effects';
// import { SagaCancellationException } from 'redux-saga';
// import { sampleAction } from './actions.js';
// Individual exports for testing
/**
 * Placeholder saga kept until real sagas are implemented.
 * Yields nothing and completes on the first next() call.
 */
export function* sampleSaga() {
  // Intentionally empty: the generator finishes immediately.
}
// All sagas to be loaded
export default [sampleSaga];
|
<gh_stars>100-1000
// https://cses.fi/problemset/stats/1662/
// Counts subarrays whose sum is divisible by n (CSES 1662, "Subarray
// Divisibility"): two prefix sums with equal residue mod n delimit one such
// subarray, so the answer is the number of equal-residue prefix pairs.
#include <bits/stdc++.h>
using namespace std;
using ll = long long;
using vi = vector<ll>;
using vvi = vector<vi>;
int main() {
    ios::sync_with_stdio(0);
    cin.tie(0);
    ll n;
    cin >> n;
    // a[r] = increasing list of prefix indices whose prefix sum has residue r.
    vvi a(n);
    // b[i] = residue in [0, n) of the sum of the first i input values.
    vi b(n + 1);
    a[0].push_back(0);  // the empty prefix has residue 0
    for (int i = 0; i < n; i++) {
        cin >> b[i + 1];
        (b[i + 1] += b[i]) %= n;
        if (b[i + 1] < 0) b[i + 1] += n;  // normalize C++'s negative remainder
        a[b[i + 1]].push_back(i + 1);
    }
    ll s = 0;
    for (int i = 1; i <= n; i++) {
        // x equals b[i]: for b[i] in [0, n), (b[i] - n) % n is negative and
        // the subsequent += n restores b[i].
        ll x = (b[i] - n) % n;
        if (x < 0) x += n;
        // Count earlier prefixes (index < i) with the same residue; each
        // pairs with prefix i to form one divisible subarray.
        s += lower_bound(a[x].begin(), a[x].end(), i) - a[x].begin();
    }
    cout << s << "\n";
}
|
<reponame>JaviMiot/patternDesignJS_Python<filename>Singleton/typeScript/singleton.ts<gh_stars>0
/**
 * Singleton Car: only one instance is ever created.  Later getInstance()
 * calls ignore their arguments and return the first instance.
 */
class Car {
  // The single shared instance, set on the first getInstance() call.
  static _instance: Car;
  color: string;
  capacity: number;

  // Private: forces all consumers to go through getInstance().
  private constructor(color: string, capacity: number) {
    this.color = color;
    this.capacity = capacity;
  }

  static getInstance(color: string, capacity: number) {
    if (!this._instance) {
      this._instance = new Car(color, capacity);
    }
    return this._instance;
  }
}
const c1 = Car.getInstance('red', 5)
const c2 = Car.getInstance('blue', 15)
console.log(c1)
console.log(c2)
console.log(c1==c2) |
<gh_stars>1-10
/* eslint-env jest */
const Utils = require('../../../../util/utils')
const HandleDownloadErrorAction = Utils.Actions.HandleDownloadErrorAction
const ScrapeError = Utils.Actions.Errors.ScrapeError
const PDFParseError = Utils.Actions.Errors.PDFParseError
const chai = require('chai')
const Assert = chai.assert
describe('HandleDownloadErrorAction.js', () => {
  // Error codes representing transient network failures that should requeue.
  const connectionErrors = [
    'ESOCKETTIMEDOUT',
    'ETIMEDOUT',
    'ECONNRESET',
    'EPIPE',
    'ENOTFOUND'
  ]
  // Error codes raised while reading/parsing a downloaded PDF.
  const parseErrors = [
    'InvalidPDF',
    'ParseException'
  ]
  let undertest
  beforeEach(() => {
    undertest = new HandleDownloadErrorAction(() => {}, () => {}, {
      url: 'www.google.ca'
    })
  })
  test('HandleDownloadErrorAction.js::perform() requeues connection errors', (done) => {
    // forEach, not map: the callbacks run synchronously for side effects only.
    connectionErrors.forEach(error => {
      let called = false
      undertest.callback = () => { called = true }
      undertest.perform(new ScrapeError(error, 'www.google.ca'))
      Assert(undertest.error instanceof ScrapeError)
      Assert(undertest.error.message.includes(error))
      Assert(called)
    })
    done()
  })
  test('HandleDownloadErrorAction.js::perform() requeues on read errors', (done) => {
    // Bug fix: the callback was previously marked `async`, so a failing
    // assertion became an unhandled promise rejection surfacing *after*
    // done() instead of failing this test.  perform() is synchronous here,
    // so a plain synchronous callback is correct.
    parseErrors.forEach(error => {
      let called = false
      undertest.callback = () => { called = true }
      undertest.perform(new PDFParseError(error, 'www.google.ca'))
      Assert(undertest.error instanceof PDFParseError)
      Assert(undertest.error.message.includes(error))
      Assert(called)
    })
    done()
  })
})
|
#!/bin/bash
# POST a sample FHIR Patient resource to the STU3 test server; the JSON
# response is written to /tmp/upload_response.txt.
# Bug fix: the shebang was "#/bin/bash" (missing '!'), so the file would be
# interpreted by whatever shell invoked it rather than bash.
curl 'https://stu3.test.pyrohealth.net/fhir/Patient' \
  -H 'Connection: keep-alive' \
  -H 'sec-ch-ua: " Not A;Brand";v="99", "Chromium";v="98", "Google Chrome";v="98"' \
  -H 'Accept: application/json; charset=utf-8' \
  -H 'Prefer: return=representation' \
  -H 'Content-Type: application/json' \
  -H 'sec-ch-ua-mobile: ?0' \
  -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36' \
  -H 'sec-ch-ua-platform: "Linux"' \
  -H 'Origin: https://fhir-drills.github.io' \
  -H 'Sec-Fetch-Site: cross-site' \
  -H 'Sec-Fetch-Mode: cors' \
  -H 'Sec-Fetch-Dest: empty' \
  -H 'Referer: https://fhir-drills.github.io/' \
  -H 'Accept-Language: en-US,en;q=0.9' \
  --data-raw $'{"resourceType":"Patient","maritalStatus":{"coding":[{"code":"M","display":"Married","system":"http://hl7.org/fhir/v3/MaritalStatus"}]},"text":{"status":"generated","div":"<div xmlns=\'http://www.w3.org/1999/xhtml\'>\\n <p>Patient: Fhirman, Sam</p>\\n</div>"},"identifier":[{"system":"http://ns.electronichealth.net.au/id/hi/ihi/1.0","value":"8003608166690503","type":{"coding":[{"code":"NI","display":"National unique individual identifier","system":"http://hl7.org/fhir/v2/0203"}],"text":"IHI"}},{"system":"urn:oid:1.2.36.146.595.217.0.1","period":{"start":"2001-05-06"},"use":"usual","type":{"coding":[{"system":"http://hl7.org/fhir/v2/0203","code":"MR"}]},"assigner":{"display":"Acme Healthcare"},"value":"6666"}],"telecom":[{"system":"phone","use":"mobile","value":"+61481059995"}],"birthDate":"1973-09-30","gender":"male","address":[{"postalCode":"4000","use":"work","line":["400 George Street"],"city":"Brisbane","country":"AUS","state":"QLD"}],"name":[{"use":"official","text":"Sam Fhirman","prefix":["Mr"],"family":["Fhirman"],"given":["Sam"]}]}' \
  --compressed > /tmp/upload_response.txt
|
import tensorflow as tf
import pyredner
def set_gpu_flag():
    """Enable pyredner's GPU path when TensorFlow sees a CUDA device.

    Returns:
        bool: True when a CUDA-capable GPU is available (pyredner is then
        switched to GPU mode as a side effect), False otherwise.
    """
    gpu_available = tf.test.is_gpu_available(cuda_only=True, min_cuda_compute_capability=None)
    if gpu_available:
        pyredner.set_use_gpu(True)
    return gpu_available

# Test the function
print(set_gpu_flag())
def find_max_sum_subarray(array):
    """Return the maximum sum of any contiguous subarray (Kadane's algorithm).

    The empty subarray is allowed, so the result is never negative: an empty
    or all-negative input yields 0, matching the original contract.

    Bug fix: the original reset ``max_start_index`` to 0 on every improvement
    and assigned an uninitialized ``start_index`` variable; the start of the
    current candidate run is now tracked correctly.
    """
    # Best subarray found so far and its (inclusive) index range.
    max_sum = 0
    max_start_index = 0
    max_end_index = 0
    # Running sum of the current candidate subarray and where it begins.
    current_sum = 0
    start_index = 0
    for i in range(len(array)):
        current_sum += array[i]
        # A new best subarray: record its true start, not index 0.
        if current_sum > max_sum:
            max_sum = current_sum
            max_start_index = start_index
            max_end_index = i
        # A negative running sum can never help a later subarray; restart
        # just after this position.
        if current_sum < 0:
            current_sum = 0
            start_index = i + 1
    # Return the sum of the maximum subarray
    return max_sum
package example
import "gorm.io/gorm"
//go:generate gormgen -structs User -output user_gen.go
// User is the GORM-backed user record; the go:generate directive above has
// gormgen emit query helpers for it into user_gen.go.
type User struct {
	gorm.Model
	Name string // display name
	Age int
	Email string
}
|
# Ensuring that everything is up-to-date
git submodule sync --recursive
git submodule update --init --recursive
git submodule foreach git pull origin master
# Set variables
export FOLDER_SOURCE="${FOLDER_SOURCE:-"$( cd "$(dirname "$0")" ; pwd -P )"/..}"
export FOLDER_BUILD="${FOLDER_SOURCE}/build"
# Configure into the build folder, then build there.
# Bug fix: the original ran a bare `cmake ..`, which configures the current
# working directory's parent instead of ${FOLDER_BUILD} that the build step
# below uses; configure explicitly with -S/-B so both steps agree.
mkdir -p "${FOLDER_BUILD}"
cmake --no-warn-unused-cli -S "${FOLDER_SOURCE}" -B "${FOLDER_BUILD}"
cmake --build "${FOLDER_BUILD}" -- -j"$(nproc)"
|
<reponame>zenglongGH/spresense
// Auto-generated Doxygen navigation data for the CMSIS CPACR bit-definition
// group: [ display name, documentation anchor, children ] triples.
// Do not edit by hand; regenerate with Doxygen instead.
var group__CMSIS__CPACR__BITS =
[
    [ "CPACR_ASEDIS_Msk", "group__CMSIS__CPACR__BITS.html#ga46d28804bfa370b0dd4ac520a7a67609", null ],
    [ "CPACR_ASEDIS_Pos", "group__CMSIS__CPACR__BITS.html#ga3acd342ab1e88bd4ad73f5670e7af163", null ],
    [ "CPACR_CP_Msk_", "group__CMSIS__CPACR__BITS.html#ga7c87723442baa681a80de8f644eda1a2", null ],
    [ "CPACR_CP_Pos_", "group__CMSIS__CPACR__BITS.html#ga77dc035e6d16dee8f5cf53b36b86cfaf", null ],
    [ "CPACR_D32DIS_Msk", "group__CMSIS__CPACR__BITS.html#ga96266eb6bf35c3c3f22718bd06b12d79", null ],
    [ "CPACR_D32DIS_Pos", "group__CMSIS__CPACR__BITS.html#ga6df0c4e805105285e63b0f0e992bd416", null ],
    [ "CPACR_TRCDIS_Msk", "group__CMSIS__CPACR__BITS.html#gab5d6ec83339e755bd3e7eacb914edf37", null ],
    [ "CPACR_TRCDIS_Pos", "group__CMSIS__CPACR__BITS.html#ga6866c97020fdba42f7c287433c58d77c", null ]
];
#!/bin/bash
# Copyright 2012-2013 Brno University of Technology (author: Karel Vesely), Daniel Povey
# Apache 2.0.
# Create denominator lattices for MMI/MPE/sMBR training.
# Creates its output in $dir/lat.*.ark,$dir/lat.scp
# The lattices are uncompressed, we need random access for DNN training.
# Begin configuration section.
nj=4                    # number of parallel decoding jobs
cmd=run.pl              # job dispatcher (run.pl locally, queue.pl on a grid)
sub_split=1             # optionally split each of the nj jobs further (see usage)
beam=13.0               # decoding beam
lattice_beam=7.0        # lattice-generation beam
acwt=0.1                # acoustic scale applied during decoding
max_active=5000         # decoder max-active states
nnet=                   # override for the nnet model (default: $srcdir/final.nnet)
ivector_append_tool=append-vector-to-feats # default
nnet_forward_opts="--no-softmax=true --prior-scale=1.0"
max_mem=20000000 # This will stop the processes getting too large.
# This is in bytes, but not "real" bytes-- you have to multiply
# by something like 5 or 10 to get real bytes (not sure why so large)
# End configuration section.
use_gpu=no # yes|no|optional
parallel_opts="--num-threads 2"
ivector= # rx-specifier with i-vectors (ark-with-vectors),
echo "$0 $@" # Print the command line for logging
[ -f ./path.sh ] && . ./path.sh; # source the path.
. parse_options.sh || exit 1;
set -euo pipefail
if [ $# != 4 ]; then
   echo "Usage: steps/$0 [options] <data-dir> <lang-dir> <src-dir> <exp-dir>"
   echo "  e.g.: steps/$0 data/train data/lang exp/tri1 exp/tri1_denlats"
   echo "Works for plain features (or CMN, delta), forwarded through feature-transform."
   echo ""
   echo "Main options (for others, see top of script file)"
   echo "  --config <config-file>                           # config containing options"
   echo "  --nj <nj>                                        # number of parallel jobs"
   echo "  --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
   echo "  --sub-split <n-split>                            # e.g. 40; use this for "
   echo "                           # large databases so your jobs will be smaller and"
   echo "                           # will (individually) finish reasonably soon."
   exit 1;
fi
# Positional arguments.
data=$1
lang=$2
srcdir=$3
dir=$4
# Per-job split of the data directory (regenerated if stale).
sdata=$data/split$nj
mkdir -p $dir/log
[[ -d $sdata && $data/feats.scp -ot $sdata ]] || split_data.sh $data $nj || exit 1;
echo $nj > $dir/num_jobs
oov=`cat $lang/oov.int` || exit 1;
mkdir -p $dir
utils/lang/check_phones_compatible.sh $lang/phones.txt $srcdir/phones.txt
cp -r $lang $dir/
# Compute grammar FST which corresponds to unigram decoding graph.
new_lang="$dir/"$(basename "$lang")
echo "Making unigram grammar FST in $new_lang"
# Strip utterance-ids (field 1), then estimate a unigram G.fst from the text.
cat $data/text | utils/sym2int.pl --map-oov $oov -f 2- $lang/words.txt | \
  awk '{for(n=2;n<=NF;n++){ printf("%s ", $n); } printf("\n"); }' | \
  utils/make_unigram_grammar.pl | fstcompile | fstarcsort --sort_type=ilabel > $new_lang/G.fst \
  || exit 1;
# mkgraph.sh expects a whole directory "lang", so put everything in one directory...
# it gets L_disambig.fst and G.fst (among other things) from $dir/lang, and
# final.mdl from $srcdir; the output HCLG.fst goes in $dir/graph.
echo "Compiling decoding graph in $dir/dengraph"
if [ -s $dir/dengraph/HCLG.fst ] && [ $dir/dengraph/HCLG.fst -nt $srcdir/final.mdl ]; then
  echo "Graph $dir/dengraph/HCLG.fst already exists: skipping graph creation."
else
  utils/mkgraph.sh $new_lang $srcdir $dir/dengraph || exit 1;
fi
cp $srcdir/{tree,final.mdl} $dir
# Select default locations to model files
[ -z "$nnet" ] && nnet=$srcdir/final.nnet;
class_frame_counts=$srcdir/ali_train_pdf.counts
feature_transform=$srcdir/final.feature_transform
model=$dir/final.mdl
# Check that files exist
for f in $sdata/1/feats.scp $nnet $model $feature_transform $class_frame_counts; do
  [ ! -f $f ] && echo "$0: missing file $f" && exit 1;
done
# PREPARE FEATURE EXTRACTION PIPELINE
# import config,
cmvn_opts=
delta_opts=
D=$srcdir
[ -e $D/norm_vars ] && cmvn_opts="--norm-means=true --norm-vars=$(cat $D/norm_vars)" # Bwd-compatibility,
[ -e $D/cmvn_opts ] && cmvn_opts=$(cat $D/cmvn_opts)
[ -e $D/delta_order ] && delta_opts="--delta-order=$(cat $D/delta_order)" # Bwd-compatibility,
[ -e $D/delta_opts ] && delta_opts=$(cat $D/delta_opts)
#
# Create the feature stream, ('JOB' is substituted per parallel job later),
feats="ark,s,cs:copy-feats scp:$sdata/JOB/feats.scp ark:- |"
# apply-cmvn (optional),
[ ! -z "$cmvn_opts" -a ! -f $sdata/1/cmvn.scp ] && echo "$0: Missing $sdata/1/cmvn.scp" && exit 1
[ ! -z "$cmvn_opts" ] && feats="$feats apply-cmvn $cmvn_opts --utt2spk=ark:$sdata/JOB/utt2spk scp:$sdata/JOB/cmvn.scp ark:- ark:- |"
# add-deltas (optional),
[ ! -z "$delta_opts" ] && feats="$feats add-deltas $delta_opts ark:- ark:- |"
# add-pytel transform (optional),
[ -e $D/pytel_transform.py ] && feats="$feats /bin/env python $D/pytel_transform.py |"
# add-ivector (optional),
if [ -e $D/ivector_dim ]; then
  [ -z $ivector ] && echo "Missing --ivector, they were used in training!" && exit 1
  # Get the tool,
  # ivector_append_tool=append-vector-to-feats # default,
  [ -e $D/ivector_append_tool ] && ivector_append_tool=$(cat $D/ivector_append_tool)
  # Check dims (compare feature dim with/without the appended i-vector),
  feats_job_1=$(sed 's:JOB:1:g' <(echo $feats))
  dim_raw=$(feat-to-dim "$feats_job_1" -)
  dim_raw_and_ivec=$(feat-to-dim "$feats_job_1 $ivector_append_tool ark:- '$ivector' ark:- |" -)
  dim_ivec=$((dim_raw_and_ivec - dim_raw))
  [ $dim_ivec != "$(cat $D/ivector_dim)" ] && \
    echo "Error, i-vector dim. mismatch (expected $(cat $D/ivector_dim), got $dim_ivec in '$ivector')" && \
    exit 1
  # Append to feats,
  feats="$feats $ivector_append_tool ark:- '$ivector' ark:- |"
fi
# nnet-forward, (produces pseudo-likelihoods for the decoder),
feats="$feats nnet-forward $nnet_forward_opts --feature-transform=$feature_transform --class-frame-counts=$class_frame_counts --use-gpu=$use_gpu $nnet ark:- ark:- |"
# if this job is interrupted by the user, we want any background jobs to be
# killed too.
cleanup() {
  # jobs -pr lists PIDs of still-running background jobs; '|| true' keeps the
  # trap from failing under 'set -e' when there is nothing to kill.
  local pids=$(jobs -pr)
  [ -n "$pids" ] && kill $pids || true
}
trap "cleanup" INT QUIT TERM EXIT
echo "$0: generating denlats from data '$data', putting lattices in '$dir'"
#1) Generate the denominator lattices
if [ $sub_split -eq 1 ]; then
  # Prepare 'scp' for storing lattices separately and gzipped
  for n in `seq $nj`; do
    [ ! -d $dir/lat$n ] && mkdir $dir/lat$n;
    cat $sdata/$n/feats.scp | \
    awk -v dir=$dir -v n=$n '{ utt=$1; utt_noslash=utt; gsub("/","_",utt_noslash);
                               printf("%s | gzip -c >%s/lat%d/%s.gz\n", utt, dir, n, utt_noslash); }'
  done >$dir/lat.store_separately_as_gz.scp
  # Generate the lattices
  $cmd $parallel_opts JOB=1:$nj $dir/log/decode_den.JOB.log \
    latgen-faster-mapped --beam=$beam --lattice-beam=$lattice_beam --acoustic-scale=$acwt \
    --max-mem=$max_mem --max-active=$max_active --word-symbol-table=$lang/words.txt $srcdir/final.mdl \
    $dir/dengraph/HCLG.fst "$feats" "scp:$dir/lat.store_separately_as_gz.scp" || exit 1;
else
  # each job from 1 to $nj is split into multiple pieces (sub-split), and we aim
  # to have at most two jobs running at each time. The idea is that if we have stragglers
  # from one job, we can be processing another one at the same time.
  rm -f $dir/.error
  prev_pid=
  for n in `seq $((nj+1))`; do   # $((...)) replaces the deprecated $[...] form
    if [ $n -gt $nj ]; then
      this_pid=
    elif [ -f $dir/.done.$n ] && [ $dir/.done.$n -nt $srcdir/final.mdl ]; then
      echo "Not processing subset $n as already done (delete $dir/.done.$n if not)";
      this_pid=
    else
      sdata2=$data/split$nj/$n/split${sub_split}utt;
      split_data.sh --per-utt $sdata/$n $sub_split || exit 1;
      mkdir -p $dir/log/$n
      mkdir -p $dir/part
      feats_subset=$(echo $feats | sed s:JOB/:$n/split${sub_split}utt/JOB/:g)
      # Prepare 'scp' for storing lattices separately and gzipped
      for k in `seq $sub_split`; do
        [ ! -d $dir/lat$n/$k ] && mkdir -p $dir/lat$n/$k;
        cat $sdata2/$k/feats.scp | \
        awk -v dir=$dir -v n=$n -v k=$k '{ utt=$1; utt_noslash=utt; gsub("/","_",utt_noslash);
                                           printf("%s | gzip -c >%s/lat%d/%d/%s.gz\n", utt, dir, n, k, utt_noslash); }'
      done >$dir/lat.${n}.store_separately_as_gz.scp
      # Generate lattices.
      # Bug fix: the failure marker must be $dir/.error (that is the path the
      # check below reads); the original touched ./.error in the cwd, so
      # failures of sub-split jobs were silently ignored.
      $cmd $parallel_opts JOB=1:$sub_split $dir/log/$n/decode_den.JOB.log \
        latgen-faster-mapped --beam=$beam --lattice-beam=$lattice_beam --acoustic-scale=$acwt \
        --max-mem=$max_mem --max-active=$max_active --word-symbol-table=$lang/words.txt $srcdir/final.mdl \
        $dir/dengraph/HCLG.fst "$feats_subset" scp:$dir/lat.$n.store_separately_as_gz.scp || touch $dir/.error &
      this_pid=$!
    fi
    if [ ! -z "$prev_pid" ]; then # Wait for the previous job; merge the previous set of lattices.
      wait $prev_pid
      [ -f $dir/.error ] && echo "$0: error generating denominator lattices" && exit 1;
      touch $dir/.done.$prev_n
    fi
    prev_n=$n
    prev_pid=$this_pid
  done
fi
#2) Generate 'scp' for reading the lattices
# make $dir an absolute pathname.
[ '/' != ${dir:0:1} ] && dir=$PWD/$dir
for n in `seq $nj`; do
  find $dir/lat${n} -name "*.gz" | perl -ape 's:.*/([^/]+)\.gz$:$1 gunzip -c $& |:; '
done | sort >$dir/lat.scp
[ -s $dir/lat.scp ] || exit 1
echo "$0: done generating denominator lattices."
|
#!/bin/bash
# Fail fast: every later step assumes the previous one succeeded; in
# particular, if the cd below fails, make would otherwise run `install`
# from whatever directory we happen to be in.
set -e
SERVICE_USER_GROUP=vagrant:vagrant
# runtime environment
mkdir -p /apps/aspace/apache/{bin,logs,run}
chown -R "$SERVICE_USER_GROUP" /apps/aspace/apache
# symlink to system modules
ln -sf /usr/lib64/httpd/modules /apps/aspace/apache/modules
# compile the helper setuid program
cd /apps/aspace/apache/src
make SERVICE_USER=vagrant SERVICE_GROUP=vagrant install clean
|
// Define the supported operations
/// Arithmetic operations understood by the tiny expression interpreter.
enum Operation {
    Sum,
    Sub,
    Mul,
    Div,
}

/// Evaluates `operation` over `args`.
///
/// `Sum`/`Mul` reduce across every value (identities 0 and 1 for an empty
/// list); `Sub`/`Div` fold the remaining values into the first one, left to
/// right, and return 0 when `args` is empty. Division by zero panics, as in
/// ordinary integer division.
fn interpret_operation(operation: Operation, args: Vec<isize>) -> isize {
    // Fold the tail of `args` into its head with `op`, or 0 when empty.
    let fold_from_first = |op: fn(isize, isize) -> isize| match args.split_first() {
        Some((&first, rest)) => rest.iter().copied().fold(first, op),
        None => 0,
    };
    match operation {
        Operation::Sum => args.iter().sum(),
        Operation::Mul => args.iter().product(),
        Operation::Sub => fold_from_first(|acc, x| acc - x),
        Operation::Div => fold_from_first(|acc, x| acc / x),
    }
}
// Add the missing functionality to the interpreter
// NOTE(review): this fragment assumes `bundle` (a fluent FluentBundle) and
// `FluentValue` are already in scope at this point of the original file;
// that surrounding code is not visible here - confirm before reuse.
// Each registered function converts the fluent argument list to isize values
// and delegates to interpret_operation above.
bundle.add_function("SUB", |args, _named_args| {
    let values: Vec<isize> = args.iter().map(|&x| x).collect();
    FluentValue::Int(interpret_operation(Operation::Sub, values))
})
.expect("Failed to add a function to the bundle.");
bundle.add_function("MUL", |args, _named_args| {
    let values: Vec<isize> = args.iter().map(|&x| x).collect();
    FluentValue::Int(interpret_operation(Operation::Mul, values))
})
.expect("Failed to add a function to the bundle.");
bundle.add_function("DIV", |args, _named_args| {
    let values: Vec<isize> = args.iter().map(|&x| x).collect();
    FluentValue::Int(interpret_operation(Operation::Div, values))
})
.expect("Failed to add a function to the bundle.");
#include <bitset>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>
// Right rotate operation (32-bit).
#define ROTRIGHT(word, bits) (((word) >> (bits)) | ((word) << (32 - (bits))))
// SHA-256 logical functions (FIPS 180-4, section 4.1.2). All macro arguments
// are fully parenthesized so the macros are safe with compound expressions.
#define Ch(x, y, z) (((x) & (y)) ^ (~(x) & (z)))
#define Maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
#define Sigma0(x) (ROTRIGHT((x), 2) ^ ROTRIGHT((x), 13) ^ ROTRIGHT((x), 22))
#define Sigma1(x) (ROTRIGHT((x), 6) ^ ROTRIGHT((x), 11) ^ ROTRIGHT((x), 25))
#define Gamma0(x) (ROTRIGHT((x), 7) ^ ROTRIGHT((x), 18) ^ ((x) >> 3))
#define Gamma1(x) (ROTRIGHT((x), 17) ^ ROTRIGHT((x), 19) ^ ((x) >> 10))
// SHA-256 round constants (FIPS 180-4, section 4.2.2).
const uint32_t K[] = {
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
};
// SHA-256 state
struct State {
    uint32_t h[8]; // Running hash value; the caller seeds the initial values.
};
// SHA-256 core processing function.
// Pads `data` (len bytes) per FIPS 180-4, then runs the compression function
// over each 512-bit block, updating s.h in place. The original left this as
// an empty stub; this is a full single-shot implementation (the whole message
// must be passed in one call, since padding is appended here).
void sha256_process(const uint8_t *data, size_t len, State &s) {
    // Message Padding: append 0x80, zero-fill until length % 64 == 56, then
    // append the original length in bits as a 64-bit big-endian integer.
    std::vector<uint8_t> msg(data, data + len);
    const uint64_t bit_len = static_cast<uint64_t>(len) * 8;
    msg.push_back(0x80);
    while (msg.size() % 64 != 56) msg.push_back(0x00);
    for (int i = 7; i >= 0; --i)
        msg.push_back(static_cast<uint8_t>(bit_len >> (8 * i)));
    // Processing: one compression round per 64-byte block.
    for (size_t off = 0; off < msg.size(); off += 64) {
        // Parsing: build the 64-entry message schedule from the block.
        uint32_t w[64];
        for (int t = 0; t < 16; ++t)
            w[t] = (static_cast<uint32_t>(msg[off + 4 * t]) << 24) |
                   (static_cast<uint32_t>(msg[off + 4 * t + 1]) << 16) |
                   (static_cast<uint32_t>(msg[off + 4 * t + 2]) << 8) |
                   static_cast<uint32_t>(msg[off + 4 * t + 3]);
        for (int t = 16; t < 64; ++t)
            w[t] = Gamma1(w[t - 2]) + w[t - 7] + Gamma0(w[t - 15]) + w[t - 16];
        // 64 rounds over the working variables a..h.
        uint32_t a = s.h[0], b = s.h[1], c = s.h[2], d = s.h[3];
        uint32_t e = s.h[4], f = s.h[5], g = s.h[6], h = s.h[7];
        for (int t = 0; t < 64; ++t) {
            const uint32_t t1 = h + Sigma1(e) + Ch(e, f, g) + K[t] + w[t];
            const uint32_t t2 = Sigma0(a) + Maj(a, b, c);
            h = g; g = f; f = e; e = d + t1;
            d = c; c = b; b = a; a = t1 + t2;
        }
        // Update the state s.h[8] with the intermediate hash value.
        s.h[0] += a; s.h[1] += b; s.h[2] += c; s.h[3] += d;
        s.h[4] += e; s.h[5] += f; s.h[6] += g; s.h[7] += h;
    }
}
// Example usage
int main() {
    std::string message = "Hello, SHA-256!";
    // Seed with the standard SHA-256 initial hash values (FIPS 180-4, 5.3.3).
    State s = {{0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19}};
    sha256_process(reinterpret_cast<const uint8_t *>(message.c_str()), message.length(), s);
    // Print the eight 32-bit digest words in hex, space-separated.
    for (int i = 0; i < 8; i++) {
        std::cout << std::hex << s.h[i] << " ";
    }
    return 0;
}
<gh_stars>1-10
export const serialize = (
object,
{
objectPrefix = '=',
objectPostfix = '+',
separator = '&',
emptyPlaceholder = ',',
encoder = encodeURIComponent,
} = {}
) => {
const escapeRegEx = string => string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
const prefixEscaped = escapeRegEx(objectPrefix);
const postfixEscaped = escapeRegEx(objectPostfix);
const separatorEscaped = escapeRegEx(separator);
const emptyEscaped = escapeRegEx(emptyPlaceholder);
if (typeof object === 'string') object = JSON.parse(object);
const string = Object.values(object)
.map(function encodeValues(value) {
if (value && typeof value === 'object') {
return (
objectPrefix +
Object.values(value)
.map(encodeValues)
.join(separator) +
objectPostfix
);
}
if (!value && value !== 0) value = emptyPlaceholder;
else {
if (value === true) value = 1;
value = encoder(value);
if (
value.match(
`(${postfixEscaped})|(${prefixEscaped})|(${separatorEscaped})|(${emptyEscaped})`
)
)
throw `Defined postfix, prefix or separator exists in source! (value: ${value})`;
}
return value;
})
.join(separator)
.replace(
// last postfixes are not needed
new RegExp(`${postfixEscaped}+$`),
''
)
.replace(
// prefix / postfix can act as sole separator
new RegExp(`${separatorEscaped}${prefixEscaped}`, 'g'),
objectPrefix
)
.replace(
new RegExp(`${postfixEscaped}${separatorEscaped}`, 'g'),
objectPostfix
);
return string;
};
/**
 * Inverse of serialize: parses `string` back into (nested) arrays of decoded
 * values and, when `template` is given, maps them positionally onto a copy of
 * the template, coercing numbers and booleans to the template field's type.
 *
 * NOTE(review): assumes `string` was produced by serialize with the SAME
 * delimiter options - mismatched options silently mis-parse.
 */
export const deserialize = (
  string,
  template,
  {
    objectPrefix = '=',
    objectPostfix = '+',
    separator = '&',
    emptyPlaceholder = ',',
    decoder = decodeURIComponent,
  } = {}
) => {
  // Stack of arrays: arrays[0] is the root; objectPrefix pushes a new current
  // array, objectPostfix pops it back into its parent.
  let arrays = [[]];
  let curArray = arrays[0];
  let curValue = '';
  for (const c of string) {
    if ([separator, objectPrefix, objectPostfix].indexOf(c) !== -1) {
      // Delimiter reached: flush the value accumulated so far.
      if (curValue === emptyPlaceholder) curArray.push('');
      else if (curValue) curArray.push(decoder(curValue));
      curValue = '';
      if (c === objectPrefix) {
        curArray = [];
        arrays.push(curArray);
      } else if (c === objectPostfix) {
        arrays.pop();
        arrays[arrays.length - 1].push(curArray);
        curArray = arrays[arrays.length - 1];
      }
      continue;
    }
    curValue += c;
  }
  // if last objectPostfixes are missing
  if (curValue === emptyPlaceholder) curArray.push('');
  else if (curValue) curArray.push(decoder(curValue));
  while (arrays.length > 1) arrays[arrays.length - 2].push(arrays.pop());
  // remove outer array
  arrays = arrays[0];
  if (template) {
    let object;
    if (Array.isArray(template)) object = [...template];
    else object = { ...template };
    // Walk the template and the parsed arrays in parallel (by position),
    // coercing each leaf to the template value's type.
    const mapValuesToObject = (object, array) => {
      Object.entries(object).forEach(([key, value], i) => {
        if (value && typeof value === 'object') {
          mapValuesToObject(value, array[i]);
          return;
        }
        object[key] = array[i];
        if (typeof value === 'number') object[key] = parseFloat(array[i]) || 0;
        else if (typeof value === 'boolean') object[key] = !!array[i];
      });
    };
    mapValuesToObject(object, arrays);
    return object;
  }
  return arrays;
};
|
from os import path, walk
from typing import List
def count_wiki_files(f_d: List[str]) -> int:
files = []
for i in f_d:
if path.isdir(i):
for dirpath, dirnames, filenames in walk(i):
files.extend(path.join(dirpath, j) for j in filenames if len(j) >= 5 and j.startswith(("wiki")))
else:
files.append(i)
wiki_files = [file for file in files if len(file) >= 5 and path.basename(file).startswith("wiki")]
return len(wiki_files)
# Test the function
f_d = ['/path/to/directory1', '/path/to/directory2/file1.txt', '/path/to/directory3/file2.txt', '/path/to/directory4/wiki_file.txt']
print(count_wiki_files(f_d)) # Output: 2 |
<gh_stars>0
"use strict";
exports.__esModule = true;
exports.Searcher = void 0;
var _react = _interopRequireWildcard(require("react"));
var _recompose = require("recompose");
var _styledComponents = require("styled-components");
var _FormSearch = require("grommet-icons/icons/FormSearch");
var _defaultProps = require("../../default-props");
var _Box = require("../Box");
var _Button = require("../Button");
var _Keyboard = require("../Keyboard");
var _Text = require("../Text");
var _TextInput = require("../TextInput");
var _utils = require("../../utils");
// Babel helper: CommonJS interop for `import * as ns` - copies own properties
// (preserving getters/setters) onto a fresh namespace object.
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj["default"] = obj; return newObj; } }
// Babel helper: guards against using `this` before super() has run.
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
// Babel helper: loose class inheritance (prototype chain + static chain).
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
// Babel helper: define/overwrite a property as enumerable, configurable, writable.
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
// Babel-transpiled output of Grommet's DataTable column Searcher component:
// renders a focused text input while this column is being filtered, otherwise
// the current filter text (if any) plus a search-icon button that toggles
// filtering for the column.
var Searcher =
/*#__PURE__*/
function (_Component) {
  _inheritsLoose(Searcher, _Component);
  function Searcher() {
    var _this;
    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }
    _this = _Component.call.apply(_Component, [this].concat(args)) || this;
    // Ref to the TextInput so it can be focused when filtering starts.
    _defineProperty(_assertThisInitialized(_this), "inputRef", _react["default"].createRef());
    return _this;
  }
  var _proto = Searcher.prototype;
  _proto.componentDidMount = function componentDidMount() {
    this.focusInputIfNeeded();
  };
  _proto.componentDidUpdate = function componentDidUpdate() {
    this.focusInputIfNeeded();
  };
  // Focus the input whenever this column is the one currently being filtered.
  _proto.focusInputIfNeeded = function focusInputIfNeeded() {
    /* eslint-disable-next-line react/prop-types */
    var _this$props = this.props,
        filtering = _this$props.filtering,
        property = _this$props.property;
    if (this.inputRef.current && filtering === property) {
      this.inputRef.current.focus();
    }
  };
  _proto.render = function render() {
    var _this$props2 = this.props,
        filtering = _this$props2.filtering,
        filters = _this$props2.filters,
        onFilter = _this$props2.onFilter,
        onFiltering = _this$props2.onFiltering,
        property = _this$props2.property,
        theme = _this$props2.theme;
    // Active-filtering state: show the input; Esc or blur ends filtering.
    if (filtering === property) {
      return _react["default"].createElement(_Keyboard.Keyboard, {
        onEsc: function onEsc() {
          return onFiltering(undefined);
        }
      }, _react["default"].createElement(_Box.Box, {
        flex: true,
        pad: {
          horizontal: 'small'
        }
      }, _react["default"].createElement(_TextInput.TextInput, {
        name: "search-" + property,
        ref: this.inputRef,
        value: filters[property],
        onChange: function onChange(event) {
          return onFilter(property, event.target.value);
        },
        onBlur: function onBlur() {
          return onFiltering(undefined);
        }
      })));
    }
    // Idle state: current filter text (if set) plus the toggle button.
    return _react["default"].createElement(_react["default"].Fragment, null, filters[property] ? _react["default"].createElement(_Box.Box, {
      flex: false,
      pad: {
        horizontal: 'small'
      }
    }, _react["default"].createElement(_Text.Text, null, filters[property])) : null, _react["default"].createElement(_Button.Button, {
      a11yTitle: "focus-search-" + property,
      icon: _react["default"].createElement(_FormSearch.FormSearch, {
        color: (0, _utils.normalizeColor)(filtering === property ? 'brand' : 'border', theme)
      }),
      hoverIndicator: true,
      onClick: function onClick() {
        return onFiltering(filtering === property ? undefined : property);
      }
    }));
  };
  return Searcher;
}(_react.Component);
Searcher.defaultProps = {};
Object.setPrototypeOf(Searcher.defaultProps, _defaultProps.defaultProps);
// Exported component is the theme-aware wrapper, not the bare class.
var SearcherWrapper = (0, _recompose.compose)(_styledComponents.withTheme)(Searcher);
exports.Searcher = SearcherWrapper;
# Import necessary modules
from pythran.passmanager import ModuleAnalysis
from beniget import DefUseChains
# Create a class SilentDefUseChains that inherits from DefUseChains
class SilentDefUseChains(DefUseChains):
    """DefUseChains variant that analyzes a source file and reports counts."""

    def analyze_variables(self, source_file):
        """Parse `source_file` and run beniget's def-use analysis over it.

        beniget's DefUseChains visits an `ast.Module`, so the file is parsed
        here; the original code handed it a pythran ModuleAnalysis object,
        which DefUseChains cannot traverse.
        """
        import ast  # local import keeps the module's top-level imports untouched
        with open(source_file, "r") as handle:
            tree = ast.parse(handle.read(), filename=source_file)
        self.visit(tree)

    def generate_report(self):
        """Print how many definitions exist and how many uses they have.

        DefUseChains exposes results via its `chains` mapping (node -> Def);
        the original read nonexistent `def_chains`/`use_chains` attributes
        and would raise AttributeError.
        """
        defined_variables = len(self.chains)
        used_variables = sum(len(d.users()) for d in self.chains.values())
        print(f"Defined variables: {defined_variables}")
        print(f"Used variables: {used_variables}")
# Example usage
if __name__ == "__main__":
    source_file = "example.py" # Replace with the path to the Python source file
    # Run the analysis, then print the defined/used variable counts.
    analyzer = SilentDefUseChains()
    analyzer.analyze_variables(source_file)
    analyzer.generate_report()
#!/bin/sh -e
# Quote the sourced path so the script still works when invoked from a
# directory whose path contains spaces (unquoted $(dirname $0) word-splits).
. "$(dirname "$0")"/functions.sh
# --- Test with coverage -----------------------------------------------------
log Measuring code coverage in debug mode
cargo tarpaulin -v --out Xml --ciserver travis-ci
|
#!/bin/bash
# Staged TTS pipeline driver: preprocess -> train -> synthesize ->
# synthesize_e2e -> static-model inference -> paddle2onnx export ->
# onnxruntime inference. Stages are selected via --stage/--stop-stage.
set -e
source path.sh
gpus=0,1
stage=0
stop_stage=100
conf_path=conf/default.yaml
train_output_path=exp/default
ckpt_name=snapshot_iter_76.pdz
# with the following command, you can choose the stage range you want to run
# such as `./run.sh --stage 0 --stop-stage 0`
# this can not be mixed use with `$1`, `$2` ...
source ${MAIN_ROOT}/utils/parse_options.sh || exit 1
if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
    # prepare data
    ./local/preprocess.sh ${conf_path} || exit -1
fi
if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
    # train model, all `ckpt` under `train_output_path/checkpoints/` dir
    CUDA_VISIBLE_DEVICES=${gpus} ./local/train.sh ${conf_path} ${train_output_path} || exit -1
fi
if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
    # synthesize, vocoder is pwgan
    CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1
fi
if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ]; then
    # synthesize_e2e, vocoder is pwgan
    CUDA_VISIBLE_DEVICES=${gpus} ./local/synthesize_e2e.sh ${conf_path} ${train_output_path} ${ckpt_name} || exit -1
fi
if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
    # inference with static model
    CUDA_VISIBLE_DEVICES=${gpus} ./local/inference.sh ${train_output_path} || exit -1
fi
# paddle2onnx, please make sure the static models are in ${train_output_path}/inference first
# we have only tested the following models so far
if [ ${stage} -le 5 ] && [ ${stop_stage} -ge 5 ]; then
    # install paddle2onnx (pinned version; reinstall on any mismatch)
    version=$(echo `pip list |grep "paddle2onnx"` |awk -F" " '{print $2}')
    if [[ -z "$version" || ${version} != '0.9.5' ]]; then
        pip install paddle2onnx==0.9.5
    fi
    ./local/paddle2onnx.sh ${train_output_path} inference inference_onnx speedyspeech_csmsc
    ./local/paddle2onnx.sh ${train_output_path} inference inference_onnx hifigan_csmsc
fi
# inference with onnxruntime, use fastspeech2 + hifigan by default
if [ ${stage} -le 6 ] && [ ${stop_stage} -ge 6 ]; then
    # install onnxruntime (pinned version; reinstall on any mismatch)
    version=$(echo `pip list |grep "onnxruntime"` |awk -F" " '{print $2}')
    if [[ -z "$version" || ${version} != '1.10.0' ]]; then
        pip install onnxruntime==1.10.0
    fi
    ./local/ort_predict.sh ${train_output_path}
fi
|
#!/usr/bin/env bash
# Runs the espcoredump decoding tests (b64 and ELF core dumps) for every
# supported chip, diffing each run against its expected output, and reports
# Python coverage over the corefile package.
function help() {
    echo "Usage: bash test_espcoredump.sh [ELF_DIR]"
}
# First argument (required): directory holding the per-chip app ELF files.
if [ -z "$1" ]; then
    help
    exit 1
else
    elf_dir=$1
fi
SUPPORTED_TARGETS=("esp32" "esp32s2" "esp32c3" "esp32s3" )
res=0
coverage erase
for chip in "${SUPPORTED_TARGETS[@]}"; do
  {
    echo "run b64 decoding tests on $chip"
    coverage run -a --source=corefile ../espcoredump.py --chip="$chip" --gdb-timeout-sec 5 info_corefile -m -t b64 -c "${chip}/coredump.b64" -s "${chip}/core.elf" "${elf_dir}/${chip}.elf" &>"${chip}/output" &&
    diff "${chip}/expected_output" "${chip}/output" &&
    coverage run -a --source=corefile ../espcoredump.py --chip="$chip" --gdb-timeout-sec 5 info_corefile -m -t elf -c "${chip}/core.elf" "${elf_dir}/${chip}.elf" &>"${chip}/output2" &&
    diff "${chip}/expected_output" "${chip}/output2"
  } || {
    # Any failed step for this chip marks the whole run as failed,
    # but the loop keeps testing the remaining chips.
    echo 'The test for espcoredump has failed!'
    res=1
  }
done
coverage run -a --source=corefile ./test_espcoredump.py
coverage report ../corefile/*.py ../espcoredump.py
exit $res
|
#!/bin/bash
# Fail fast: without this, a failed `cd ./terraform-service` would run
# terraform (and everything after it) from the wrong directory.
set -e
cd ./terraform-service
terraform init \
	&& terraform apply -auto-approve
# Capture the generated inventory and compose file for the ansible run below.
../make-inventory.sh -l |
	tee ../inventory.json
../make-compose-service.sh |
	tee ../docker-compose.yml
cd ..
ansible-playbook playbook-service.yml -i make-inventory.sh
|
class Storage {
  /**
   * Read the persisted object from localStorage.
   * Reads the item once (the original called getItem twice) and tolerates a
   * corrupt payload instead of throwing from JSON.parse.
   * @returns {Object} the parsed data, or {} when absent or unparseable.
   */
  static getData() {
    const raw = localStorage.getItem('data');
    if (raw === null) return {};
    try {
      return JSON.parse(raw);
    } catch {
      // Corrupt payload: fall back to an empty object rather than crashing.
      return {};
    }
  }

  /**
   * Persist `data` to localStorage as JSON under the 'data' key.
   * @param {Object} data
   */
  static setData(data) {
    localStorage.setItem('data', JSON.stringify(data));
  }
}
// Usage:
// Get data
const data = Storage.getData();
// Update data (mutating the local copy does NOT persist by itself -
// Storage.setData must be called for the change to reach localStorage).
data.username = 'John';
Storage.setData(data);
<reponame>gadge/term-ui
// Demo: a scrolling blessed `log` widget that prints a tag-formatted,
// timestamped line (and occasionally a nested object) every second,
// until 'q' is pressed.
var blessed = require('../../lib/blessed')
  , screen;
screen = blessed.screen({
  dump: __dirname + '/logs/logger.logs',
  smartCSR: true,
  autoPadding: false,
  warnings: true
});
// Centered log box taking half the screen, with vi-style keys, mouse
// scrolling, and a 100-line scrollback buffer.
var logger = blessed.log({
  parent: screen,
  top: 'center',
  left: 'center',
  width: '50%',
  height: '50%',
  border: 'line',
  tags: true,
  keys: true,
  vi: true,
  mouse: true,
  scrollback: 100,
  scrollbar: {
    ch: ' ',
    track: {
      bg: 'yellow'
    },
    style: {
      inverse: true
    }
  }
});
logger.focus();
// Log once a second; unref() keeps this timer from holding the process open.
setInterval(function() {
  logger.log('Hello {#0fe1ab-fg}world{/}: {bold}%s{/bold}.', Date.now().toString(36));
  if (Math.random() < 0.30) {
    logger.log({foo:{bar:{baz:true}}});
  }
  screen.render();
}, 1000).unref();
// Quit on 'q'.
screen.key('q', function() {
  return screen.destroy();
});
screen.render();
|
<filename>GloboTicket/GloboTicket.Services.Discount/TicketDiscountDB-create.sql
-- EF Core-generated migration script for the GloboTicket Discount service:
-- creates the migrations-history table and the Coupons table, then seeds
-- three sample coupons. Generated code; prefer regenerating over hand edits.
IF OBJECT_ID(N'[__EFMigrationsHistory]') IS NULL
BEGIN
    CREATE TABLE [__EFMigrationsHistory] (
        [MigrationId] nvarchar(150) NOT NULL,
        [ProductVersion] nvarchar(32) NOT NULL,
        CONSTRAINT [PK___EFMigrationsHistory] PRIMARY KEY ([MigrationId])
    );
END;
GO
CREATE TABLE [Coupons] (
    [CouponId] uniqueidentifier NOT NULL,
    [Code] nvarchar(max) NULL,
    [Amount] int NOT NULL,
    [AlreadyUsed] bit NOT NULL,
    CONSTRAINT [PK_Coupons] PRIMARY KEY ([CouponId])
);
GO
-- Seed data. The IDENTITY_INSERT guards below are generated boilerplate; they
-- only take effect if one of the listed columns were an identity column.
IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [name] IN (N'CouponId', N'AlreadyUsed', N'Amount', N'Code') AND [object_id] = OBJECT_ID(N'[Coupons]'))
    SET IDENTITY_INSERT [Coupons] ON;
INSERT INTO [Coupons] ([CouponId], [AlreadyUsed], [Amount], [Code])
VALUES ('3416eeca-e569-44fe-a06e-b0eb0d70a855', CAST(0 AS bit), 10, N'BeNice');
IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [name] IN (N'CouponId', N'AlreadyUsed', N'Amount', N'Code') AND [object_id] = OBJECT_ID(N'[Coupons]'))
    SET IDENTITY_INSERT [Coupons] OFF;
GO
IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [name] IN (N'CouponId', N'AlreadyUsed', N'Amount', N'Code') AND [object_id] = OBJECT_ID(N'[Coupons]'))
    SET IDENTITY_INSERT [Coupons] ON;
INSERT INTO [Coupons] ([CouponId], [AlreadyUsed], [Amount], [Code])
VALUES ('819200b3-f05b-4416-a846-534228c26195', CAST(0 AS bit), 20, N'Awesome');
IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [name] IN (N'CouponId', N'AlreadyUsed', N'Amount', N'Code') AND [object_id] = OBJECT_ID(N'[Coupons]'))
    SET IDENTITY_INSERT [Coupons] OFF;
GO
IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [name] IN (N'CouponId', N'AlreadyUsed', N'Amount', N'Code') AND [object_id] = OBJECT_ID(N'[Coupons]'))
    SET IDENTITY_INSERT [Coupons] ON;
INSERT INTO [Coupons] ([CouponId], [AlreadyUsed], [Amount], [Code])
VALUES ('aed65b30-071f-4058-b42b-6ac0955ca3b9', CAST(0 AS bit), 100, N'AlmostFree');
IF EXISTS (SELECT * FROM [sys].[identity_columns] WHERE [name] IN (N'CouponId', N'AlreadyUsed', N'Amount', N'Code') AND [object_id] = OBJECT_ID(N'[Coupons]'))
    SET IDENTITY_INSERT [Coupons] OFF;
GO
-- Record this migration as applied.
INSERT INTO [__EFMigrationsHistory] ([MigrationId], [ProductVersion])
VALUES (N'20200808123113_Initial', N'3.1.5');
GO
|
import logging
from typing import NewType, Dict
# Alias for a mapping of primitive metadata (kept for API compatibility).
PrimitiveDescription = NewType('PrimitiveDescription', Dict[str, str])

_logger = logging.getLogger(__name__)


class PrimitiveManager:
    """Registry of human-readable descriptions for primitive type names."""

    def __init__(self):
        # Backing store: primitive type name -> description text.
        self.primitive_dict = {}

    def add_primitive_description(self, primitive_type: str, description: str):
        """Store (or overwrite) the description for ``primitive_type``."""
        self.primitive_dict[primitive_type] = description
        _logger.info(f"Added/Updated description for {primitive_type}: {description}")

    def get_primitive_description(self, primitive_type: str) -> str:
        """Look up ``primitive_type``; fall back to a placeholder when unknown."""
        if primitive_type in self.primitive_dict:
            description = self.primitive_dict[primitive_type]
        else:
            description = "Description not found"
        _logger.info(f"Retrieved description for {primitive_type}: {description}")
        return description
# Example usage
# (the _logger.info calls emit nothing unless logging is configured)
manager = PrimitiveManager()
manager.add_primitive_description('int', 'Integer data type')
manager.add_primitive_description('str', 'String data type')
print(manager.get_primitive_description('int')) # Output: Integer data type
print(manager.get_primitive_description('float')) # Output: Description not found
def convert_roman_to_int(roman_string):
    """Convert a Roman numeral string (e.g. 'XVII') to its integer value."""
    values = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
    total = 0
    highest_seen = 0
    # Scan right-to-left: a symbol smaller than one already seen to its right
    # is subtractive (e.g. the I in IV), otherwise it is additive.
    for symbol in reversed(roman_string):
        value = values[symbol]
        if value >= highest_seen:
            total += value
        else:
            total -= value
        highest_seen = max(highest_seen, value)
    return total
# Bug fix: the original referenced `roman_string` without defining it,
# raising NameError at import time. Define the example numeral first.
roman_string = "XVII"
roman_int = convert_roman_to_int(roman_string)
print(roman_int) # prints 17
<gh_stars>1-10
/*
* Copyright © 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.examples.grpc.compression;
import io.servicetalk.concurrent.api.Single;
import io.servicetalk.encoding.api.ContentCodec;
import io.servicetalk.encoding.api.Identity;
import io.servicetalk.encoding.netty.ContentCodings;
import io.servicetalk.grpc.api.GrpcServiceContext;
import io.servicetalk.grpc.netty.GrpcServers;
import io.grpc.examples.compression.Greeter.GreeterService;
import io.grpc.examples.compression.Greeter.ServiceFactory;
import io.grpc.examples.compression.HelloReply;
import io.grpc.examples.compression.HelloRequest;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static io.servicetalk.concurrent.api.Single.succeeded;
/**
 * A simple extension of the gRPC "Hello World" example which demonstrates
 * compression of the request and response bodies.
 */
public class CompressionExampleServer {

    /**
     * Supported encodings in preferred order. These will be matched against the list of encodings provided by the
     * client to choose a mutually agreeable encoding.
     */
    private static final List<ContentCodec> SUPPORTED_ENCODINGS =
            Collections.unmodifiableList(Arrays.asList(
                    ContentCodings.gzipDefault(),
                    ContentCodings.deflateDefault(),
                    Identity.identity()
            ));

    /**
     * Starts the gRPC server on port 8080 and blocks until it shuts down.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the server fails to start or awaiting is interrupted
     */
    public static void main(String... args) throws Exception {
        GrpcServers.forPort(8080)
                // Create Greeter service which uses default binding to create ServiceFactory.
                // (see {@link MyGreeterService#bindService}). Alternately a non-default binding could be used by
                // directly creating a ServiceFactory directly. i.e.
                // new ServiceFactory(new MyGreeterService(), strategyFactory, contentCodecs)
                .listenAndAwait(new MyGreeterService())
                .awaitShutdown();
    }

    /** Greeter implementation that replies with a greeting for the request's name. */
    private static final class MyGreeterService implements GreeterService {

        @Override
        public Single<HelloReply> sayHello(final GrpcServiceContext ctx, final HelloRequest request) {
            return succeeded(HelloReply.newBuilder().setMessage("Hello " + request.getName()).build());
        }

        @Override
        public ServiceFactory bindService() {
            // Create a ServiceFactory bound to this service and includes the encodings supported for requests and
            // the preferred encodings for responses. Responses will automatically be compressed if the request includes
            // a mutually agreeable compression encoding that the client indicates they will accept and that the
            // server supports. Requests using unsupported encodings receive an error response in the "grpc-status".
            return new ServiceFactory(this, SUPPORTED_ENCODINGS);
        }
    }
}
|
package top.mowang.cloud.pojo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
* <p>
*
* </p>
*
* @author <NAME>
* @since 2021-10-29
*/
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Payment implements Serializable {
    private static final long serialVersionUID = 1L;
    // Payment identifier (presumably the DB primary key — confirm against schema).
    private Long id;
    // Serial string identifying the payment record.
    private String serial;
}
|
# Install or upgrade the kotsadm admin console: copies the addon's kustomize
# assets, creates/reuses all required secrets, applies the manifests, and
# waits for the kotsadm and kotsadm-api deployments to become ready.
function kotsadm() {
    local src="$DIR/addons/kotsadm/1.18.1"
    local dst="$DIR/kustomize/kotsadm"

    try_1m_stderr object_store_create_bucket kotsadm

    # Migrate the postgres PVC to the name expected by the 1.12.2+ StatefulSet.
    kotsadm_rename_postgres_pvc_1-12-2 "$src"

    cp "$src/kustomization.yaml" "$dst/"
    cp "$src/api.yaml" "$dst/"
    cp "$src/operator.yaml" "$dst/"
    cp "$src/postgres.yaml" "$dst/"
    cp "$src/schemahero.yaml" "$dst/"
    cp "$src/kotsadm.yaml" "$dst/"

    # Each secret helper is a no-op when the secret already exists in the cluster.
    kotsadm_secret_cluster_token
    kotsadm_secret_authstring
    kotsadm_secret_password
    kotsadm_secret_postgres
    kotsadm_secret_s3
    kotsadm_secret_session
    kotsadm_api_encryption_key

    if [ -n "$PROMETHEUS_VERSION" ]; then
        kotsadm_api_patch_prometheus
    fi

    # Render proxy patches when the install goes through an HTTP proxy.
    if [ -n "$PROXY_ADDRESS" ]; then
        KUBERNETES_CLUSTER_IP=$(kubectl get services kubernetes --no-headers | awk '{ print $3 }')
        render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-kotsadm-proxy.yaml" > "$DIR/kustomize/kotsadm/kotsadm-proxy.yaml"
        insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-proxy.yaml
        render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-kotsadm-api-proxy.yaml" > "$DIR/kustomize/kotsadm/kotsadm-api-proxy.yaml"
        insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-api-proxy.yaml
    fi

    if [ "$AIRGAP" == "1" ]; then
        cp "$DIR/addons/kotsadm/1.18.1/kotsadm-airgap.yaml" "$DIR/kustomize/kotsadm/kotsadm-airgap.yaml"
        insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" kotsadm-airgap.yaml
    fi

    kotsadm_etcd_client_secret
    kotsadm_kubelet_client_secret

    kotsadm_metadata_configmap $src $dst

    # Prefer the public address for the console URL, falling back to private.
    if [ -z "$KOTSADM_HOSTNAME" ]; then
        KOTSADM_HOSTNAME="$PUBLIC_ADDRESS"
    fi
    if [ -z "$KOTSADM_HOSTNAME" ]; then
        KOTSADM_HOSTNAME="$PRIVATE_ADDRESS"
    fi

    # Bake the console hostname into the web start script and ship it as a configmap.
    cat "$src/tmpl-start-kotsadm-web.sh" | sed "s/###_HOSTNAME_###/$KOTSADM_HOSTNAME:8800/g" > "$dst/start-kotsadm-web.sh"
    kubectl create configmap kotsadm-web-scripts --from-file="$dst/start-kotsadm-web.sh" --dry-run -oyaml > "$dst/kotsadm-web-scripts.yaml"

    kubectl delete pod kotsadm-migrations || true;
    kubectl delete deployment kotsadm-web || true; # replaced by 'kotsadm' deployment in 1.12.0
    kubectl delete service kotsadm-api || true; # replaced by 'kotsadm-api-node' service in 1.12.0

    kotsadm_namespaces "$src" "$dst"

    kubectl apply -k "$dst/"

    kotsadm_kurl_proxy $src $dst

    kotsadm_ready_spinner
    kotsadm_api_ready_spinner

    # Exclude the postgres PVC from velero backups.
    kubectl label pvc kotsadm-postgres-kotsadm-postgres-0 velero.io/exclude-from-backup="true" --overwrite

    kotsadm_cli $src
}
# On nodes joining the cluster only the kots CLI plugin needs installing.
function kotsadm_join() {
    kotsadm_cli "$DIR/addons/kotsadm/1.18.1"
}
# Print the post-install summary: the console URL and, on first install,
# the generated admin password (KOTSADM_PASSWORD set by kotsadm_secret_password).
function kotsadm_outro() {
    # Pick any api/main pod that is at least scheduled, for display fallbacks.
    local apiPod=$(kubectl get pods --selector app=kotsadm-api --no-headers | grep -E '(ContainerCreating|Running)' | head -1 | awk '{ print $1 }')
    if [ -z "$apiPod" ]; then
        apiPod="<api-pod>"
    fi
    local mainPod=$(kubectl get pods --selector app=kotsadm --no-headers | grep -E '(ContainerCreating|Running)' | head -1 | awk '{ print $1 }')
    if [ -z "$mainPod" ]; then
        mainPod="<main-pod>"
    fi

    printf "\n"
    printf "\n"
    printf "Kotsadm: ${GREEN}http://$KOTSADM_HOSTNAME:8800${NC}\n"
    if [ -n "$KOTSADM_PASSWORD" ]; then
        printf "Login with password (will not be shown again): ${GREEN}$KOTSADM_PASSWORD${NC}\n"
    else
        printf "You can log in with your existing password. If you need to reset it, run ${GREEN}kubectl kots reset-password default${NC}\n"
    fi
    printf "\n"
    printf "\n"
}
# Ensure the kotsadm-cluster-token secret exists, migrating a token stored
# under the pre-existing old secret name when found, otherwise generating one.
function kotsadm_secret_cluster_token() {
    local CLUSTER_TOKEN=$(kubernetes_secret_value default kotsadm-cluster-token kotsadm-cluster-token)

    if [ -n "$CLUSTER_TOKEN" ]; then
        return 0
    fi

    # check under old name
    CLUSTER_TOKEN=$(kubernetes_secret_value default kotsadm-auto-create-cluster-token token)

    if [ -n "$CLUSTER_TOKEN" ]; then
        kubectl delete secret kotsadm-auto-create-cluster-token
    else
        # 16-char alphanumeric random token
        CLUSTER_TOKEN=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c16)
    fi

    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-cluster-token.yaml" > "$DIR/kustomize/kotsadm/secret-cluster-token.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-cluster-token.yaml

    # ensure all pods that consume the secret will be restarted
    kubernetes_scale_down default deployment kotsadm-api
    kubernetes_scale_down default deployment kotsadm-operator
}
# Ensure the kotsadm-authstring secret exists. An existing value is kept only
# when it carries one of the two valid prefixes; anything else is regenerated
# to repair broken installs.
function kotsadm_secret_authstring() {
    local AUTHSTRING=$(kubernetes_secret_value default kotsadm-authstring kotsadm-authstring)

    # "Kots " and "Bearer " are the only two valid formats.
    case "$AUTHSTRING" in
        "Kots "*|"Bearer "*)
            return 0
            ;;
    esac

    AUTHSTRING="Kots $(< /dev/urandom tr -dc A-Za-z0-9 | head -c32)"

    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-authstring.yaml" > "$DIR/kustomize/kotsadm/secret-authstring.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-authstring.yaml
}
# Ensure the kotsadm-password secret exists. On first creation the plaintext
# password is kept in the global KOTSADM_PASSWORD so kotsadm_outro can show it.
function kotsadm_secret_password() {
    local BCRYPT_PASSWORD=$(kubernetes_secret_value default kotsadm-password passwordBcrypt)

    if [ -n "$BCRYPT_PASSWORD" ]; then
        return 0
    fi

    # global, used in outro
    KOTSADM_PASSWORD=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c9)
    BCRYPT_PASSWORD=$(echo "$KOTSADM_PASSWORD" | $DIR/bin/bcrypt --cost=14)

    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-password.yaml" > "$DIR/kustomize/kotsadm/secret-password.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-password.yaml

    # restart consumers so they pick up the new secret
    kubernetes_scale_down default deployment kotsadm
    kubernetes_scale_down default deployment kotsadm-api
}
# Ensure the kotsadm-postgres secret exists, generating a random password on
# first install and restarting everything that consumes it.
function kotsadm_secret_postgres() {
    local POSTGRES_PASSWORD=$(kubernetes_secret_value default kotsadm-postgres password)

    if [ -n "$POSTGRES_PASSWORD" ]; then
        return 0
    fi

    POSTGRES_PASSWORD=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c16)

    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-postgres.yaml" > "$DIR/kustomize/kotsadm/secret-postgres.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-postgres.yaml

    # restart all consumers of the postgres password
    kubernetes_scale_down default deployment kotsadm
    kubernetes_scale_down default deployment kotsadm-api
    kubernetes_scale_down default deployment kotsadm-postgres
    kubernetes_scale_down default deployment kotsadm-migrations
}
# Render the kotsadm S3 secret, defaulting the velero bucket name when unset.
function kotsadm_secret_s3() {
    if [ -z "$VELERO_LOCAL_BUCKET" ]; then
        VELERO_LOCAL_BUCKET=velero
    fi
    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-s3.yaml" > "$DIR/kustomize/kotsadm/secret-s3.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-s3.yaml
}
# Ensure the kotsadm-session (JWT signing key) secret exists.
function kotsadm_secret_session() {
    local JWT_SECRET=$(kubernetes_secret_value default kotsadm-session key)

    if [ -n "$JWT_SECRET" ]; then
        return 0
    fi

    JWT_SECRET=$(< /dev/urandom tr -dc A-Za-z0-9 | head -c16)

    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-session.yaml" > "$DIR/kustomize/kotsadm/secret-session.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-session.yaml

    # restart consumers so they pick up the new key
    kubernetes_scale_down default deployment kotsadm
    kubernetes_scale_down default deployment kotsadm-api
}
# Ensure the kotsadm-encryption secret exists.
# The value is 36 random bytes (24-byte key + 12-byte nonce), base64 encoded.
# This encoding is separate from the base64 layer kubectl applies to secrets:
# kotsadm expects the env var value itself to be encoded when read.
function kotsadm_api_encryption_key() {
    local API_ENCRYPTION=$(kubernetes_secret_value default kotsadm-encryption encryptionKey)

    if [ -n "$API_ENCRYPTION" ]; then
        return 0
    fi

    # head reads straight from /dev/urandom (the previous `cat` was redundant).
    API_ENCRYPTION=$(head -c36 /dev/urandom | base64)

    render_yaml_file "$DIR/addons/kotsadm/1.18.1/tmpl-secret-api-encryption.yaml" > "$DIR/kustomize/kotsadm/secret-api-encryption.yaml"
    insert_resources "$DIR/kustomize/kotsadm/kustomization.yaml" secret-api-encryption.yaml

    # restart the consumer so it picks up the new key
    kubernetes_scale_down default deployment kotsadm-api
}
# Add the prometheus strategic-merge patch to the kotsadm kustomization.
function kotsadm_api_patch_prometheus() {
    insert_patches_strategic_merge "$DIR/kustomize/kotsadm/kustomization.yaml" api-prometheus.yaml
    cp "$DIR/addons/kotsadm/1.18.1/patches/api-prometheus.yaml" "$DIR/kustomize/kotsadm/api-prometheus.yaml"
}
# Create the kotsadm-application-metadata configmap when application branding
# exists: application.yaml either ships in the airgap bundle or is fetched
# from replicated.app for the configured application slug.
function kotsadm_metadata_configmap() {
    local src="$1"
    local dst="$2"

    # The application.yaml pre-exists from airgap bundle OR
    # gets created below if user specified the app-slug and metadata exists.
    if [ "$AIRGAP" != "1" ] && [ -n "$KOTSADM_APPLICATION_SLUG" ]; then
        # If slug exists, but there's no branding, then replicated.app will return nothing.
        # (application.yaml will remain empty)
        echo "Retrieving app metadata: url=$REPLICATED_APP_URL, slug=$KOTSADM_APPLICATION_SLUG"
        # Quote the URL and paths so expanded variables are not word-split or globbed.
        curl "$REPLICATED_APP_URL/metadata/$KOTSADM_APPLICATION_SLUG" > "$src/application.yaml"
    fi

    if test -s "$src/application.yaml"; then
        cp "$src/application.yaml" "$dst/"
        kubectl create configmap kotsadm-application-metadata --from-file="$dst/application.yaml" --dry-run -oyaml > "$dst/kotsadm-application-metadata.yaml"
        insert_resources "$dst/kustomization.yaml" kotsadm-application-metadata.yaml
    fi
}
# Render and apply the kurl-proxy (TLS front-end) kustomization for kotsadm.
function kotsadm_kurl_proxy() {
    local src="$1/kurl-proxy"
    local dst="$2/kurl-proxy"

    mkdir -p "$dst"

    cp "$src/kustomization.yaml" "$dst/"
    cp "$src/rbac.yaml" "$dst/"
    render_yaml_file "$src/tmpl-service.yaml" > "$dst/service.yaml"
    render_yaml_file "$src/tmpl-deployment.yaml" > "$dst/deployment.yaml"

    # make sure the TLS secret the proxy serves with exists first
    kotsadm_tls_secret

    kubectl apply -k "$dst/"
}
# TODO rotate without overwriting uploaded certs
# Generate a self-signed TLS certificate for the kotsadm console (unless the
# secret already exists). SANs cover the in-cluster service names plus the
# node's private (and optionally public) IP.
function kotsadm_tls_secret() {
    if kubernetes_resource_exists default secret kotsadm-tls; then
        return 0
    fi

    cat > kotsadm.cnf <<EOF
[ req ]
default_bits = 2048
prompt = no
default_md = sha256
req_extensions = req_ext
distinguished_name = dn
[ dn ]
CN = kotsadm.default.svc.cluster.local
[ req_ext ]
subjectAltName = @alt_names
[ v3_ext ]
authorityKeyIdentifier=keyid,issuer:always
basicConstraints=CA:FALSE
keyUsage=nonRepudiation,digitalSignature,keyEncipherment
extendedKeyUsage=serverAuth
subjectAltName=@alt_names
[ alt_names ]
DNS.1 = kotsadm
DNS.2 = kotsadm.default
DNS.3 = kotsadm.default.svc
DNS.4 = kotsadm.default.svc.cluster
DNS.5 = kotsadm.default.svc.cluster.local
IP.1 = $PRIVATE_ADDRESS
EOF

    if [ -n "$PUBLIC_ADDRESS" ]; then
        echo "IP.2 = $PUBLIC_ADDRESS" >> kotsadm.cnf
    fi

    openssl req -newkey rsa:2048 -nodes -keyout kotsadm.key -config kotsadm.cnf -x509 -days 365 -out kotsadm.crt -extensions v3_ext

    kubectl -n default create secret tls kotsadm-tls --key=kotsadm.key --cert=kotsadm.crt
    # NOTE(review): presumably this annotation lets a user upload their own
    # cert once through the proxy — confirm against the kurl-proxy code.
    kubectl -n default annotate secret kotsadm-tls acceptAnonymousUploads=1

    # clean up the key material from the working directory
    rm kotsadm.cnf kotsadm.key kotsadm.crt
}
# TODO rotate
# Expose the control plane's etcd healthcheck client cert to kotsadm as a secret.
function kotsadm_etcd_client_secret() {
    if kubernetes_resource_exists default secret etcd-client-cert; then
        return 0
    fi

    kubectl -n default create secret generic etcd-client-cert \
        --from-file=client.crt=/etc/kubernetes/pki/etcd/healthcheck-client.crt \
        --from-file=client.key=/etc/kubernetes/pki/etcd/healthcheck-client.key \
        --from-file=/etc/kubernetes/pki/etcd/ca.crt
}
# TODO rotate
# Expose the apiserver's kubelet client cert to kotsadm as a secret.
function kotsadm_kubelet_client_secret() {
    if kubernetes_resource_exists default secret kubelet-client-cert; then
        return 0
    fi

    kubectl -n default create secret generic kubelet-client-cert \
        --from-file=client.crt=/etc/kubernetes/pki/apiserver-kubelet-client.crt \
        --from-file=client.key=/etc/kubernetes/pki/apiserver-kubelet-client.key \
        --from-file=/etc/kubernetes/pki/ca.crt
}
# Install the kots kubectl plugin on master nodes, downloading the release
# tarball first unless it is already bundled (airgap) or cached.
function kotsadm_cli() {
    local src="$1"

    if ! kubernetes_is_master; then
        return 0
    fi
    if [ ! -f "$src/assets/kots.tar.gz" ] && [ "$AIRGAP" != "1" ]; then
        mkdir -p "$src/assets"
        curl -L "https://github.com/replicatedhq/kots/releases/download/v1.18.1/kots_linux_amd64.tar.gz" > "$src/assets/kots.tar.gz"
    fi

    pushd "$src/assets"
    tar xf "kots.tar.gz"
    mkdir -p "$KUBECTL_PLUGINS_PATH"
    # kubectl discovers plugins named kubectl-<name> on PATH
    mv kots "$KUBECTL_PLUGINS_PATH/kubectl-kots"
    popd

    # workaround for a missing libdevmapper symlink, see:
    # https://github.com/replicatedhq/kots/issues/149
    if [ ! -e /usr/lib64/libdevmapper.so.1.02.1 ] && [ -e /usr/lib64/libdevmapper.so.1.02 ]; then
        ln -s /usr/lib64/libdevmapper.so.1.02 /usr/lib64/libdevmapper.so.1.02.1
    fi
}
# copy pgdata from pvc named kotsadm-postgres to new pvc named kotsadm-postgres-kotsadm-postgres-0
# used by StatefulSet in 1.12.2+
function kotsadm_rename_postgres_pvc_1-12-2() {
    local src="$1"

    # the old Deployment must release the PVC before it can be copied
    if kubernetes_resource_exists default deployment kotsadm-postgres; then
        kubectl delete deployment kotsadm-postgres
    fi
    # nothing to migrate when the old PVC is gone already
    if ! kubernetes_resource_exists default pvc kotsadm-postgres; then
        return 0
    fi

    printf "${YELLOW}Renaming PVC kotsadm-postgres to kotsadm-postgres-kotsadm-postgres-0${NC}\n"
    # run the copy job and wait (indefinitely) for it to complete
    kubectl apply -f "$src/kotsadm-postgres-rename-pvc.yaml"
    spinner_until -1 kotsadm_postgres_pvc_renamed

    kubectl delete pod kotsadm-postgres-rename-pvc
    kubectl delete pvc kotsadm-postgres
}
# Succeeds once the rename-pvc pod's container has terminated with "Completed".
function kotsadm_postgres_pvc_renamed {
    local status=$(kubectl get pod kotsadm-postgres-rename-pvc -ojsonpath='{ .status.containerStatuses[0].state.terminated.reason }')
    [ "$status" = "Completed" ]
}
# Pre-create every namespace listed in the comma-separated
# KOTSADM_APPLICATION_NAMESPACES variable. The src/dst arguments are accepted
# for consistency with the other helpers but are unused here.
function kotsadm_namespaces() {
    local src="$1"
    local dst="$2"

    IFS=',' read -ra KOTSADM_APPLICATION_NAMESPACES_ARRAY <<< "$KOTSADM_APPLICATION_NAMESPACES"
    for NAMESPACE in "${KOTSADM_APPLICATION_NAMESPACES_ARRAY[@]}"; do
        # tolerate namespaces that already exist
        kubectl create ns "$NAMESPACE" 2>/dev/null || true
    done
}
# Health predicate for the kotsadm pod: succeeds only when all four pod
# conditions are reported and none of them is Ready=False.
function kotsadm_health_check() {
    # Get pods below will initially return only 0 lines
    # Then it will return 1 line: "PodScheduled=True"
    # Finally, it will return 4 lines. And this is when we want to grep for "Ready=False"
    if [ $(kubectl get pods -l app=kotsadm -o jsonpath="{range .items[*]}{range .status.conditions[*]}{ .type }={ .status }{'\n'}{end}{end}" | wc -l) -lt 4 ]; then
        return 1
    fi
    if [[ -n $(kubectl get pods -l app=kotsadm -o jsonpath="{range .items[*]}{range .status.conditions[*]}{ .type }={ .status }{'\n'}{end}{end}" | grep Ready=False) ]]; then
        return 1
    fi
    return 0
}
# Wait up to 120s for the kotsadm deployment; dump recent logs and bail on timeout.
function kotsadm_ready_spinner() {
    if ! spinner_until 120 kotsadm_health_check; then
        kubectl logs -l app=kotsadm --all-containers --tail 10
        bail "The kotsadm deployment in the kotsadm addon failed to deploy successfully."
    fi
}
# Health predicate for the kotsadm-api pod (same logic as kotsadm_health_check
# but for the app=kotsadm-api selector).
function kotsadm_api_health_check() {
    # Get pods below will initially return only 0 lines
    # Then it will return 1 line: "PodScheduled=True"
    # Finally, it will return 4 lines. And this is when we want to grep for "Ready=False"
    if [ $(kubectl get pods -l app=kotsadm-api -o jsonpath="{range .items[*]}{range .status.conditions[*]}{ .type }={ .status }{'\n'}{end}{end}" | wc -l) -lt 4 ]; then
        return 1
    fi
    if [[ -n $(kubectl get pods -l app=kotsadm-api -o jsonpath="{range .items[*]}{range .status.conditions[*]}{ .type }={ .status }{'\n'}{end}{end}" | grep Ready=False) ]]; then
        return 1
    fi
    return 0
}
# Wait up to 240s for the kotsadm-api deployment; dump logs and bail on timeout.
function kotsadm_api_ready_spinner() {
    if ! spinner_until 240 kotsadm_api_health_check; then
        kubectl logs -l app=kotsadm-api --all-containers --tail 10
        bail "The kotsadm-api deployment in the kotsadm addon failed to deploy successfully."
    fi
}
|
<reponame>wangyeMob/jvm-sandbox-repeater<filename>repeater-plugins/mongo-plugin/src/main/java/com/alibaba/jvm/sandbox/repeater/plugin/mongo/MongoProcessor.java
package com.alibaba.jvm.sandbox.repeater.plugin.mongo;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.jvm.sandbox.api.event.BeforeEvent;
import com.alibaba.jvm.sandbox.api.event.Event;
import com.alibaba.jvm.sandbox.api.event.ReturnEvent;
import com.alibaba.jvm.sandbox.repeater.plugin.core.impl.api.DefaultInvocationProcessor;
import com.alibaba.jvm.sandbox.repeater.plugin.core.util.LogUtil;
import com.alibaba.jvm.sandbox.repeater.plugin.core.wrapper.SerializerWrapper;
import com.alibaba.jvm.sandbox.repeater.plugin.domain.Identity;
import com.alibaba.jvm.sandbox.repeater.plugin.domain.Invocation;
import com.alibaba.jvm.sandbox.repeater.plugin.domain.InvokeType;
import com.alibaba.jvm.sandbox.repeater.plugin.mongo.wrapper.MongoWrapperTransModel;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.commons.lang3.reflect.MethodUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Field;
import java.lang.reflect.Constructor;
import java.util.HashMap;
/**
 * <p>
 * Invocation processor for Mongo recordings: derives the repeater identity
 * and the request/response payloads from sandbox before/return events.
 *
 * @author wangyeran
 */
class MongoProcessor extends DefaultInvocationProcessor {

    protected static Logger log = LoggerFactory.getLogger(MongoProcessor.class);

    MongoProcessor(InvokeType type) {
        super(type);
    }

    /**
     * Assemble the identity as {@code MONGO / <db>_<table> / <method>_<params>}.
     * Falls back to a placeholder identity when the event cannot be parsed.
     */
    @Override
    public Identity assembleIdentity(BeforeEvent event) {
        try {
            MongoWrapperTransModel wtm = MongoWrapperTransModel.build(event);
            return new Identity(InvokeType.MONGO.name(), wtm.getDbName() + "_" + wtm.gettableName(), wtm.getmethodName() + "_" + wtm.getparamString(), null);
        } catch (Exception e) {
            // Log instead of silently swallowing so assembly failures are diagnosable.
            log.warn("failed to assemble mongo identity", e);
            return new Identity(InvokeType.MONGO.name(), "Unknown", "Unknown", new HashMap<String, String>(1));
        }
    }

    /**
     * Wrap the before-event's transport model into a single-element request array.
     */
    @Override
    public Object[] assembleRequest(BeforeEvent event) {
        // args可能存在不可序序列化异常(例如使用tk.mybatis)
        // (args may not be serializable, e.g. when tk.mybatis is used)
        MongoWrapperTransModel wtm = MongoWrapperTransModel.build(event);
        log.debug("the mongo request:{}, {}", wtm.getDbName() + "_" + wtm.gettableName(), wtm.getmethodName() + "_" + wtm.getparamString());
        return new Object[]{wtm};
    }

    /**
     * Return the raw result object for RETURN events, {@code null} for all others.
     */
    @Override
    public Object assembleResponse(Event event) {
        if (event.type == Event.Type.RETURN) {
            Object response = ((ReturnEvent) event).object;
            try {
                Object collection = MethodUtils.invokeMethod(response, "getCollection");
                log.debug("here in the record response:{}", collection);
            } catch (Exception e) {
                // Fixed: route through the class logger instead of e.printStackTrace()
                // so output honors the configured logging framework.
                log.debug("failed to introspect mongo response", e);
            }
            return response;
        }
        return null;
    }
}
|
<reponame>jianguotian/swift-hive-metastore<gh_stars>1-10
/*
* Copyright (C) 2013 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.apache.hadoop.hive.metastore.api;
import com.facebook.swift.codec.ThriftConstructor;
import com.facebook.swift.codec.ThriftField;
import com.facebook.swift.codec.ThriftStruct;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Objects.toStringHelper;
@ThriftStruct("PrincipalPrivilegeSet")
public class PrincipalPrivilegeSet
{
    /**
     * All-args constructor used by the Swift/Thrift codec; each map is keyed by
     * principal name and holds that principal's privilege grants.
     */
    @ThriftConstructor
    public PrincipalPrivilegeSet(
        @ThriftField(value = 1, name = "userPrivileges") final Map<String, List<PrivilegeGrantInfo>> userPrivileges,
        @ThriftField(value = 2, name = "groupPrivileges") final Map<String, List<PrivilegeGrantInfo>> groupPrivileges,
        @ThriftField(value = 3, name = "rolePrivileges") final Map<String, List<PrivilegeGrantInfo>> rolePrivileges)
    {
        this.userPrivileges = userPrivileges;
        this.groupPrivileges = groupPrivileges;
        this.rolePrivileges = rolePrivileges;
    }

    /** No-args constructor required by the Thrift codec; fields default to null. */
    public PrincipalPrivilegeSet()
    {
    }

    // Privileges keyed by user name (Thrift field 1).
    private Map<String, List<PrivilegeGrantInfo>> userPrivileges;

    @ThriftField(value = 1, name = "userPrivileges")
    public Map<String, List<PrivilegeGrantInfo>> getUserPrivileges()
    {
        return userPrivileges;
    }

    public void setUserPrivileges(final Map<String, List<PrivilegeGrantInfo>> userPrivileges)
    {
        this.userPrivileges = userPrivileges;
    }

    // Privileges keyed by group name (Thrift field 2).
    private Map<String, List<PrivilegeGrantInfo>> groupPrivileges;

    @ThriftField(value = 2, name = "groupPrivileges")
    public Map<String, List<PrivilegeGrantInfo>> getGroupPrivileges()
    {
        return groupPrivileges;
    }

    public void setGroupPrivileges(final Map<String, List<PrivilegeGrantInfo>> groupPrivileges)
    {
        this.groupPrivileges = groupPrivileges;
    }

    // Privileges keyed by role name (Thrift field 3).
    private Map<String, List<PrivilegeGrantInfo>> rolePrivileges;

    @ThriftField(value = 3, name = "rolePrivileges")
    public Map<String, List<PrivilegeGrantInfo>> getRolePrivileges()
    {
        return rolePrivileges;
    }

    public void setRolePrivileges(final Map<String, List<PrivilegeGrantInfo>> rolePrivileges)
    {
        this.rolePrivileges = rolePrivileges;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
            .add("userPrivileges", userPrivileges)
            .add("groupPrivileges", groupPrivileges)
            .add("rolePrivileges", rolePrivileges)
            .toString();
    }
}
|
"use strict";
/*
 * Copyright (C) 2021 Intlcaht
 * All rights reserved
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.gitBranch = exports.gitHash = void 0;
const childProcess = require('child_process');
// Return the current commit hash. `git` prints a trailing newline which the
// (deprecated) global `escape` would otherwise encode as "%0A" and corrupt
// the value, so trim the output first.
const gitHash = () => escape(childProcess.execSync('git rev-parse HEAD').toString('utf-8').trim());
exports.gitHash = gitHash;
// Return the current branch name (empty when HEAD is detached); trimmed for
// the same reason as gitHash.
const gitBranch = () => escape(childProcess.execSync('git branch --show-current').toString('utf-8').trim());
exports.gitBranch = gitBranch;
//# sourceMappingURL=scripts.js.map
<filename>build.sbt<gh_stars>0
// Scala.js spike build: a cross-compiled JVM/JS project using utest for
// testing and scalajs-bundler for webpack/npm integration.
enablePlugins(ScalaJSPlugin)
enablePlugins(ScalaJSBundlerPlugin)

name := "Scala.js Spike"

// or any other Scala version >= 2.10.2
// This is an application with a main method
//scalaJSModuleKind := ModuleKind.CommonJSModule
//jsDependencies += ProvidedJS / "jsexamplemodule.js"

testFrameworks += new TestFramework("utest.runner.Framework")

coverageEnabled := true

npmDependencies in Compile += "left-pad" -> "1.1.3"

// Jvm AND JS support here
// Aggregate project: drives both platform projects, publishes nothing itself.
lazy val root = project.in(file(".")).
  aggregate(spikeJS, spikeJVM).
  settings(
    publish := {},
    publishLocal := {},
    scalaVersion := "2.12.1"
  )

// Selenium-backed JS environment (only referenced by the commented-out
// jsEnv settings below).
val browser = org.scalajs.jsenv.selenium.Chrome()

val testTask = taskKey[Unit]("test task")

// Shared sources cross-compiled for JVM and JS.
lazy val spike = crossProject.in(file(".")).
  settings(
    name := "spike",
    version := "0.1-SNAPSHOT",
    libraryDependencies += "com.lihaoyi" %%% "utest" % "0.4.5" % "test",
    testFrameworks += new TestFramework("utest.runner.Framework")
  ).
  jvmSettings(
    // Add JVM-specific settings here
  ).
  jsSettings(
    scalaJSModuleKind := ModuleKind.CommonJSModule,
    scalaJSUseMainModuleInitializer := true,
    libraryDependencies += "org.scala-js" %%% "scalajs-dom" % "0.9.1",
    testTask := {
      println("HOOOOOOO")
    }
    // Apply to the 'run' command
    //jsEnv := new org.scalajs.jsenv.selenium.SeleniumJSEnv(browser),
    // Apply to tests
    //jsEnv in Test := new org.scalajs.jsenv.selenium.SeleniumJSEnv(browser)
  )

lazy val spikeJVM = spike.jvm
lazy val spikeJS = spike.js
/**
* Copyright (c) 2001-2017 <NAME> and Robocode contributors
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://robocode.sourceforge.net/license/epl-v10.html
*/
package net.sf.robocode.dotnet.repository.items.handlers;
import net.sf.robocode.repository.items.handlers.PropertiesHandler;
import net.sf.robocode.repository.items.IRepositoryItem;
import net.sf.robocode.repository.items.RobotItem;
import net.sf.robocode.repository.root.IRepositoryRoot;
import net.sf.robocode.repository.IRepository;
import net.sf.robocode.dotnet.repository.items.DotNetRobotItem;
import net.sf.robocode.dotnet.repository.root.DllRoot;
import java.net.URL;
import java.io.File;
/**
 * Handler that registers .NET robot .properties entries found inside .dll archives.
 *
 * @author <NAME> (original)
 */
public class DotNetPropertiesHandler extends PropertiesHandler {

    /**
     * {@inheritDoc}
     */
    @Override
    protected IRepositoryItem acceptItem(URL itemURL, IRepositoryRoot root, IRepository repository) {
        // Accept and register the item if it is a .NET properties file
        // (a properties entry inside a .dll archive, hence the ".dll!/" marker).
        String name = itemURL.toString().toLowerCase();
        if (name.contains(".dll!/")) {
            return register(itemURL, root, repository);
        }
        return null;
    }

    /**
     * Register the item in the repository, reusing the existing entry for the
     * same URL when one is already present.
     */
    private IRepositoryItem register(URL itemURL, IRepositoryRoot root, IRepository repository) {
        RobotItem item = (RobotItem) repository.getItem(itemURL.toString());
        if (item == null) {
            item = createItem(itemURL, root, repository);
        }
        repository.addOrUpdateItem(item);
        return item;
    }

    /**
     * Create a .NET robot item rooted at the .dll file that corresponds to the
     * given .properties URL.
     */
    protected RobotItem createItem(URL itemURL, IRepositoryRoot root, IRepository repository) {
        File file = new File(itemURL.toString().replace(".properties", ".dll"));
        DotNetRobotItem item = new DotNetRobotItem(new DllRoot(repository, file), itemURL);
        item.setClassURL(itemURL);
        return item;
    }
}
from typing import List
import re
def extract_urls(html: str) -> List[str]:
    """Extract all href attribute values from an HTML string.

    Generalized to handle both double- and single-quoted attributes; values
    are returned in document order, duplicates preserved.

    Args:
        html: Raw HTML text to scan.

    Returns:
        List of href values; empty when none are found.
    """
    # The first group captures the opening quote so the backreference \1
    # requires the closing quote to match it; DOTALL keeps the pre-existing
    # behavior of allowing newlines inside the attribute value.
    pattern = r'href=(["\'])(.*?)\1'
    return [value for _, value in re.findall(pattern, html, flags=re.DOTALL)]
#!/bin/sh
# Run the test-ftello binary twice — once with this script as its stdin and
# the extra argument "1", once with short piped input — failing on any error.
${CHECKER} ./test-ftello${EXEEXT} 1 < "$srcdir/test-ftello.sh" || exit 1
echo hi | ${CHECKER} ./test-ftello${EXEEXT} || exit 1
exit 0
//main.js
// Bundler entry point: pulls in the global stylesheet and runs the two demo modules.
import './main.css'

// NOTE(review): mixes ES `import` with CommonJS `require`; this works under a
// bundler but consider standardizing on one module style.
const greeter = require('./Greeter.js');
const map = require('./map.js');

greeter();
map();
<filename>src/VoxelRender.js
// Renders a voxel volume with THREE.js: the geometry is a stack of circles
// slicing the voxels' bounding sphere, and the fragment shader samples the
// voxel data packed as 2D tiles in a DataTexture (an emulated 3D texture).
class VoxelRender {
    // Build a THREE.Mesh for the given voxel volume.
    static create(voxels) {
        // Create layers of circles slicing a sphere containing the voxels
        var geom = new THREE.Geometry();
        var sphere = voxels.box().getBoundingSphere();
        var circleCenter = sphere.center.clone(); // NOTE(review): unused
        var z1 = sphere.center.z - sphere.radius + voxels.cellSize.z;
        var z2 = sphere.center.z + sphere.radius - voxels.cellSize.z;
        for (var z = z1; z <= z2; z += voxels.cellSize.z) {
            // circle radius at height z, from the sphere equation r² = R² - (zc - z)²
            var circleRadius = Math.sqrt(sphere.radius * sphere.radius - Math.pow(sphere.center.z - z, 2));
            var circle = new THREE.CircleGeometry(circleRadius, 20);
            circle.translate(sphere.center.x, sphere.center.y, z);
            geom.merge(circle);
        }
        var tex = VoxelRender.makeTexture(voxels.data, voxels.side, voxels.side);
        VoxelRender.obtainShaders();
        var material = new THREE.ShaderMaterial({
            transparent: true,
            vertexShader: VoxelRender.vertShader,
            fragmentShader: VoxelRender.fragShader,
            uniforms: {
                cubeTex: { type: 't', value: tex },
                size: { type: 'v3', value: voxels.size },
                cellSize: { type: 'v3', value: voxels.cellSize },
                firstCell: { type: 'v3', value: voxels.firstCell },
                lastCell: { type: 'v3', value: voxels.lastCell() },
                center: { type: 'v3', value: voxels.firstCell.clone().add(voxels.lastCell()).divideScalar(2) },
                tileNum: { type: 'v2', value: voxels.tileNum },
                side: { type: 'v2', value: voxels.side },
            },
            // wireframe: true
        });
        // var material = new THREE.MeshNormalMaterial({ transparent: true, opacity: 0.3 });
        return new THREE.Mesh(geom, material);
    }

    // Wrap a raw RGBA array in an un-filtered, non-flipped DataTexture.
    static makeTexture(arr, width, height) {
        var texture = new THREE.DataTexture(arr, width, height, THREE.RGBAFormat);
        texture.needsUpdate = true;
        texture.wrapS = texture.wrapT = THREE.ClampToEdgeWrapping; // RepeatWrapping;
        texture.minFilter = texture.magFilter = THREE.NearestFilter; // THREE.LinearFilter;
        texture.flipY = false;
        return texture;
    }

    // Assign the GLSL sources to VoxelRender.vertShader / .fragShader.
    // The vertex shader rotates each slice to face the camera; the fragment
    // shader looks the rotated position up in the tiled voxel texture.
    static obtainShaders() {
        VoxelRender.vertShader = `
            varying vec3 worldSpaceCoords;
            uniform vec3 size;
            uniform vec3 cellSize;
            uniform vec3 firstCell;
            uniform vec3 lastCell;
            uniform vec3 center;
            vec4 rotationBetweenVectorsToQuaternion(vec3 a, vec3 b) {
                float nxDir = dot(a, b);
                if (nxDir < -0.999999) {
                    vec3 tmpvec3 = cross(vec3(1.0, 0.0, 0.0), a);
                    if (length(tmpvec3) < 0.000001)
                        tmpvec3 = cross(vec3(0.0, 1.0, 0.0), a);
                    return vec4(normalize(tmpvec3), 0.0);
                } else if (nxDir > 0.999999) {
                    return vec4(0.0, 0.0, 0.0, 1.0);
                } else {
                    vec3 tmpvec3 = cross(a, b);
                    vec4 tmpvec4 = vec4(tmpvec3, 1.0 + nxDir);
                    return normalize(tmpvec4);
                }
            }
            vec3 rotateVectorByQuaternion( vec3 v, vec4 q ) {
                vec3 dest = vec3( 0.0 );
                float x = v.x, y = v.y, z = v.z;
                float qx = q.x, qy = q.y, qz = q.z, qw = q.w;
                // calculate quaternion * vector
                float ix = qw * x + qy * z - qz * y,
                    iy = qw * y + qz * x - qx * z,
                    iz = qw * z + qx * y - qy * x,
                    iw = -qx * x - qy * y - qz * z;
                // calculate result * inverse quaternion
                dest.x = ix * qw + iw * -qx + iy * -qz - iz * -qy;
                dest.y = iy * qw + iw * -qy + iz * -qx - ix * -qz;
                dest.z = iz * qw + iw * -qz + ix * -qy - iy * -qx;
                return dest;
            }
            void main() {
                // //Set the world space coordinates of the back faces vertices as output.
                vec3 cameraVector = normalize(center - cameraPosition);
                vec3 down = vec3(0.0, 0.0, -1.0);
                vec4 quat = rotationBetweenVectorsToQuaternion(down, cameraVector);
                vec3 rotpos = rotateVectorByQuaternion(position - center, quat) + center;
                worldSpaceCoords = (rotpos - firstCell) / cellSize / size; //move it from [-0.5;0.5] to [0,1]
                gl_Position = projectionMatrix * modelViewMatrix * vec4(rotpos, 1.0);
                // gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
            }
        `;
        VoxelRender.fragShader = `
            varying vec3 worldSpaceCoords;
            uniform sampler2D cubeTex;
            uniform vec3 size;
            uniform vec3 cellSize;
            uniform vec3 firstCell;
            uniform vec3 lastCell;
            uniform vec3 center;
            uniform vec2 tileNum;
            uniform float side;
            vec4 sampleAs3DTexture(vec3 texCoord) {
                if (min(min(texCoord.x, texCoord.y), texCoord.z) < 0.001 || max(max(texCoord.x, texCoord.y), texCoord.z) > 0.999) discard;
                vec3 size1 = size - vec3(1.0);
                vec3 coord = floor(texCoord * size1);
                float u = (coord.x + mod(coord.z, tileNum.x) * size.x) / side;
                float v = (coord.y + floor(coord.z / tileNum.x) * size.y) / side;
                vec2 texCoordSlice = clamp(vec2(u, v), 0.0, 1.0);
                return texture2D(cubeTex, texCoordSlice);
            }
            void main(void) {
                gl_FragColor = sampleAs3DTexture(worldSpaceCoords);
                if (gl_FragColor.w < 0.01) discard;
                // gl_FragColor = vec4(1.0, 0.0, 0.0, 0.3);
                // gl_FragColor = gl_FragCoord;
            }
        `;
    }
}
// Export for CommonJS consumers; in a plain browser <script> the class is global.
if (typeof module !== 'undefined' && module.exports) {
    module.exports.VoxelRender = VoxelRender;
}
|
import React from "react";
import { Member } from "./types/member";
/**
 * Hook returning the (currently hard-coded) member list.
 *
 * `isLoading` starts true and flips to false once the effect populates the
 * members, so consumers can render a loading state. (Fixed: it previously
 * started false and was only ever set to false, making the flag meaningless.)
 */
export const useMembers = () => {
  const [isLoading, setIsLoading] = React.useState<boolean>(true);
  const [members, setMembers] = React.useState<Member[]>();

  React.useEffect(() => {
    // TODO(review): replace this hard-coded fixture with a real fetch.
    setMembers([
      { name: "<NAME>", username: "lontronix", points: 3 },
      { name: "<NAME>", username: "hitchhacker", points: 5 },
      { name: "<NAME>", username: "mom", points: 2 },
      { name: "<NAME>", username: "harmon", points: 12 },
    ]);
    setIsLoading(false);
  }, []);

  return { isLoading, members };
};
|
//
// tile-join.hpp
// Pods
//
// Created by <NAME> on 23.09.2020.
//
#ifndef TILE_JOIN_HPP
#define TILE_JOIN_HPP
// NOTE(review): Apple's TargetConditionals.h defines TARGET_OS_IPHONE as 0 or
// 1, so the conventional test is `#if TARGET_OS_IPHONE`; `#ifdef` only works
// if the macro is injected via compiler flags — confirm how this project sets it.
#ifdef TARGET_OS_IPHONE
// Entry point of the tile-join tool, invoked with argv-style arguments;
// returns a process exit code.
int tile_join_main(int argc, char **argv);
#endif
#endif /* TILE_JOIN_HPP */
<gh_stars>1-10
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: <NAME>
# <NAME> <<EMAIL>>
#
import time
import numpy
from functools import reduce
from pyscf import lib
from pyscf.lib import logger
from pyscf.pbc import scf
from pyscf.cc import gccsd
from pyscf.pbc.mp.kmp2 import get_frozen_mask, get_nmo, get_nocc
from pyscf.pbc.cc import kintermediates as imdk
from pyscf.lib.parameters import LOOSE_ZERO_TOL, LARGE_DENOM
from pyscf.pbc.lib import kpts_helper
DEBUG = False
#
# FIXME: When linear dependence is found in KHF and handled by function
# pyscf.scf.addons.remove_linear_dep_, different k-point may have different
# number of orbitals.
#
#einsum = numpy.einsum
einsum = lib.einsum
def kernel(cc, eris, t1=None, t2=None, max_cycle=50, tol=1e-8, tolnormt=1e-6,
max_memory=2000, verbose=logger.INFO):
"""Exactly the same as pyscf.cc.ccsd.kernel, which calls a
*local* energy() function."""
if isinstance(verbose, logger.Logger):
log = verbose
else:
log = logger.Logger(cc.stdout, verbose)
assert (isinstance(eris, gccsd._PhysicistsERIs))
if t1 is None and t2 is None:
t1, t2 = cc.init_amps(eris)[1:]
elif t1 is None:
nocc = cc.nocc
nvir = cc.nmo - nocc
nkpts = cc.nkpts
t1 = numpy.zeros((nkpts, nocc, nvir), numpy.complex128)
elif t2 is None:
t2 = cc.init_amps(eris)[2]
cput1 = cput0 = (time.clock(), time.time())
nkpts, nocc, nvir = t1.shape
eold = 0
eccsd = 0
if isinstance(cc.diis, lib.diis.DIIS):
adiis = cc.diis
elif cc.diis:
adiis = lib.diis.DIIS(cc, cc.diis_file)
adiis.space = cc.diis_space
else:
adiis = None
conv = False
for istep in range(max_cycle):
t1new, t2new = cc.update_amps(t1, t2, eris, max_memory)
normt = numpy.linalg.norm(t1new - t1) + numpy.linalg.norm(t2new - t2)
if cc.iterative_damping < 1.0:
alpha = cc.iterative_damping
t1, t2 = (1-alpha)*t1 + alpha*t1new, (1-alpha)*t2 + alpha*t2new
else:
t1, t2 = t1new, t2new
t1new = t2new = None
t1, t2 = cc.run_diis(t1, t2, istep, normt, eccsd - eold, adiis)
eold, eccsd = eccsd, energy(cc, t1, t2, eris)
log.info('istep = %d E(CCSD) = %.15g dE = %.9g norm(t1,t2) = %.6g', istep, eccsd, eccsd - eold, normt)
cput1 = log.timer('CCSD iter', *cput1)
if abs(eccsd - eold) < tol and normt < tolnormt:
conv = True
break
log.timer('CCSD', *cput0)
return conv, eccsd, t1, t2
def energy(cc, t1, t2, eris):
nkpts, nocc, nvir = t1.shape
fock = eris.fock
eris_oovv = eris.oovv.copy()
e = 0.0 + 0j
for ki in range(nkpts):
e += einsum('ia,ia', fock[ki, :nocc, nocc:], t1[ki, :, :])
t1t1 = numpy.zeros(shape=t2.shape, dtype=t2.dtype)
for ki in range(nkpts):
ka = ki
for kj in range(nkpts):
#kb = kj
t1t1[ki, kj, ka, :, :, :, :] = einsum('ia,jb->ijab', t1[ki, :, :], t1[kj, :, :])
tau = t2 + 2 * t1t1
e += 0.25 * numpy.dot(tau.flatten(), eris_oovv.flatten())
e /= nkpts
if abs(e.imag) > 1e-4:
logger.warn(cc, 'Non-zero imaginary part found in KCCSD energy %s', e)
return e.real
def update_amps(cc, t1, t2, eris, max_memory=2000):
time0 = time.clock(), time.time()
log = logger.Logger(cc.stdout, cc.verbose)
nkpts, nocc, nvir = t1.shape
fock = eris.fock
fov = fock[:, :nocc, nocc:].copy()
foo = fock[:, :nocc, :nocc].copy()
fvv = fock[:, nocc:, nocc:].copy()
tau = imdk.make_tau(cc, t2, t1, t1)
Fvv = imdk.cc_Fvv(cc, t1, t2, eris)
Foo = imdk.cc_Foo(cc, t1, t2, eris)
Fov = imdk.cc_Fov(cc, t1, t2, eris)
Woooo = imdk.cc_Woooo(cc, t1, t2, eris)
Wvvvv = imdk.cc_Wvvvv(cc, t1, t2, eris)
Wovvo = imdk.cc_Wovvo(cc, t1, t2, eris)
# Move energy terms to the other side
for k in range(nkpts):
Fvv[k] -= numpy.diag(numpy.diag(fvv[k]))
Foo[k] -= numpy.diag(numpy.diag(foo[k]))
# Get the momentum conservation array
# Note: chemist's notation for momentum conserving t2(ki,kj,ka,kb), even though
# integrals are in physics notation
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
eris_ovvo = numpy.zeros(shape=(nkpts, nkpts, nkpts, nocc, nvir, nvir, nocc), dtype=t2.dtype)
eris_oovo = numpy.zeros(shape=(nkpts, nkpts, nkpts, nocc, nocc, nvir, nocc), dtype=t2.dtype)
eris_vvvo = numpy.zeros(shape=(nkpts, nkpts, nkpts, nvir, nvir, nvir, nocc), dtype=t2.dtype)
for km, kb, ke in kpts_helper.loop_kkk(nkpts):
kj = kconserv[km, ke, kb]
# <mb||je> -> -<mb||ej>
eris_ovvo[km, kb, ke] = -eris.ovov[km, kb, kj].transpose(0, 1, 3, 2)
# <mn||je> -> -<mn||ej>
# let kb = kn as a dummy variable
eris_oovo[km, kb, ke] = -eris.ooov[km, kb, kj].transpose(0, 1, 3, 2)
# <ma||be> -> - <be||am>*
# let kj = ka as a dummy variable
kj = kconserv[km, ke, kb]
eris_vvvo[ke, kj, kb] = -eris.ovvv[km, kb, ke].transpose(2, 3, 1, 0).conj()
# T1 equation
t1new = numpy.zeros(shape=t1.shape, dtype=t1.dtype)
for ka in range(nkpts):
ki = ka
t1new[ka] += numpy.array(fov[ka, :, :]).conj()
t1new[ka] += einsum('ie,ae->ia', t1[ka], Fvv[ka])
t1new[ka] += -einsum('ma,mi->ia', t1[ka], Foo[ka])
for km in range(nkpts):
t1new[ka] += einsum('imae,me->ia', t2[ka, km, ka], Fov[km])
t1new[ka] += -einsum('nf,naif->ia', t1[km], eris.ovov[km, ka, ki])
for kn in range(nkpts):
ke = kconserv[km, ki, kn]
t1new[ka] += -0.5 * einsum('imef,maef->ia', t2[ki, km, ke], eris.ovvv[km, ka, ke])
t1new[ka] += -0.5 * einsum('mnae,nmei->ia', t2[km, kn, ka], eris_oovo[kn, km, ke])
# T2 equation
t2new = numpy.array(eris.oovv).conj()
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
# Chemist's notation for momentum conserving t2(ki,kj,ka,kb)
kb = kconserv[ki, ka, kj]
Ftmp = Fvv[kb] - 0.5 * einsum('mb,me->be', t1[kb], Fov[kb])
tmp = einsum('ijae,be->ijab', t2[ki, kj, ka], Ftmp)
t2new[ki, kj, ka] += tmp
#t2new[ki,kj,kb] -= tmp.transpose(0,1,3,2)
Ftmp = Fvv[ka] - 0.5 * einsum('ma,me->ae', t1[ka], Fov[ka])
tmp = einsum('ijbe,ae->ijab', t2[ki, kj, kb], Ftmp)
t2new[ki, kj, ka] -= tmp
Ftmp = Foo[kj] + 0.5 * einsum('je,me->mj', t1[kj], Fov[kj])
tmp = einsum('imab,mj->ijab', t2[ki, kj, ka], Ftmp)
t2new[ki, kj, ka] -= tmp
#t2new[kj,ki,ka] += tmp.transpose(1,0,2,3)
Ftmp = Foo[ki] + 0.5 * einsum('ie,me->mi', t1[ki], Fov[ki])
tmp = einsum('jmab,mi->ijab', t2[kj, ki, ka], Ftmp)
t2new[ki, kj, ka] += tmp
for km in range(nkpts):
# Wminj
# - km - kn + ka + kb = 0
# => kn = ka - km + kb
kn = kconserv[ka, km, kb]
t2new[ki, kj, ka] += 0.5 * einsum('mnab,mnij->ijab', tau[km, kn, ka], Woooo[km, kn, ki])
ke = km
t2new[ki, kj, ka] += 0.5 * einsum('ijef,abef->ijab', tau[ki, kj, ke], Wvvvv[ka, kb, ke])
# Wmbej
# - km - kb + ke + kj = 0
# => ke = km - kj + kb
ke = kconserv[km, kj, kb]
tmp = einsum('imae,mbej->ijab', t2[ki, km, ka], Wovvo[km, kb, ke])
# - km - kb + ke + kj = 0
# => ke = km - kj + kb
#
# t[i,e] => ki = ke
# t[m,a] => km = ka
if km == ka and ke == ki:
tmp -= einsum('ie,ma,mbej->ijab', t1[ki], t1[km], eris_ovvo[km, kb, ke])
t2new[ki, kj, ka] += tmp
t2new[ki, kj, kb] -= tmp.transpose(0, 1, 3, 2)
t2new[kj, ki, ka] -= tmp.transpose(1, 0, 2, 3)
t2new[kj, ki, kb] += tmp.transpose(1, 0, 3, 2)
ke = ki
tmp = einsum('ie,abej->ijab', t1[ki], eris_vvvo[ka, kb, ke])
t2new[ki, kj, ka] += tmp
# P(ij) term
ke = kj
tmp = einsum('je,abei->ijab', t1[kj], eris_vvvo[ka, kb, ke])
t2new[ki, kj, ka] -= tmp
km = ka
tmp = einsum('ma,mbij->ijab', t1[ka], eris.ovoo[km, kb, ki])
t2new[ki, kj, ka] -= tmp
# P(ab) term
km = kb
tmp = einsum('mb,maij->ijab', t1[kb], eris.ovoo[km, ka, ki])
t2new[ki, kj, ka] += tmp
eia = numpy.zeros(shape=(nocc, nvir), dtype=t1new.dtype)
for ki in range(nkpts):
eia = foo[ki].diagonal()[:, None] - fvv[ki].diagonal()[None, :]
# When padding the occupied/virtual arrays, some fock elements will be zero
idx = numpy.where(abs(eia) < LOOSE_ZERO_TOL)[0]
eia[idx] = LARGE_DENOM
t1new[ki] /= eia
eijab = numpy.zeros(shape=(nocc, nocc, nvir, nvir), dtype=t2new.dtype)
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki, ka, kj]
eijab = (foo[ki].diagonal()[:, None, None, None] + foo[kj].diagonal()[None, :, None, None] -
fvv[ka].diagonal()[None, None, :, None] - fvv[kb].diagonal()[None, None, None, :])
# Due to padding; see above discussion concerning t1new in update_amps()
idx = numpy.where(abs(eijab) < LOOSE_ZERO_TOL)[0]
eijab[idx] = LARGE_DENOM
t2new[ki, kj, ka] /= eijab
time0 = log.timer_debug1('update t1 t2', *time0)
return t1new, t2new
class GCCSD(gccsd.GCCSD):
def __init__(self, mf, frozen=0, mo_coeff=None, mo_occ=None):
assert (isinstance(mf, scf.khf.KSCF))
if not isinstance(mf, scf.kghf.KGHF):
mf = scf.addons.convert_to_ghf(mf)
self.kpts = mf.kpts
gccsd.GCCSD.__init__(self, mf, frozen, mo_coeff, mo_occ)
@property
def nkpts(self):
return len(self.kpts)
get_nocc = get_nocc
get_nmo = get_nmo
get_frozen_mask = get_frozen_mask
def dump_flags(self):
logger.info(self, '\n')
logger.info(self, '******** PBC CC flags ********')
gccsd.GCCSD.dump_flags(self)
return self
def init_amps(self, eris):
time0 = time.clock(), time.time()
nocc = self.nocc
nvir = self.nmo - nocc
nkpts = self.nkpts
t1 = numpy.zeros((nkpts, nocc, nvir), dtype=numpy.complex128)
t2 = numpy.zeros((nkpts, nkpts, nkpts, nocc, nocc, nvir, nvir), dtype=numpy.complex128)
self.emp2 = 0
foo = eris.fock[:, :nocc, :nocc].copy()
fvv = eris.fock[:, nocc:, nocc:].copy()
fov = eris.fock[:, :nocc, nocc:].copy()
eris_oovv = eris.oovv.copy()
eia = numpy.zeros((nocc, nvir))
eijab = numpy.zeros((nocc, nocc, nvir, nvir))
kconserv = kpts_helper.get_kconserv(self._scf.cell, self.kpts)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki, ka, kj]
eijab = (foo[ki].diagonal()[:, None, None, None] + foo[kj].diagonal()[None, :, None, None] -
fvv[ka].diagonal()[None, None, :, None] - fvv[kb].diagonal()[None, None, None, :])
# Due to padding; see above discussion concerning t1new in update_amps()
idx = numpy.where(abs(eijab) < LOOSE_ZERO_TOL)[0]
eijab[idx] = LARGE_DENOM
t2[ki, kj, ka] = eris_oovv[ki, kj, ka] / eijab
t2 = numpy.conj(t2)
self.emp2 = 0.25 * numpy.einsum('pqrijab,pqrijab', t2, eris_oovv).real
self.emp2 /= nkpts
logger.info(self, 'Init t2, MP2 energy = %.15g', self.emp2.real)
logger.timer(self, 'init mp2', *time0)
return self.emp2, t1, t2
def ccsd(self, t1=None, t2=None, eris=None, **kwargs):
if eris is None: eris = self.ao2mo(self.mo_coeff)
self.eris = eris
self.converged, self.e_corr, self.t1, self.t2 = \
kernel(self, eris, t1, t2, max_cycle=self.max_cycle,
tol=self.conv_tol,
tolnormt=self.conv_tol_normt,
max_memory=self.max_memory, verbose=self.verbose)
if self.converged:
logger.info(self, 'CCSD converged')
else:
logger.info(self, 'CCSD not converge')
if self._scf.e_tot == 0:
logger.info(self, 'E_corr = %.16g', self.e_corr)
else:
logger.info(self, 'E(CCSD) = %.16g E_corr = %.16g', self.e_corr + self._scf.e_tot, self.e_corr)
return self.e_corr, self.t1, self.t2
def ao2mo(self, mo_coeff=None):
nkpts = self.nkpts
nmo = self.nmo
mem_incore = nkpts**3 * nmo**4 * 8 / 1e6
mem_now = lib.current_memory()[0]
if (mem_incore + mem_now < self.max_memory) or self.mol.incore_anyway:
return _make_eris_incore(self, mo_coeff)
else:
raise NotImplementedError
def update_amps(self, t1, t2, eris, max_memory=2000):
return update_amps(self, t1, t2, eris, max_memory)
def amplitudes_to_vector(self, t1, t2):
return numpy.hstack((t1.ravel(), t2.ravel()))
def vector_to_amplitudes(self, vec, nmo=None, nocc=None):
if nocc is None: nocc = self.nocc
if nmo is None: nmo = self.nmo
nvir = nmo - nocc
nkpts = self.nkpts
nov = nkpts * nocc * nvir
t1 = vec[:nov].reshape(nkpts, nocc, nvir)
t2 = vec[nov:].reshape(nkpts, nkpts, nkpts, nocc, nocc, nvir, nvir)
return t1, t2
CCSD = GCCSD
def _make_eris_incore(cc, mo_coeff=None):
log = logger.Logger(cc.stdout, cc.verbose)
cput0 = (time.clock(), time.time())
eris = gccsd._PhysicistsERIs()
kpts = cc.kpts
nkpts = cc.nkpts
nocc = cc.nocc
nmo = cc.nmo
nvir = nmo - nocc
eris.nocc = nocc
#if any(nocc != numpy.count_nonzero(cc._scf.mo_occ[k] > 0) for k in range(nkpts)):
# raise NotImplementedError('Different occupancies found for different k-points')
if mo_coeff is None:
# If mo_coeff is not canonical orbital
# TODO does this work for k-points? changed to conjugate.
raise NotImplementedError
mo_coeff = cc.mo_coeff
nao = mo_coeff[0].shape[0]
dtype = mo_coeff[0].dtype
moidx = get_frozen_mask(cc)
nocc_per_kpt = numpy.asarray(get_nocc(cc, per_kpoint=True))
nmo_per_kpt = numpy.asarray(get_nmo(cc, per_kpoint=True))
padded_moidx = []
for k in range(nkpts):
kpt_nocc = nocc_per_kpt[k]
kpt_nvir = nmo_per_kpt[k] - kpt_nocc
kpt_padded_moidx = numpy.concatenate((numpy.ones(kpt_nocc, dtype=numpy.bool),
numpy.zeros(nmo - kpt_nocc - kpt_nvir, dtype=numpy.bool),
numpy.ones(kpt_nvir, dtype=numpy.bool)))
padded_moidx.append(kpt_padded_moidx)
eris.mo_coeff = []
eris.orbspin = []
# Generate the molecular orbital coefficients with the frozen orbitals masked.
# Each MO is tagged with orbspin, a list of 0's and 1's that give the overall
# spin of each MO.
#
# Here we will work with two index arrays; one is for our original (small) moidx
# array while the next is for our new (large) padded array.
for k in range(nkpts):
kpt_moidx = moidx[k]
kpt_padded_moidx = padded_moidx[k]
mo = numpy.zeros((nao, nmo), dtype=dtype)
mo[:, kpt_padded_moidx] = mo_coeff[k][:, kpt_moidx]
if hasattr(mo_coeff[k], 'orbspin'):
orbspin_dtype = mo_coeff[k].orbspin[kpt_moidx].dtype
orbspin = numpy.zeros(nmo, dtype=orbspin_dtype)
orbspin[kpt_padded_moidx] = mo_coeff[k].orbspin[kpt_moidx]
mo = lib.tag_array(mo, orbspin=orbspin)
eris.orbspin.append(orbspin)
# FIXME: What if the user freezes all up spin orbitals in
# an RHF calculation? The number of electrons will still be
# even.
else: # guess orbital spin - assumes an RHF calculation
assert (numpy.count_nonzero(kpt_moidx) % 2 == 0)
orbspin = numpy.zeros(mo.shape[1], dtype=int)
orbspin[1::2] = 1
mo = lib.tag_array(mo, orbspin=orbspin)
eris.orbspin.append(orbspin)
eris.mo_coeff.append(mo)
# Re-make our fock MO matrix elements from density and fock AO
dm = cc._scf.make_rdm1(cc.mo_coeff, cc.mo_occ)
fockao = cc._scf.get_hcore() + cc._scf.get_veff(cc._scf.cell, dm)
eris.fock = numpy.asarray([reduce(numpy.dot, (mo.T.conj(), fockao[k], mo)) for k, mo in enumerate(eris.mo_coeff)])
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
# The bottom nao//2 coefficients are down (up) spin while the top are up (down).
# These are 'spin-less' quantities; spin-conservation will be added manually.
so_coeff = [mo[:nao // 2] + mo[nao // 2:] for mo in eris.mo_coeff]
eri = numpy.empty((nkpts, nkpts, nkpts, nmo, nmo, nmo, nmo), dtype=numpy.complex128)
fao2mo = cc._scf.with_df.ao2mo
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kq, kr]
eri_kpt = fao2mo(
(so_coeff[kp], so_coeff[kq], so_coeff[kr], so_coeff[ks]), (kpts[kp], kpts[kq], kpts[kr], kpts[ks]),
compact=False)
eri_kpt[(eris.orbspin[kp][:, None] != eris.orbspin[kq]).ravel()] = 0
eri_kpt[:, (eris.orbspin[kr][:, None] != eris.orbspin[ks]).ravel()] = 0
eri_kpt = eri_kpt.reshape(nmo, nmo, nmo, nmo)
eri[kp, kq, kr] = eri_kpt
# Check some antisymmetrized properties of the integrals
if DEBUG:
check_antisymm_3412(cc, cc.kpts, eri)
# Antisymmetrizing (pq|rs)-(ps|rq), where the latter integral is equal to
# (rq|ps); done since we aren't tracking the kpoint of orbital 's'
eri = eri - eri.transpose(2, 1, 0, 5, 4, 3, 6)
# Chemist -> physics notation
eri = eri.transpose(0, 2, 1, 3, 5, 4, 6)
# Set the various integrals
eris.dtype = eri.dtype
eris.oooo = eri[:, :, :, :nocc, :nocc, :nocc, :nocc].copy() / nkpts
eris.ooov = eri[:, :, :, :nocc, :nocc, :nocc, nocc:].copy() / nkpts
eris.ovoo = eri[:, :, :, :nocc, nocc:, :nocc, :nocc].copy() / nkpts
eris.oovv = eri[:, :, :, :nocc, :nocc, nocc:, nocc:].copy() / nkpts
eris.ovov = eri[:, :, :, :nocc, nocc:, :nocc, nocc:].copy() / nkpts
eris.ovvv = eri[:, :, :, :nocc, nocc:, nocc:, nocc:].copy() / nkpts
eris.vvvv = eri[:, :, :, nocc:, nocc:, nocc:, nocc:].copy() / nkpts
log.timer('CCSD integral transformation', *cput0)
return eris
def check_antisymm_3412(cc, kpts, integrals):
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
nkpts = len(kpts)
diff = 0.0
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kr, kq]
for p in range(integrals.shape[3]):
for q in range(integrals.shape[4]):
for r in range(integrals.shape[5]):
for s in range(integrals.shape[6]):
pqrs = integrals[kp, kq, kr, p, q, r, s]
rspq = integrals[kq, kp, kr, q, p, r, s]
cdiff = numpy.linalg.norm(pqrs - rspq).real
if diff > 1e-5:
print("AS diff = %.15g" % cdiff, pqrs, rspq, kp, kq, kr, ks, p, q, r, s)
diff = max(diff, cdiff)
print("antisymmetrization : max diff = %.15g" % diff)
if diff > 1e-5:
print("Energy cutoff (or cell.mesh) is not enough to converge AO integrals.")
return diff
def check_antisymm_12(cc, kpts, integrals):
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
nkpts = len(kpts)
diff = 0.0
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kr, kq]
for p in range(integrals.shape[3]):
for q in range(integrals.shape[4]):
for r in range(integrals.shape[5]):
for s in range(integrals.shape[6]):
pqrs = integrals[kp, kq, kr, p, q, r, s]
qprs = integrals[kq, kp, kr, q, p, r, s]
cdiff = numpy.linalg.norm(pqrs + qprs).real
if diff > 1e-5:
print("AS diff = %.15g" % cdiff, pqrs, qprs, kp, kq, kr, ks, p, q, r, s)
diff = max(diff, cdiff)
print("antisymmetrization : max diff = %.15g" % diff)
if diff > 1e-5:
print("Energy cutoff (or cell.mesh) is not enough to converge AO integrals.")
def check_antisymm_34(cc, kpts, integrals):
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
nkpts = len(kpts)
diff = 0.0
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kr, kq]
for p in range(integrals.shape[3]):
for q in range(integrals.shape[4]):
for r in range(integrals.shape[5]):
for s in range(integrals.shape[6]):
pqrs = integrals[kp, kq, kr, p, q, r, s]
pqsr = integrals[kp, kq, ks, p, q, s, r]
cdiff = numpy.linalg.norm(pqrs + pqsr).real
if diff > 1e-5:
print("AS diff = %.15g" % cdiff, pqrs, pqsr, kp, kq, kr, ks, p, q, r, s)
diff = max(diff, cdiff)
print("antisymmetrization : max diff = %.15g" % diff)
if diff > 1e-5:
print("Energy cutoff (or cell.mesh) is not enough to converge AO integrals.")
|
package com.jcmore2.imageprocessing.utils;
import java.io.ByteArrayOutputStream;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Environment;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;
/**
* Utils
* @author jcmore2 <EMAIL>
*
*/
public class Utils {
public static byte[] getBytes(Bitmap bitmap) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bitmap.compress(CompressFormat.PNG, 0, stream);
return stream.toByteArray();
}
public static Bitmap getImage(byte[] image) {
return BitmapFactory.decodeByteArray(image, 0, image.length);
}
public static boolean isSdReadable() {
boolean mExternalStorageAvailable = false;
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
// We can read and write the media
mExternalStorageAvailable = true;
Log.i("isSdReadable", "External storage card is readable.");
} else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) {
// We can only read the media
Log.i("isSdReadable", "External storage card is readable.");
mExternalStorageAvailable = true;
} else {
// Something else is wrong. It may be one of many other
// states, but all we need to know is we can neither read nor write
mExternalStorageAvailable = false;
}
return mExternalStorageAvailable;
}
public static Bitmap getBitmapFromDrawable(Drawable d) {
Bitmap bitmap = ((BitmapDrawable) d).getBitmap();
return bitmap;
}
public static Drawable getDrawableFromBitmap(Context context, Bitmap bm) {
Drawable d = new BitmapDrawable(context.getResources(), bm);
return d;
}
public static int getAltoPantalla(Context context) {
WindowManager wm = (WindowManager) context
.getSystemService(Context.WINDOW_SERVICE);
Display display = wm.getDefaultDisplay();
return display.getHeight();
}
public static int getAnchoPantalla(Context context) {
WindowManager wm = (WindowManager) context
.getSystemService(Context.WINDOW_SERVICE);
Display display = wm.getDefaultDisplay();
return display.getWidth();
}
private static int calculateInSampleSize(BitmapFactory.Options options,
int reqWidth, int reqHeight) {
// Raw height and width of image
final int height = options.outHeight;
final int width = options.outWidth;
int inSampleSize = 1;
if (height > reqHeight || width > reqWidth) {
if (width > height) {
inSampleSize = Math.round((float) height / (float) reqHeight);
} else {
inSampleSize = Math.round((float) width / (float) reqWidth);
}
}
return inSampleSize;
}
public static Bitmap decodeSampledBitmapFromResource(Resources res,
int resId, int reqWidth, int reqHeight) {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeResource(res, resId, options);
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, reqWidth,
reqHeight);
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
return BitmapFactory.decodeResource(res, resId, options);
}
public static Bitmap decodeSampledBitmapFromFile(String filePath, int reqWidth, int reqHeight) {
// First decode with inJustDecodeBounds=true to check dimensions
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath, options);
// Calculate inSampleSize
options.inSampleSize = calculateInSampleSize(options, reqWidth,
reqHeight);
// Decode bitmap with inSampleSize set
options.inJustDecodeBounds = false;
return BitmapFactory.decodeFile(filePath, options);
}
}
|
fan.std.UuidFactoryPeer = function(){}
fan.std.UuidFactoryPeer.resolveMacAddr = function()
{
return 0;
}
|
<reponame>dimiro1/unchained
package sha1
import (
"testing"
)
func TestUnsaltedSHA1PasswordEncode(t *testing.T) {
encoded, err := NewUnsaltedSHA1PasswordHasher().Encode("this-is-my-password", "")
if err != nil {
t.Fatalf("Encode error: %s", err)
}
expected := "sha1$$47a0caaf95db24a7f6701f0681610b9eed7e880f"
if encoded != expected {
t.Fatalf("Encoded hash %s does not match %s.", encoded, expected)
}
}
func TestUnsaltedSHA1PasswordVerify(t *testing.T) {
valid, err := NewUnsaltedSHA1PasswordHasher().Verify("<PASSWORD>", "sha1$$47a0caaf95db24a7f6701f0681610b9eed7e880f")
if err != nil {
t.Fatalf("Verify error: %s", err)
}
if !valid {
t.Fatal("Password should be valid.")
}
}
func TestSHA1PasswordEncode(t *testing.T) {
encoded, err := NewSHA1PasswordHasher().Encode("this-is-my-password", "<PASSWORD>")
if err != nil {
t.Fatalf("Encode error: %s", err)
}
expected := "sha1$FJkZbdAmXSDF$972db6461472a5345bab667d0255d120e06a3415"
if encoded != expected {
t.Fatalf("Encoded hash %s does not match %s.", encoded, expected)
}
}
func TestSHA1PasswordVerify(t *testing.T) {
valid, err := NewSHA1PasswordHasher().Verify("this-is-my-password", "sha1$FJkZbdAmXSDF$972db6461472a5345bab667d0255d120e06a3415")
if err != nil {
t.Fatalf("Verify error: %s", err)
}
if !valid {
t.Fatal("Password should be valid.")
}
}
|
func secondElement(from array: [Int]) -> [Int] {
return array.enumerated().compactMap { index, element in
(index % 2 == 0) ? nil : element
}
} |
from typing import List
def max_profit(prices: List[int]) -> int:
if not prices:
return 0
min_price = prices[0]
max_profit = 0
for price in prices:
if price < min_price:
min_price = price
else:
max_profit = max(max_profit, price - min_price)
return max_profit |
<reponame>Joursoir/umt<filename>UefiMonitorTest/tests/Grayscale.c<gh_stars>0
#include <Library/DebugLib.h>
#include <Library/HiiLib.h>
#include <Library/MemoryAllocationLib.h>
#include "Grayscale.h"
STATIC UINT32 CurrentGrayTone = 50;
VOID
GrayscaleTestInit (
IN UMT_CONTEXT *Ctx
)
{
GRAPHICS_PIXEL_COLOR ColorOutput = { 0x00 };
GRAPHICS_CONTEXT *Graphics = Ctx->Graphics;
ColorOutput.Red = (CurrentGrayTone * 255) / 100;
ColorOutput.Green = (CurrentGrayTone * 255) / 100;
ColorOutput.Blue = (CurrentGrayTone * 255) / 100;
PutRect (Graphics,
0,
0,
Graphics->Width,
Graphics->Height,
&ColorOutput);
if (Ctx->ShowTip) {
GrayscaleTestTip (Ctx);
}
}
VOID
GrayscaleTestDoit (
IN UMT_CONTEXT *Ctx
)
{
}
VOID
GrayscaleTestTip (
IN UMT_CONTEXT *Ctx
)
{
GRAPHICS_CONTEXT *Graphics;
Graphics = Ctx->Graphics;
if (Ctx->ShowTip == FALSE) {
// Restore
GrayscaleTestInit (Ctx);
return;
}
DrawRectWithBorder (Graphics,
15,
Graphics->Height - 15 - 134,
430, Graphics->Height - 15,
3,
&gUmtColors[UMT_COLOR_WHITE].Color,
&gUmtColors[UMT_COLOR_NAVY].Color);
DrawHiiStringF (Graphics,
25,
Graphics->Height - 15 - 124,
&gUmtColors[UMT_COLOR_NAVY].Color,
STRING_TOKEN (STR_GRAYSCALE_TITLE), gUmtHiiHandle);
DrawHiiStringF (Graphics,
25,
Graphics->Height - 15 - 104,
&gUmtColors[UMT_COLOR_BLACK].Color,
STRING_TOKEN (STR_GRAYSCALE_MSG), gUmtHiiHandle,
CurrentGrayTone);
}
VOID
GrayscaleTestChangeParam (
IN UMT_CONTEXT *Ctx,
IN INT8 ParamStep
)
{
}
VOID
GrayscaleTestChangeValue (
IN UMT_CONTEXT *Ctx,
IN INT8 ValueStep
)
{
if (CurrentGrayTone == 5 && ValueStep < 0) {
return;
} else {
CurrentGrayTone += ValueStep * 5;
if (CurrentGrayTone > 95)
{
CurrentGrayTone = 95;
return;
}
}
GrayscaleTestInit (Ctx);
}
|
import cv2 as cv
import cvision as cvis
from picamera.array import PiRGBArray
from picamera import PiCamera
from time import sleep
from matplotlib import pyplot as plt
import numpy as np
import json
def nothing(x):
pass
# Pobranie danych o kamerze, macierzy homografii oraz kolejności podawania elementów
# z pliku konfiguracyjnego, ustawienie wskaźnika element na pierwszą wartość
config, order, mtx, dist, T, distRatio, thresholdValue, objectHeight = cvis.configRead('config.json')
# Początkowe ustawienia kamery
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 30
rawCapture = PiRGBArray(camera, size=(640, 480))
# Zatrzymanie programu aby kamera mogła się uruchomić
sleep(0.1)
scale = 40 # Zmiejszenie obrazu do 40% oryginalnego
width = int(camera.resolution[0] * scale / 100)
height = int(camera.resolution[1] * scale / 100)
dim = (width, 3*height) # Nowe wymiary obrazu
fig = plt.figure() # Inicjalizacja wykresu
# Inicjalizacja okna
cv.namedWindow("Threshold Setup")
cv.createTrackbar("Próg binaryzacji", "Threshold Setup", thresholdValue, 255, nothing)
for frame in camera.capture_continuous(rawCapture, format='bgr', use_video_port=True):
# Trójwymiarowa macierz o wymiarach szerokość, wysokość i kanał koloru
img = frame.array # Zapisanie akutalnego kadru do zmiennej
thresholdValue = cv.getTrackbarPos('Próg binaryzacji', "Threshold Setup") # Pobranie wartości z suwaka
hsv = cv.cvtColor(img, cv.COLOR_BGR2HSV) # Zmiana przestrzeni barw z BGR na HSV
h, s, v = cv.split(hsv) # Wydzielenie kanałów HSV obrazu na osobne zmienne
blurred = cv.GaussianBlur(v, (5, 5), 0) # Rozmycie obrazu
thresh = cv.threshold(blurred, thresholdValue, 255, cv.THRESH_BINARY)[1] # Binaryzacja obrazu
thresh = cv.cvtColor(thresh, cv.COLOR_GRAY2BGR) # Zmiana zbinaryzowanego obrazu na 3 kanały
hist = cv.calcHist([v], [0], None, [256], [0, 256]) # Obliczenie histogramu obrazu
plt.clf() # Usunięcie poprzedniego wykresu z okna
plt.plot(hist) # Utworzenie wykresu histogramu
plt.xlim([0, 255]) # Ustawienie limitu na osi X
fig.canvas.draw() # Narysowanie wykresu
hist_img = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') # Zamiana wykresu na rysunek
hist_img = hist_img.reshape(fig.canvas.get_width_height()[::-1]+(3,)) # Zmiana rozmiaru tablicy
hist_img = cv.cvtColor(hist_img, cv.COLOR_RGB2BGR) # Zamiana przestrzeni barw z RGB na BGR
vertical_img = np.vstack((img, hist_img, thresh)) # Połączenie trzech obrazów w pionie
vertical_img = cv.resize(vertical_img, dim, interpolation=cv.INTER_AREA) # Zmiana rozdzielczości obrazu
cv.imshow("Threshold Setup", vertical_img) # Wyświetlenie obrazu
# Oczekiwanie na wciśnięcie klawisza przez użytkownika, klatka wyświetlana jest przez 1ms
# Operator logiczny AND sprawia że ważny jest tylko pierwszy bajt zwracany przez funkcję
# dzięki temu nie ma znaczenia czy klwaisz został wciśnięty z włączonym CapsLock czy nie
key = cv.waitKey(1) & 0xFF
if key == 13:
print('Zapisano w pliku konfiguracyjnym wartość progu binaryzacji')
print('Wszystkie piksele o wartości poniżej %d będą czarne,\na wszystkie > %d białe' %(thresholdValue, thresholdValue))
cv.imwrite('threshold_setup.png', vertical_img)
config['cam_calibration']['thresholdValue'] = thresholdValue # Zapisanie zmiennej do pliku konfiguracyjnego
with open('config.json', 'w') as config_file:
json.dump(config, config_file, sort_keys=True, indent=4)
cv.destroyAllWindows()
break
rawCapture.truncate(0)
|
# alias ddr="sudo dmidecode| grep 'memory device' -i -A 10" # 查看ddr3还是ddr4
alias ddr="sudo dmidecode | grep DDR -C 10" # 查看ddr3还是ddr4
|
package primitives
import (
"fnd.localhost/handshake/encoding"
"io"
)
type Output struct {
Value uint64
Address *Address
Covenant *Covenant
}
func (o *Output) Encode(w io.Writer) error {
if err := encoding.WriteUint64(w, o.Value); err != nil {
return err
}
if err := o.Address.Encode(w); err != nil {
return err
}
if err := o.Covenant.Encode(w); err != nil {
return err
}
return nil
}
func (o *Output) Decode(r io.Reader) error {
value, err := encoding.ReadUint64(r)
if err != nil {
return err
}
address := new(Address)
if err := address.Decode(r); err != nil {
return err
}
covenant := new(Covenant)
if err := covenant.Decode(r); err != nil {
return err
}
o.Value = value
o.Address = address
o.Covenant = covenant
return nil
}
|
#!/bin/bash
ctID=$1
ctIP=$2
ctRootpw="$3"
containername="$4"
script_path=$(realpath "$0" | sed 's|\(.*\)/.*|\1|' | cut -d/ -f1,2,3)
source "$script_path/helper/variables.sh"
source "$script_path/helper/functions.sh"
source "$shiot_configPath/$shiot_configFile"
source "$script_path/language/$var_language.sh"
pct exec $ctID -- bash -ci "add-apt-repository universe"
pct exec $ctID -- bash -ci "wget -qO - https://repo.jellyfin.org/ubuntu/jellyfin_team.gpg.key | apt-key add - > /dev/null 2>&1"
pct exec $ctID -- bash -ci "echo \"deb [arch=$( dpkg --print-architecture )] https://repo.jellyfin.org/ubuntu focal main\" | tee /etc/apt/sources.list.d/jellyfin.list > /dev/null 2>&1"
pct exec $ctID -- bash -ci "apt-get update > /dev/null 2>&1"
pct exec $ctID -- bash -ci "apt-get install -y jellyfin > /dev/null 2>&1"
pct exec $ctID -- bash -ci "mkdir -p /media/Movies/"
pct exec $ctID -- bash -ci "mkdir -p /media/Series/"
pct exec $ctID -- bash -ci "mkdir -p /media/Photos/"
pct exec $ctID -- bash -ci "systemctl start jellyfin && systemctl enable jellyfin > /dev/null 2>&1"
exit 0
|
<filename>hexa/plugins/connector_s3/migrations/0022_app_role_arn.py
# Generated by Django 3.2.7 on 2021-10-18 15:02
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("connector_s3", "0021_rename_dirname_object_parent_key"),
]
operations = [
migrations.RenameField(
model_name="credentials",
old_name="role_arn",
new_name="app_role_arn",
),
]
|
package com.proxycrawl;
import java.util.HashMap;
import org.junit.Test;
import org.junit.Rule;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertEquals;
public class ScraperAPITest
{
    @Rule
    public ExpectedException exceptionRule = ExpectedException.none();

    @Test
    public void itThrowsExceptionWhenTokenIsNull()
    {
        exceptionRule.expect(RuntimeException.class);
        exceptionRule.expectMessage("Token is required");
        new ScraperAPI(null);
    }

    @Test
    public void itThrowsExceptionWhenTokenIsEmpty()
    {
        exceptionRule.expect(RuntimeException.class);
        exceptionRule.expectMessage("Token is required");
        // Bug fix: previously passed null here, duplicating the test above
        // and never exercising the empty-string case this test is named for.
        new ScraperAPI("");
    }

    @Test
    public void itThrowsExceptionWhenUrlIsNull()
    {
        exceptionRule.expect(RuntimeException.class);
        exceptionRule.expectMessage("URL is required");
        ScraperAPI api = new ScraperAPI("testtoken");
        api.get(null);
    }

    @Test
    public void itThrowsExceptionWhenUrlIsEmpty()
    {
        exceptionRule.expect(RuntimeException.class);
        exceptionRule.expectMessage("URL is required");
        ScraperAPI api = new ScraperAPI("testtoken");
        api.get("");
    }

    @Test
    public void itThrowsExceptionWhenUrlIsInvalid() {
        // NOTE(review): this test performs a live HTTP request and pins the
        // exact message of the remote 403 response — brittle; consider
        // mocking the transport layer instead.
        exceptionRule.expect(RuntimeException.class);
        exceptionRule.expectMessage("Server returned HTTP response code: 403 for URL: https://api.proxycrawl.com/scraper?token=testtoken&url=apple");
        ScraperAPI api = new ScraperAPI("testtoken");
        api.get("apple");
    }

    @Test
    public void itAssignsToken() {
        ScraperAPI api = new ScraperAPI("testtoken");
        // JUnit convention: expected value first, actual second.
        assertEquals("testtoken", api.getToken());
    }

    @Test
    public void itThrowsExceptionWhenInvokingPost()
    {
        exceptionRule.expect(RuntimeException.class);
        exceptionRule.expectMessage("Only GET is allowed for the ScraperAPI");
        ScraperAPI api = new ScraperAPI("testtoken");
        HashMap<String, Object> data = new HashMap<String, Object>();
        HashMap<String, Object> options = new HashMap<String, Object>();
        api.post("https://www.apple.com", data, options);
    }
}
|
<reponame>fr1t2/docusign-node-client
/**
* DocuSign REST API
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2.1
* Contact: <EMAIL>
*
* NOTE: This class is auto generated. Do not edit the class manually and submit a new issue instead.
*
*/
(function(root, factory) {
  if (typeof define === 'function' && define.amd) {
    // AMD. Register as an anonymous module.
    define(['ApiClient', 'model/PathExtendedElement', 'model/PropertyMetadata'], factory);
  } else if (typeof module === 'object' && module.exports) {
    // CommonJS-like environments that support module.exports, like Node.
    module.exports = factory(require('../ApiClient'), require('./PathExtendedElement'), require('./PropertyMetadata'));
  } else {
    // Browser globals (root is window)
    if (!root.Docusign) {
      root.Docusign = {};
    }
    root.Docusign.MergeField = factory(root.Docusign.ApiClient, root.Docusign.PathExtendedElement, root.Docusign.PropertyMetadata);
  }
}(this, function(ApiClient, PathExtendedElement, PropertyMetadata) {
  'use strict';
  /**
   * The MergeField model module.
   * @module model/MergeField
   * @version 3.0.0
   */
  /**
   * Constructs a new <code>MergeField</code>.
   * Contains information for transferring values between Salesforce data fields and DocuSign Tabs.
   * @alias module:model/MergeField
   * @class
   */
  var exports = function() {
    var _this = this;
  };
  /**
   * Constructs a <code>MergeField</code> from a plain JavaScript object, optionally creating a new instance.
   * Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
   * @param {Object} data The plain JavaScript object bearing properties of interest.
   * @param {module:model/MergeField} obj Optional instance to populate.
   * @return {module:model/MergeField} The populated <code>MergeField</code> instance.
   */
  exports.constructFromObject = function(data, obj) {
    if (data) {
      obj = obj || new exports();
      if (data.hasOwnProperty('allowSenderToEdit')) {
        obj['allowSenderToEdit'] = ApiClient.convertToType(data['allowSenderToEdit'], 'String');
      }
      if (data.hasOwnProperty('allowSenderToEditMetadata')) {
        obj['allowSenderToEditMetadata'] = PropertyMetadata.constructFromObject(data['allowSenderToEditMetadata']);
      }
      if (data.hasOwnProperty('configurationType')) {
        obj['configurationType'] = ApiClient.convertToType(data['configurationType'], 'String');
      }
      if (data.hasOwnProperty('configurationTypeMetadata')) {
        obj['configurationTypeMetadata'] = PropertyMetadata.constructFromObject(data['configurationTypeMetadata']);
      }
      if (data.hasOwnProperty('path')) {
        obj['path'] = ApiClient.convertToType(data['path'], 'String');
      }
      if (data.hasOwnProperty('pathExtended')) {
        obj['pathExtended'] = ApiClient.convertToType(data['pathExtended'], [PathExtendedElement]);
      }
      if (data.hasOwnProperty('pathExtendedMetadata')) {
        obj['pathExtendedMetadata'] = PropertyMetadata.constructFromObject(data['pathExtendedMetadata']);
      }
      if (data.hasOwnProperty('pathMetadata')) {
        obj['pathMetadata'] = PropertyMetadata.constructFromObject(data['pathMetadata']);
      }
      if (data.hasOwnProperty('row')) {
        obj['row'] = ApiClient.convertToType(data['row'], 'String');
      }
      if (data.hasOwnProperty('rowMetadata')) {
        obj['rowMetadata'] = PropertyMetadata.constructFromObject(data['rowMetadata']);
      }
      if (data.hasOwnProperty('writeBack')) {
        obj['writeBack'] = ApiClient.convertToType(data['writeBack'], 'String');
      }
      if (data.hasOwnProperty('writeBackMetadata')) {
        obj['writeBackMetadata'] = PropertyMetadata.constructFromObject(data['writeBackMetadata']);
      }
    }
    return obj;
  }
  /**
   * When set to **true**, the sender can modify the value of the custom tab during the sending process.
   * @member {String} allowSenderToEdit
   */
  exports.prototype['allowSenderToEdit'] = undefined;
  /**
   * @member {module:model/PropertyMetadata} allowSenderToEditMetadata
   */
  exports.prototype['allowSenderToEditMetadata'] = undefined;
  /**
   * If merge fields are being used, specifies the type of the merge field. The only supported value is **salesforce**.
   * @member {String} configurationType
   */
  exports.prototype['configurationType'] = undefined;
  /**
   * @member {module:model/PropertyMetadata} configurationTypeMetadata
   */
  exports.prototype['configurationTypeMetadata'] = undefined;
  /**
   * Sets the object associated with the custom tab. Currently this is the Salesforce Object.
   * @member {String} path
   */
  exports.prototype['path'] = undefined;
  /**
   *
   * @member {Array.<module:model/PathExtendedElement>} pathExtended
   */
  exports.prototype['pathExtended'] = undefined;
  /**
   * @member {module:model/PropertyMetadata} pathExtendedMetadata
   */
  exports.prototype['pathExtendedMetadata'] = undefined;
  /**
   * @member {module:model/PropertyMetadata} pathMetadata
   */
  exports.prototype['pathMetadata'] = undefined;
  /**
   * Specifies the row number in a Salesforce table that the merge field value corresponds to.
   * @member {String} row
   */
  exports.prototype['row'] = undefined;
  /**
   * @member {module:model/PropertyMetadata} rowMetadata
   */
  exports.prototype['rowMetadata'] = undefined;
  /**
   * When set to **true**, the information entered in the tab automatically updates the related Salesforce data when an envelope is completed.
   * @member {String} writeBack
   */
  exports.prototype['writeBack'] = undefined;
  /**
   * @member {module:model/PropertyMetadata} writeBackMetadata
   */
  exports.prototype['writeBackMetadata'] = undefined;
  return exports;
}));
|
<filename>cmd/json.go
/*
Copyright © 2020 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"fmt"
"bytes"
"io/ioutil"
"encoding/json"
"github.com/spf13/cobra"
)
// jsonBytes pretty-prints raw JSON: it returns a copy of b re-indented
// with four-space indentation (the top-level element starts at column 0).
// On error, the bytes written so far are returned alongside the error.
func jsonBytes(b []byte) ([]byte, error) {
	var buf bytes.Buffer
	if err := json.Indent(&buf, b, "", "    "); err != nil {
		return buf.Bytes(), err
	}
	return buf.Bytes(), nil
}
// printJson reads the JSON file at fileName, indents it via jsonBytes and
// writes the result to stdout followed by a newline. Any read or indent
// error is returned unchanged.
func printJson(fileName string) error {
	data, err := ioutil.ReadFile(fileName)
	if err != nil {
		return err
	}
	// ioutil.ReadFile already returns []byte; the original re-converted it
	// with []byte(jsonFile), which was a redundant copy.
	indented, err := jsonBytes(data)
	if err != nil {
		return err
	}
	fmt.Printf("%s\n", indented)
	return nil
}
// jsonCmd represents the json command: it pretty-prints the JSON file named
// by the --file/-f flag.
var jsonCmd = &cobra.Command{
	Use:   "json",
	Short: "<~>",
	Long: `command to print a json file in indented format
file must be in current directory tree`,
	Run: func(cmd *cobra.Command, args []string) {
		file, _ := cmd.Flags().GetString("file")
		// "<>" is the flag's default value (set in init below), so it
		// doubles as the "flag not provided" sentinel here.
		if file == "<>" {
			fmt.Println("Error: missing file parameter")
		} else {
			err := printJson(file)
			if err != nil {
				// Errors are reported to stdout rather than returned;
				// the command always exits successfully.
				fmt.Println(err.Error())
			}
		}
	},
}
// init registers the json subcommand on the root command and declares its
// --file/-f flag. The "<>" default acts as a missing-file sentinel checked
// in jsonCmd's Run function.
func init() {
	rootCmd.AddCommand(jsonCmd)
	jsonCmd.Flags().StringP("file", "f", "<>", "json file")
}
|
package com.github.chen0040.leetcode.day14.easy;
import java.util.*;
/**
 * Created by xschen on 9/8/2017.
 *
 * summary:
 * Given a binary search tree with non-negative values, find the minimum absolute difference between values of any two nodes.
 *
 * link: https://leetcode.com/problems/minimum-absolute-difference-in-bst/description/
 */
public class MinimumAbsoluteDifferenceInBST {
   public class TreeNode {
      int val;
      TreeNode left;
      TreeNode right;
      TreeNode(int x) { val = x; }
   }

   public class Solution {
      /**
       * Returns the minimum absolute difference between values of any two
       * nodes, or 0 when the tree has fewer than two nodes.
       */
      public int getMinimumDifference(TreeNode root) {
         // In-order traversal of a BST yields values in non-decreasing
         // order, so adjacent entries already give every candidate
         // difference — no sort needed.
         List<Integer> values = new ArrayList<Integer>();
         collect(root, values);
         if (values.size() < 2) return 0;
         int minDiff = Integer.MAX_VALUE;
         for (int i = 1; i < values.size(); ++i) {
            // Bug fix: the previous Set-based version deduplicated values,
            // so a tree containing duplicates reported a nonzero minimum
            // instead of the correct 0.
            minDiff = Math.min(minDiff, values.get(i) - values.get(i - 1));
         }
         return minDiff;
      }

      // In-order traversal appending every node value, duplicates included.
      private void collect(TreeNode x, List<Integer> list) {
         if (x == null) {
            return;
         }
         collect(x.left, list);
         list.add(x.val);
         collect(x.right, list);
      }
   }
}
|
import React from 'react'
import SbEditable from 'storyblok-react'
import Feature from './Feature'
import Teaser from './Teaser'
import Carousel from './Carousel'
import Gallery from './Gallery'
import Article from './Article'
import Testimonial from './Testimonial'
import Image from './Image'
const Components = {
'feature': Feature,
'teaser': Teaser,
'carousel': Carousel,
'gallery': Gallery,
'article': Article,
'testimonial': Testimonial,
'image': Image,
}
const DynamicComponent = ({ blok }) => {
if (typeof Components[blok.component] !== 'undefined') {
const Component = Components[blok.component]
return (<SbEditable content={blok}><Component blok={blok} /></SbEditable>)
}
return (<p>The component <strong>{blok.component}</strong> has not been created yet.</p>)
}
export default DynamicComponent |
#!/bin/sh
# This file is licensed under the BSD-3-Clause license.
# See the AUTHORS and LICENSE files for more information.
# Spec for the IsTrue helper from zfsnap's core.sh.
# ItReturns and ExitTests are presumably defined by spec_helper.sh — each
# ItReturns line asserts the given command exits with the given status.
. ../spec_helper.sh
. ../../share/zfsnap/core.sh
ItReturns "IsTrue 'true'" 0
ItReturns "IsTrue 'false'" 1
ExitTests
|
#! /bin/bash
# Trains/evaluates several low-resource Tatoeba language pairs with label
# smoothing disabled, delegating the per-pair work to
# run_tatoeba_generic.sh (which reads src, trg, model_name,
# train_additional_args and corpora from this shell's environment).
base=/net/cephfs/scratch/mathmu/map-volatility
scripts=$base/scripts
# run some lower Tatoeba languages from
# https://github.com/Helsinki-NLP/Tatoeba-Challenge/blob/master/subsets/lower.md
# that have at least 1k dev and test data, and Xk training data
LANG_PAIRS=(
    "bel rus"
    "deu lat"
    "eng ido"
    "epo fas"
    "fra lat"
    "kaz rus"
    "lat por"
)
model_name="no_label_smoothing"
# without label smoothing
train_additional_args="--label-smoothing 0.0"
corpora="test slice-test"
for PAIR in "${LANG_PAIRS[@]}"; do
    # Re-assigning unquoted relies on word-splitting to turn "src trg"
    # into a two-element array.
    PAIR=($PAIR)
    src=${PAIR[0]}
    trg=${PAIR[1]}
    # Sourced (not executed) so the generic script sees the variables above.
    . $scripts/tatoeba/run_tatoeba_generic.sh
done
|
REM pseudoRecords.sql
REM Chapter 11, Oracle9i PL/SQL Programming by <NAME>
REM This trigger shows that :old and :new are pseudo-records.
REM NOTE: the whole-record assignment below is deliberately illegal; the
REM trailing "show errors" displays the compilation error this produces.
set echo on
CREATE OR REPLACE TRIGGER TempDelete
  BEFORE DELETE ON temp_table
  FOR EACH ROW
DECLARE
  v_TempRec temp_table%ROWTYPE;
BEGIN
  /* This is not a legal assignment, since :old is not truly
     a record. */
  v_TempRec := :old;
  /* We can accomplish the same thing, however, by assigning
     the fields individually. */
  v_TempRec.char_col := :old.char_col;
  v_TempRec.num_col := :old.num_col;
END TempDelete;
/
show errors
|
#!/bin/bash
# Demo of getopts option parsing: -a and -b take an argument; -c, -d, -e
# and -f are plain flags.
#
# Fixes vs. the original:
# * -c echoed "$OPTARG" even though 'c' takes no argument (it was always
#   empty); -d/-e/-f were accepted by the optstring but silently ignored.
# * The leading ':' enables silent mode so OPTARG is actually populated in
#   the \? (invalid option) and : (missing argument) branches; without it,
#   getopts prints its own error and leaves OPTARG unset.
while getopts ":a:b:cdef" opt; do
  case $opt in
    a)
      echo "this is -a the arg is ! $OPTARG"
      ;;
    b)
      echo "this is -b the arg is ! $OPTARG"
      ;;
    c|d|e|f)
      # Flag options carry no argument.
      echo "this is -$opt"
      ;;
    :)
      echo "Option -$OPTARG requires an argument"
      ;;
    \?)
      echo "Invalid option: -$OPTARG"
      ;;
  esac
done
|
<filename>mojo/public/python/mojo/bindings/reflection.py
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The metaclasses used by the mojo python bindings."""
import itertools
import logging
import sys
# pylint: disable=F0401
import mojo.bindings.messaging as messaging
import mojo.bindings.promise as promise
import mojo.bindings.serialization as serialization
class MojoEnumType(type):
  """Meta class for enumerations.

  Usage:
    class MyEnum(object):
      __metaclass__ = MojoEnumType
      VALUES = [
        ('A', 0),
        'B',
        ('C', 5),
      ]

      This will define an enum with 3 values, 'A' = 0, 'B' = 1 and 'C' = 5.
  """

  def __new__(mcs, name, bases, dictionary):
    dictionary['__slots__'] = ()
    dictionary['__new__'] = None
    next_value = 0
    for value in dictionary.pop('VALUES', []):
      if isinstance(value, str):
        # Bug fix: bare-string entries were documented above but rejected
        # with ValueError. A bare string now takes the next sequential
        # value ('B' above becomes 1), as the docstring always promised.
        value = (value, next_value)
      if not isinstance(value, tuple):
        raise ValueError('incorrect value: %r' % value)
      key, enum_value = value
      if isinstance(key, str) and isinstance(enum_value, int):
        dictionary[key] = enum_value
        next_value = enum_value + 1
      else:
        raise ValueError('incorrect value: %r' % value)
    return type.__new__(mcs, name, bases, dictionary)

  def __setattr__(mcs, key, value):
    # Enum values are constants; reject mutation after class creation.
    raise AttributeError("can't set attribute")

  def __delattr__(mcs, key):
    raise AttributeError("can't delete attribute")
class MojoStructType(type):
  """Meta class for structs.
  Usage:
    class MyStruct(object):
      __metaclass__ = MojoStructType
      DESCRIPTOR = {
        'constants': {
          'C1': 1,
          'C2': 2,
        },
        'enums': {
          'ENUM1': [
            ('V1', 1),
            'V2',
          ],
          'ENUM2': [
            ('V1', 1),
            'V2',
          ],
        },
        'fields': [
          SingleFieldGroup('x', _descriptor.TYPE_INT32, 0, 0),
        ],
      }
  This will define a struct, with:
  - 2 constants 'C1' and 'C2';
  - 2 enums 'ENUM1' and 'ENUM2', each of those having 2 values, 'V1' and
    'V2';
  - 1 int32 field named 'x'.
  """
  def __new__(mcs, name, bases, dictionary):
    # NOTE(review): ('_fields') is a plain string, not a 1-tuple; __slots__
    # accepts a single string, so this still declares exactly one slot.
    dictionary['__slots__'] = ('_fields')
    descriptor = dictionary.pop('DESCRIPTOR', {})
    # Add constants
    dictionary.update(descriptor.get('constants', {}))
    # Add enums: each entry becomes a nested MojoEnumType class attribute.
    enums = descriptor.get('enums', {})
    for key in enums:
      dictionary[key] = MojoEnumType(key, (object,), { 'VALUES': enums[key] })
    # Add fields: flatten the groups' descriptors and order by declared
    # index, exposing each as a lazily-defaulting property.
    groups = descriptor.get('fields', [])
    fields = list(
        itertools.chain.from_iterable([group.descriptors for group in groups]))
    fields.sort(key=lambda f: f.index)
    for field in fields:
      dictionary[field.name] = _BuildProperty(field)
    # Add init
    dictionary['__init__'] = _StructInit(fields)
    # Add serialization method. The Serialization object is captured by the
    # closures below and shared by Serialize and Deserialize.
    serialization_object = serialization.Serialization(groups)
    def Serialize(self, handle_offset=0):
      return serialization_object.Serialize(self, handle_offset)
    dictionary['Serialize'] = Serialize
    # pylint: disable=W0212
    def AsDict(self):
      # Returns the backing dict itself (not a copy); mutations are visible.
      return self._fields
    dictionary['AsDict'] = AsDict
    # Deserialize bypasses __init__ via cls.__new__ and fills _fields
    # directly from the wire data.
    def Deserialize(cls, data, handles):
      result = cls.__new__(cls)
      fields = {}
      serialization_object.Deserialize(fields, data, handles)
      result._fields = fields
      return result
    dictionary['Deserialize'] = classmethod(Deserialize)
    dictionary['__eq__'] = _StructEq(fields)
    dictionary['__ne__'] = _StructNe
    return type.__new__(mcs, name, bases, dictionary)
  # Prevent adding new attributes, or mutating constants.
  def __setattr__(mcs, key, value):
    raise AttributeError, 'can\'t set attribute'
  # Prevent deleting constants.
  def __delattr__(mcs, key):
    raise AttributeError, 'can\'t delete attribute'
class MojoInterfaceType(type):
  """Meta class for interfaces.
  Usage:
    class MyInterface(object):
      __metaclass__ = MojoInterfaceType
      DESCRIPTOR = {
        'client': MyInterfaceClient,
        'methods': [
          {
            'name': 'FireAndForget',
            'ordinal': 0,
            'parameters': [
              SingleFieldGroup('x', _descriptor.TYPE_INT32, 0, 0),
            ]
          },
          {
            'name': 'Ping',
            'ordinal': 1,
            'parameters': [
              SingleFieldGroup('x', _descriptor.TYPE_INT32, 0, 0),
            ],
            'responses': [
              SingleFieldGroup('x', _descriptor.TYPE_INT32, 0, 0),
            ],
          },
        ],
      }
  """
  def __new__(mcs, name, bases, dictionary):
    # If one of the base classes is already an interface type, do not edit
    # the class (the runtime-generated Proxy subclasses hit this path).
    for base in bases:
      if isinstance(base, mcs):
        return type.__new__(mcs, name, bases, dictionary)
    descriptor = dictionary.pop('DESCRIPTOR', {})
    methods = [_MethodDescriptor(x) for x in descriptor.get('methods', [])]
    # Each declared method starts as a _NotImplemented placeholder;
    # implementations override them, proxies replace them per-instance.
    for method in methods:
      dictionary[method.name] = _NotImplemented
    client_class_getter = descriptor.get('client', None)
    interface_manager = InterfaceManager(name, methods, client_class_getter)
    dictionary.update({
        'client': None,
        'manager': None,
        '_interface_manager': interface_manager,
    })
    interface_class = type.__new__(mcs, name, bases, dictionary)
    # Back-reference so the manager can build Proxy subclasses of this class.
    interface_manager.interface_class = interface_class
    return interface_class
  @property
  def manager(mcs):
    # Exposed as a class-level property: MyInterface.manager.
    return mcs._interface_manager
  # Prevent adding new attributes, or mutating constants.
  def __setattr__(mcs, key, value):
    raise AttributeError, 'can\'t set attribute'
  # Prevent deleting constants.
  def __delattr__(mcs, key):
    raise AttributeError, 'can\'t delete attribute'
class InterfaceProxy(object):
  """
  A proxy allows to access a remote interface through a message pipe.
  """
  # Marker base class: concrete proxy types are generated at runtime by
  # InterfaceManager._InternalProxy, which mixes this class in so proxies
  # can be recognized with isinstance checks.
  pass
class InterfaceRequest(object):
  """Wraps a message-pipe handle carrying a request for a remote interface.

  The pipe can be taken exactly once via PassMessagePipe, after which this
  request no longer owns a handle.
  """
  def __init__(self, handle):
    self._handle = handle

  def IsPending(self):
    """Returns whether the underlying handle is still valid."""
    return self._handle.IsValid()

  def PassMessagePipe(self):
    """Relinquishes and returns the handle, leaving this request empty."""
    handle, self._handle = self._handle, None
    return handle
class InterfaceManager(object):
  """
  Manager for an interface class. The manager contains the operations that
  allow to bind an implementation to a pipe, or to generate a proxy for an
  interface over a pipe.
  """
  def __init__(self, name, methods, client_class_getter):
    self.name = name
    self.methods = methods
    # Filled in by MojoInterfaceType.__new__ right after construction.
    self.interface_class = None
    self._client_class_getter = client_class_getter
    self._client_manager = None
    self._client_manager_computed = False
    # Proxy and stub classes are built lazily and cached (see below).
    self._proxy_class = None
    self._stub_class = None
  @property
  def client_manager(self):
    # Resolve the client interface's manager at most once; the getter is a
    # callable to break circular references between interface definitions.
    if not self._client_manager_computed:
      self._client_manager_computed = True
      if self._client_class_getter:
        self._client_manager = self._client_class_getter().manager
    return self._client_manager
  def Proxy(self, handle):
    """Returns a proxy for the remote end of |handle|."""
    router = messaging.Router(handle)
    error_handler = _ProxyErrorHandler()
    router.SetErrorHandler(error_handler)
    router.Start()
    return self._InternalProxy(router, error_handler)
  # pylint: disable=W0212
  def Bind(self, impl, handle):
    """Binds |impl| to |handle| so incoming messages are dispatched to it."""
    router = messaging.Router(handle)
    router.SetIncomingMessageReceiver(self._Stub(impl))
    error_handler = _ProxyErrorHandler()
    router.SetErrorHandler(error_handler)
    # Retain the router until an error happens.
    retainer = _Retainer(router)
    def Cleanup(_):
      retainer.release()
    error_handler.AddCallback(Cleanup)
    if self.client_manager:
      impl.client = self.client_manager._InternalProxy(router, error_handler)
    # Give an instance manager to the implementation to allow it to close
    # the connection.
    impl.manager = InstanceManager(router)
    router.Start()
  def _InternalProxy(self, router, error_handler):
    # Build (once) a '<Name>Proxy' subclass whose methods send messages
    # over the router, then instantiate it for this connection.
    if not self._proxy_class:
      dictionary = {
          '__module__': __name__,
          '__init__': _ProxyInit,
      }
      if self.client_manager:
        dictionary['client'] = property(_ProxyGetClient, _ProxySetClient)
        dictionary['manager'] = None
        dictionary['_client_manager'] = self.client_manager
      for method in self.methods:
        dictionary[method.name] = _ProxyMethodCall(method)
      self._proxy_class = type('%sProxy' % self.name,
                               (self.interface_class, InterfaceProxy),
                               dictionary)
    proxy = self._proxy_class(router, error_handler)
    # Give an instance manager to the proxy to allow to close the connection.
    proxy.manager = InstanceManager(router)
    return proxy
  def _Stub(self, impl):
    # Build (once) a '<Name>Stub' receiver class that decodes incoming
    # messages and dispatches them to an implementation object.
    if not self._stub_class:
      accept_method = _StubAccept(self.methods)
      dictionary = {
          '__module__': __name__,
          '__init__': _StubInit,
          'Accept': accept_method,
          'AcceptWithResponder': accept_method,
      }
      self._stub_class = type('%sStub' % self.name,
                              (messaging.MessageReceiverWithResponder,),
                              dictionary)
    return self._stub_class(impl)
class InstanceManager(object):
  """
  Manager for the implementation of an interface or a proxy. The manager
  allows to control the connection over the pipe.
  """
  def __init__(self, router):
    self.router = router

  def PassMessagePipe(self):
    """Detaches the underlying message pipe from the router and returns it."""
    return self.router.PassMessagePipe()

  def Close(self):
    """Shuts down the router, closing its message pipe."""
    self.router.Close()
class _MethodDescriptor(object):
  """Runtime description of one interface method.

  Wraps a method dict from an interface DESCRIPTOR, exposing its name,
  wire ordinal, and generated parameter/response struct classes
  (response_struct is None for fire-and-forget methods).
  """
  def __init__(self, descriptor):
    self.name = descriptor['name']
    self.ordinal = descriptor['ordinal']
    self.parameters_struct = _ConstructParameterStruct(
        descriptor['parameters'], self.name, "Parameters")
    # 'responses' is optional; .get() yields None, which
    # _ConstructParameterStruct passes through.
    self.response_struct = _ConstructParameterStruct(
        descriptor.get('responses'), self.name, "Responses")
def _ConstructParameterStruct(descriptor, name, suffix):
  """Builds a MojoStructType class named '<name><suffix>' for |descriptor|.

  Returns None when |descriptor| is None (method without responses).
  """
  if descriptor is None:
    return None
  parameter_dictionary = {
      '__metaclass__': MojoStructType,
      '__module__': __name__,
      'DESCRIPTOR': descriptor,
  }
  # The metaclass is invoked directly (rather than via class syntax) so the
  # struct class can be created dynamically per method.
  return MojoStructType(
      '%s%s' % (name, suffix),
      (object,),
      parameter_dictionary)
class _ProxyErrorHandler(messaging.ConnectionErrorHandler):
  """Fans a connection error out to registered callbacks, exactly once."""
  def __init__(self):
    messaging.ConnectionErrorHandler.__init__(self)
    self._callbacks = set()
  def OnError(self, result):
    exception = messaging.MessagingException('Mojo error: %d' % result)
    # Iterate over a copy: callbacks may remove themselves while running.
    for callback in list(self._callbacks):
      callback(exception)
    # None marks the handler as spent; later Add/Remove calls are no-ops.
    self._callbacks = None
  def AddCallback(self, callback):
    if self._callbacks is not None:
      self._callbacks.add(callback)
  def RemoveCallback(self, callback):
    if self._callbacks:
      self._callbacks.remove(callback)
class _Retainer(object):
# Set to force instances to be retained.
_RETAINED = set()
def __init__(self, retained):
self._retained = retained
_Retainer._RETAINED.add(self)
def release(self):
self._retained = None
_Retainer._RETAINED.remove(self)
def _StructInit(fields):
  """Builds the __init__ used by generated struct classes.

  Positional arguments map onto |fields| in index order; keyword arguments
  are matched by field name. Too many arguments, duplicate values for a
  field, or unknown names raise TypeError, mirroring Python's own errors
  for ordinary call signatures.
  """
  def _Init(self, *args, **kwargs):
    if len(args) + len(kwargs) > len(fields):
      raise TypeError('__init__() takes %d argument (%d given)' %
                      (len(fields), len(args) + len(kwargs)))
    self._fields = {}
    # Assign via the field properties so values go through type conversion.
    for f, a in zip(fields, args):
      self.__setattr__(f.name, a)
    # Only fields not already consumed positionally may appear as keywords.
    remaining_fields = set(x.name for x in fields[len(args):])
    for name in kwargs:
      if not name in remaining_fields:
        if name in (x.name for x in fields[:len(args)]):
          raise TypeError(
              '__init__() got multiple values for keyword argument %r' % name)
        raise TypeError('__init__() got an unexpected keyword argument %r' %
                        name)
      self.__setattr__(name, kwargs[name])
  return _Init
def _BuildProperty(field):
"""Build the property for the given field."""
# pylint: disable=W0212
def Get(self):
if field.name not in self._fields:
self._fields[field.name] = field.GetDefaultValue()
return self._fields[field.name]
# pylint: disable=W0212
def Set(self, value):
self._fields[field.name] = field.field_type.Convert(value)
return property(Get, Set)
def _StructEq(fields):
def _Eq(self, other):
if type(self) is not type(other):
return False
for field in fields:
if getattr(self, field.name) != getattr(other, field.name):
return False
return True
return _Eq
def _StructNe(self, other):
  # __ne__ shared by all generated structs: defined in terms of __eq__ so
  # the two operators can never disagree.
  return not self.__eq__(other)
def _ProxyInit(self, router, error_handler):
  # __init__ installed on runtime-generated proxy classes (see
  # InterfaceManager._InternalProxy). Stores the connection's router and
  # shared error handler; the client proxy is attached later, if any.
  self._router = router
  self._error_handler = error_handler
  self._client = None
# pylint: disable=W0212
def _ProxyGetClient(self):
  # Getter half of the 'client' property on generated proxy classes.
  return self._client
# pylint: disable=W0212
def _ProxySetClient(self, client):
  # Setter half of the 'client' property: wires |client|'s stub into the
  # router so messages arriving on this pipe are dispatched to it.
  self._client = client
  stub = self._client_manager._Stub(client)
  self._router.SetIncomingMessageReceiver(stub)
# pylint: disable=W0212
def _ProxyMethodCall(method):
  """Builds the proxy-side implementation of |method|.

  The returned _Call serializes its arguments into a message, sends it over
  the proxy's router, and returns a Promise that resolves with the response
  (for methods with responses) or with None (fire-and-forget), and rejects
  on send failure or connection error.
  """
  flags = messaging.NO_FLAG
  if method.response_struct:
    flags = messaging.MESSAGE_EXPECTS_RESPONSE_FLAG
  def _Call(self, *args, **kwargs):
    def GenerationMethod(resolve, reject):
      message = _GetMessage(method, flags, *args, **kwargs)
      if method.response_struct:
        def Accept(message):
          try:
            assert message.header.message_type == method.ordinal
            payload = message.payload
            response = method.response_struct.Deserialize(payload.data,
                                                          payload.handles)
            # Single-field responses are unwrapped to the bare value
            # (unless that value is itself a dict).
            as_dict = response.AsDict()
            if len(as_dict) == 1:
              value = as_dict.values()[0]
              if not isinstance(value, dict):
                response = value
            resolve(response)
            return True
          except Exception as e:
            # Adding traceback similarly to python 3.0 (pep-3134)
            e.__traceback__ = sys.exc_info()[2]
            reject(e)
            return False
          finally:
            # The connection-error callback is no longer needed once the
            # response has been handled either way.
            self._error_handler.RemoveCallback(reject)
        # Reject the promise if the connection dies while waiting.
        self._error_handler.AddCallback(reject)
        if not self._router.AcceptWithResponder(
            message, messaging.ForwardingMessageReceiver(Accept)):
          self._error_handler.RemoveCallback(reject)
          reject(messaging.MessagingException("Unable to send message."))
      else:
        if (self._router.Accept(message)):
          resolve(None)
        else:
          reject(messaging.MessagingException("Unable to send message."))
    return promise.Promise(GenerationMethod)
  return _Call
def _GetMessage(method, flags, *args, **kwargs):
  """Serializes a call (or response) to |method| into a messaging.Message.

  The response struct is used when |flags| marks the message as a response;
  otherwise the parameters struct is used. Remaining arguments populate
  that struct's fields.
  """
  if flags == messaging.MESSAGE_IS_RESPONSE_FLAG:
    struct = method.response_struct(*args, **kwargs)
  else:
    struct = method.parameters_struct(*args, **kwargs)
  header = messaging.MessageHeader(method.ordinal, flags)
  data = header.Serialize()
  (payload, handles) = struct.Serialize()
  # Wire format: serialized header immediately followed by the payload.
  data.extend(payload)
  return messaging.Message(data, handles, header)
def _StubInit(self, impl):
  # __init__ installed on runtime-generated stub classes (see
  # InterfaceManager._Stub); |impl| receives the decoded method calls.
  self.impl = impl
def _StubAccept(methods):
  """Builds the Accept/AcceptWithResponder method for generated stubs.

  The returned Accept decodes an incoming message, dispatches it to the
  matching method on self.impl (looked up by wire ordinal), and — when a
  response is expected — sends the (possibly Promise-wrapped) return value
  back through |responder|. Returns True on success, False on any failure.
  """
  methods_by_ordinal = dict((m.ordinal, m) for m in methods)
  def Accept(self, message, responder=None):
    try:
      header = message.header
      # A responder must be present exactly when the caller expects one.
      assert header.expects_response == bool(responder)
      assert header.message_type in methods_by_ordinal
      method = methods_by_ordinal[header.message_type]
      payload = message.payload
      parameters = method.parameters_struct.Deserialize(
          payload.data, payload.handles).AsDict()
      # Decoded fields are passed to the implementation as keyword args.
      response = getattr(self.impl, method.name)(**parameters)
      if header.expects_response:
        def SendResponse(response):
          # A dict response maps to named response fields; anything else is
          # treated as the single (positional) response value.
          if isinstance(response, dict):
            response_message = _GetMessage(method,
                                           messaging.MESSAGE_IS_RESPONSE_FLAG,
                                           **response)
          else:
            response_message = _GetMessage(method,
                                           messaging.MESSAGE_IS_RESPONSE_FLAG,
                                           response)
          # Echo the request id so the caller can correlate the response.
          response_message.header.request_id = header.request_id
          responder.Accept(response_message)
        p = promise.Promise.Resolve(response).Then(SendResponse)
        if self.impl.manager:
          # Close the connection in case of error.
          p.Catch(lambda _: self.impl.manager.Close())
      return True
    # pylint: disable=W0702
    except:
      # Close the connection in case of error.
      logging.warning(
          'Error occured in accept method. Connection will be closed.')
      if self.impl.manager:
        self.impl.manager.Close()
      return False
  return Accept
def _NotImplemented(*_1, **_2):
raise NotImplementedError()
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
#
# Expects PLACEHOLDER, WIDTH, HEIGHT and PREFIX to be set in the
# environment — they are interpolated into the generated page below —
# and requires write access to /var/www/html.
cat << EOM > /var/www/html/index.html
<html>
  <head><title>Meow!</title></head>
  <body>
  <div style="width:800px;margin: 0 auto">

  <!-- BEGIN -->
  <center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
  <center><h2>Meow World!</h2></center>
  Welcome to ${PREFIX}'s app. WE are here to serve you.
  <!-- END -->

  </div>
  </body>
</html>
EOM
echo "Script complete."
|
// Content model for the hero/showcase banner advertising Red Dead Redemption 2.
// Consumed by the showcase component; `alignment` selects the text column side.
const showcase = {
  // Fixed typo: was "Read Dead" — the image (/img/red-dead-img.png) and link
  // (/rdr2) show this banner is for "Red Dead".
  title: 'Red Dead is back!',
  subtitle: "Come see John's new adventures",
  backgroundImage: '/img/red-dead-img.png',
  buttonLabel: 'Buy now',
  buttonLink: '/rdr2',
  alignment: 'right',
};

export default showcase;
|
#!/bin/bash
#
# provision.sh
#
# This file is specified in Vagrantfile and is loaded by Vagrant as the primary
# provisioning script whenever the commands `vagrant up`, `vagrant provision`,
# or `vagrant reload` are used. It provides all of the default packages and
# configurations included with Varying Vagrant Vagrants.

# By storing the date now, we can calculate the duration of provisioning at the
# end of this script.
start_seconds="$(date +%s)"

# Network Detection
#
# Make an HTTP request to google.com to determine if outside access is available
# to us. If 3 attempts with a timeout of 5 seconds are not successful, then we'll
# skip a few things further in provisioning rather than create a bunch of errors.
if [[ "$(wget --tries=3 --timeout=5 --spider http://google.com 2>&1 | grep 'connected')" ]]; then
  echo "Network connection detected..."
  ping_result="Connected"
else
  echo "Network connection not detected. Unable to reach google.com..."
  ping_result="Not Connected"
fi

# PACKAGE INSTALLATION
#
# Build a bash array to pass all of the packages we want to install to a single
# apt-get command. This avoids doing all the leg work each time a package is
# set to install. It also allows us to easily comment out or add single
# packages. We set the array as empty to begin with so that we can append
# individual packages to it as required.
apt_package_install_list=()

# Start with a bash array containing all packages we want to install in the
# virtual machine. We'll then loop through each of these and check individual
# status before adding them to the apt_package_install_list array.
apt_package_check_list=(

  # PHP5
  #
  # Our base packages for php5. As long as php5-fpm and php5-cli are
  # installed, there is no need to install the general php5 package, which
  # can sometimes install apache as a requirement.
  php5-fpm
  php5-cli

  # Common and dev packages for php
  php5-common
  php5-dev

  # Extra PHP modules that we find useful
  php5-memcache
  php5-imagick
  php5-mcrypt
  php5-mysql
  php5-imap
  php5-curl
  php-pear
  php5-gd

  # nginx is installed as the default web server
  nginx

  # memcached is made available for object caching
  memcached

  # mysql is the default database
  mysql-server

  # other packages that come in handy
  imagemagick
  subversion
  git-core
  zip
  unzip
  ngrep
  curl
  make
  vim
  colordiff
  postfix

  # Req'd for i18n tools
  gettext

  # Req'd for Webgrind
  graphviz

  # dos2unix
  # Allows conversion of DOS style line endings to something we'll have less
  # trouble with in Linux.
  dos2unix

  # nodejs for use by grunt
  g++
  nodejs

)
echo "Check for apt packages to install..."

# Loop through each of our packages that should be installed on the system. If
# not yet installed, it should be added to the array of packages to install.
for pkg in "${apt_package_check_list[@]}"; do
  # dpkg -s reports package status; a non-empty Version field means installed.
  package_version="$(dpkg -s $pkg 2>&1 | grep 'Version:' | cut -d " " -f 2)"
  if [[ -n "${package_version}" ]]; then
    # Compute padding widths so "name    version" pairs align in the output.
    space_count="$(expr 20 - "${#pkg}")" #11
    pack_space_count="$(expr 30 - "${#package_version}")"
    real_space="$(expr ${space_count} + ${pack_space_count} + ${#package_version})"
    printf " * $pkg %${real_space}.${#package_version}s ${package_version}\n"
  else
    echo " *" $pkg [not installed]
    apt_package_install_list+=($pkg)
  fi
done

# MySQL
#
# Use debconf-set-selections to specify the default password for the root MySQL
# account. This runs on every provision, even if MySQL has been installed. If
# MySQL is already installed, it will not affect anything.
echo mysql-server mysql-server/root_password password root | debconf-set-selections
echo mysql-server mysql-server/root_password_again password root | debconf-set-selections

# Postfix
#
# Use debconf-set-selections to specify the selections in the postfix setup. Set
# up as an 'Internet Site' with the host name 'vvv'. Note that if your current
# Internet connection does not allow communication over port 25, you will not be
# able to send mail, even with postfix installed.
echo postfix postfix/main_mailer_type select Internet Site | debconf-set-selections
echo postfix postfix/mailname string vvv | debconf-set-selections

# Disable ipv6 as some ISPs/mail servers have problems with it
echo "inet_protocols = ipv4" >> /etc/postfix/main.cf

# Provide our custom apt sources before running `apt-get update`
ln -sf /srv/config/apt-source-append.list /etc/apt/sources.list.d/vvv-sources.list
echo "Linked custom apt sources"
if [[ $ping_result == "Connected" ]]; then
  # If there are any packages to be installed in the apt_package_list array,
  # then we'll run `apt-get update` and then `apt-get install` to proceed.
  if [[ ${#apt_package_install_list[@]} = 0 ]]; then
    echo -e "No apt packages to install.\n"
  else
    # Before running `apt-get update`, we should add the public keys for
    # the packages that we are installing from non standard sources via
    # our appended apt source.list

    # Retrieve the Nginx signing key from nginx.org
    echo "Applying Nginx signing key..."
    wget --quiet http://nginx.org/keys/nginx_signing.key -O- | apt-key add -

    # Apply the nodejs assigning key
    echo "Applying nodejs signing key..."
    apt-key adv --quiet --keyserver hkp://keyserver.ubuntu.com:80 --recv-key C7917B12 2>&1 | grep "gpg:"
    apt-key export C7917B12 | apt-key add -

    # update all of the package references before installing anything
    echo "Running apt-get update..."
    apt-get update --assume-yes

    # install required packages
    echo "Installing apt-get packages..."
    apt-get install --assume-yes ${apt_package_install_list[@]}

    # Clean up apt caches
    apt-get clean
  fi

  # Make sure we have the latest npm version
  npm install -g npm

  # xdebug
  #
  # XDebug 2.2.3 is provided with the Ubuntu install by default. The PECL
  # installation allows us to use a later version. Not specifying a version
  # will load the latest stable.
  pecl install xdebug

  # ack-grep
  #
  # Install ack-rep directory from the version hosted at beyondgrep.com as the
  # PPAs for Ubuntu Precise are not available yet.
  if [[ -f /usr/bin/ack ]]; then
    echo "ack-grep already installed"
  else
    echo "Installing ack-grep as ack"
    curl -s http://beyondgrep.com/ack-2.04-single-file > /usr/bin/ack && chmod +x /usr/bin/ack
  fi

  # COMPOSER
  #
  # Install Composer if it is not yet available.
  if [[ ! -n "$(composer --version --no-ansi | grep 'Composer version')" ]]; then
    echo "Installing Composer..."
    curl -sS https://getcomposer.org/installer | php
    chmod +x composer.phar
    mv composer.phar /usr/local/bin/composer
  fi

  # Update both Composer and any global packages. Updates to Composer are direct from
  # the master branch on its GitHub repository.
  if [[ -n "$(composer --version --no-ansi | grep 'Composer version')" ]]; then
    echo "Updating Composer..."
    COMPOSER_HOME=/usr/local/src/composer composer self-update
    COMPOSER_HOME=/usr/local/src/composer composer -q global require --no-update phpunit/phpunit:4.3.*
    COMPOSER_HOME=/usr/local/src/composer composer -q global require --no-update phpunit/php-invoker:1.1.*
    COMPOSER_HOME=/usr/local/src/composer composer -q global require --no-update mockery/mockery:0.9.*
    COMPOSER_HOME=/usr/local/src/composer composer -q global require --no-update d11wtq/boris:v1.0.8
    COMPOSER_HOME=/usr/local/src/composer composer -q global config bin-dir /usr/local/bin
    COMPOSER_HOME=/usr/local/src/composer composer global update
  fi

  # Grunt
  #
  # Install or Update Grunt based on current state. Updates are direct
  # from NPM
  if [[ "$(grunt --version)" ]]; then
    echo "Updating Grunt CLI"
    npm update -g grunt-cli &>/dev/null
    npm update -g grunt-sass &>/dev/null
    npm update -g grunt-cssjanus &>/dev/null
  else
    echo "Installing Grunt CLI"
    npm install -g grunt-cli &>/dev/null
    npm install -g grunt-sass &>/dev/null
    npm install -g grunt-cssjanus &>/dev/null
  fi

  # Graphviz
  #
  # Set up a symlink between the Graphviz path defined in the default Webgrind
  # config and actual path.
  echo "Adding graphviz symlink for Webgrind..."
  ln -sf /usr/bin/dot /usr/local/bin/dot
else
  echo -e "\nNo network connection available, skipping package installation"
fi
# Configuration for nginx
#
# Generate a self-signed certificate (private key -> CSR -> signed cert) on
# first provision so nginx can serve HTTPS locally; each step is skipped when
# its output file already exists.
if [[ ! -e /etc/nginx/server.key ]]; then
  echo "Generate Nginx server private key..."
  vvvgenrsa="$(openssl genrsa -out /etc/nginx/server.key 2048 2>&1)"
  echo $vvvgenrsa
fi
if [[ ! -e /etc/nginx/server.csr ]]; then
  echo "Generate Certificate Signing Request (CSR)..."
  openssl req -new -batch -key /etc/nginx/server.key -out /etc/nginx/server.csr
fi
if [[ ! -e /etc/nginx/server.crt ]]; then
  echo "Sign the certificate using the above private key and CSR..."
  vvvsigncert="$(openssl x509 -req -days 365 -in /etc/nginx/server.csr -signkey /etc/nginx/server.key -out /etc/nginx/server.crt 2>&1)"
  echo $vvvsigncert
fi

echo -e "\nSetup configuration files..."

# Used to to ensure proper services are started on `vagrant up`
cp /srv/config/init/vvv-start.conf /etc/init/vvv-start.conf
echo " * /srv/config/init/vvv-start.conf -> /etc/init/vvv-start.conf"

# Copy nginx configuration from local
cp /srv/config/nginx-config/nginx.conf /etc/nginx/nginx.conf
cp /srv/config/nginx-config/nginx-wp-common.conf /etc/nginx/nginx-wp-common.conf
if [[ ! -d /etc/nginx/custom-sites ]]; then
  mkdir /etc/nginx/custom-sites/
fi
# rsync with --delete keeps custom-sites an exact mirror of the source dir.
rsync -rvzh --delete /srv/config/nginx-config/sites/ /etc/nginx/custom-sites/
echo " * /srv/config/nginx-config/nginx.conf -> /etc/nginx/nginx.conf"
echo " * /srv/config/nginx-config/nginx-wp-common.conf -> /etc/nginx/nginx-wp-common.conf"
echo " * /srv/config/nginx-config/sites/ -> /etc/nginx/custom-sites"

# Copy php-fpm configuration from local
cp /srv/config/php5-fpm-config/php5-fpm.conf /etc/php5/fpm/php5-fpm.conf
cp /srv/config/php5-fpm-config/www.conf /etc/php5/fpm/pool.d/www.conf
cp /srv/config/php5-fpm-config/php-custom.ini /etc/php5/fpm/conf.d/php-custom.ini
cp /srv/config/php5-fpm-config/opcache.ini /etc/php5/fpm/conf.d/opcache.ini
cp /srv/config/php5-fpm-config/xdebug.ini /etc/php5/mods-available/xdebug.ini

# Find the path to Xdebug and prepend it to xdebug.ini
XDEBUG_PATH=$( find /usr -name 'xdebug.so' | head -1 )
sed -i "1izend_extension=\"$XDEBUG_PATH\"" /etc/php5/mods-available/xdebug.ini

echo " * /srv/config/php5-fpm-config/php5-fpm.conf -> /etc/php5/fpm/php5-fpm.conf"
echo " * /srv/config/php5-fpm-config/www.conf -> /etc/php5/fpm/pool.d/www.conf"
echo " * /srv/config/php5-fpm-config/php-custom.ini -> /etc/php5/fpm/conf.d/php-custom.ini"
echo " * /srv/config/php5-fpm-config/opcache.ini -> /etc/php5/fpm/conf.d/opcache.ini"
echo " * /srv/config/php5-fpm-config/xdebug.ini -> /etc/php5/mods-available/xdebug.ini"

# Copy memcached configuration from local
cp /srv/config/memcached-config/memcached.conf /etc/memcached.conf
echo " * /srv/config/memcached-config/memcached.conf -> /etc/memcached.conf"

# Copy custom dotfiles and bin file for the vagrant user from local
cp /srv/config/bash_profile /home/vagrant/.bash_profile
cp /srv/config/bash_aliases /home/vagrant/.bash_aliases
cp /srv/config/vimrc /home/vagrant/.vimrc
if [[ ! -d /home/vagrant/.subversion ]]; then
  mkdir /home/vagrant/.subversion
fi
cp /srv/config/subversion-servers /home/vagrant/.subversion/servers
if [[ ! -d /home/vagrant/bin ]]; then
  mkdir /home/vagrant/bin
fi
rsync -rvzh --delete /srv/config/homebin/ /home/vagrant/bin/
echo " * /srv/config/bash_profile -> /home/vagrant/.bash_profile"
echo " * /srv/config/bash_aliases -> /home/vagrant/.bash_aliases"
echo " * /srv/config/vimrc -> /home/vagrant/.vimrc"
echo " * /srv/config/subversion-servers -> /home/vagrant/.subversion/servers"
echo " * /srv/config/homebin -> /home/vagrant/bin"

# If a bash_prompt file exists in the VVV config/ directory, copy to the VM.
if [[ -f /srv/config/bash_prompt ]]; then
  cp /srv/config/bash_prompt /home/vagrant/.bash_prompt
  echo " * /srv/config/bash_prompt -> /home/vagrant/.bash_prompt"
fi
# RESTART SERVICES
#
# Make sure the services we expect to be running are running.
echo -e "\nRestart services..."
service nginx restart
service memcached restart

# Disable PHP Xdebug module by default
php5dismod xdebug
service php5-fpm restart

# If MySQL is installed, go through the various imports and service tasks.
exists_mysql="$(service mysql status)"
if [[ "mysql: unrecognized service" != "${exists_mysql}" ]]; then
  echo -e "\nSetup MySQL configuration file links..."

  # Copy mysql configuration from local
  cp /srv/config/mysql-config/my.cnf /etc/mysql/my.cnf
  cp /srv/config/mysql-config/root-my.cnf /home/vagrant/.my.cnf
  echo " * /srv/config/mysql-config/my.cnf -> /etc/mysql/my.cnf"
  echo " * /srv/config/mysql-config/root-my.cnf -> /home/vagrant/.my.cnf"

  # MySQL gives us an error if we restart a non running service, which
  # happens after a `vagrant halt`. Check to see if it's running before
  # deciding whether to start or restart.
  if [[ "mysql stop/waiting" == "${exists_mysql}" ]]; then
    echo "service mysql start"
    service mysql start
  else
    echo "service mysql restart"
    service mysql restart
  fi

  # IMPORT SQL
  #
  # Create the databases (unique to system) that will be imported with
  # the mysqldump files located in database/backups/
  if [[ -f /srv/database/init-custom.sql ]]; then
    mysql -u root -proot < /srv/database/init-custom.sql
    echo -e "\nInitial custom MySQL scripting..."
  else
    echo -e "\nNo custom MySQL scripting found in database/init-custom.sql, skipping..."
  fi

  # Setup MySQL by importing an init file that creates necessary
  # users and databases that our vagrant setup relies on.
  mysql -u root -proot < /srv/database/init.sql
  echo "Initial MySQL prep..."

  # Process each mysqldump SQL file in database/backups to import
  # an initial data set for MySQL.
  /srv/database/import-sql.sh
else
  echo -e "\nMySQL is not installed. No databases imported."
fi

# Run wp-cli as vagrant user
#
# When provisioning runs as root, shadow `wp` with a function that re-invokes
# it as the vagrant user so files created by wp-cli are not owned by root.
if (( $EUID == 0 )); then
  wp() { sudo -EH -u vagrant -- wp "$@"; }
fi
if [[ $ping_result == "Connected" ]]; then

  # WP-CLI Install
  if [[ ! -d /srv/www/wp-cli ]]; then
    echo -e "\nDownloading wp-cli, see http://wp-cli.org"
    git clone git://github.com/wp-cli/wp-cli.git /srv/www/wp-cli
    cd /srv/www/wp-cli
    composer install
  else
    echo -e "\nUpdating wp-cli..."
    cd /srv/www/wp-cli
    git pull --rebase origin master
    composer update
  fi
  # Link `wp` to the `/usr/local/bin` directory
  ln -sf /srv/www/wp-cli/bin/wp /usr/local/bin/wp

  # Download and extract phpMemcachedAdmin to provide a dashboard view and
  # admin interface to the goings on of memcached when running
  if [[ ! -d /srv/www/default/memcached-admin ]]; then
    echo -e "\nDownloading phpMemcachedAdmin, see https://code.google.com/p/phpmemcacheadmin/"
    cd /srv/www/default
    wget -q -O phpmemcachedadmin.tar.gz 'https://phpmemcacheadmin.googlecode.com/files/phpMemcachedAdmin-1.2.2-r262.tar.gz'
    mkdir memcached-admin
    tar -xf phpmemcachedadmin.tar.gz --directory memcached-admin
    rm phpmemcachedadmin.tar.gz
  else
    echo "phpMemcachedAdmin already installed."
  fi

  # Checkout Opcache Status to provide a dashboard for viewing statistics
  # about PHP's built in opcache.
  if [[ ! -d /srv/www/default/opcache-status ]]; then
    echo -e "\nDownloading Opcache Status, see https://github.com/rlerdorf/opcache-status/"
    cd /srv/www/default
    git clone https://github.com/rlerdorf/opcache-status.git opcache-status
  else
    echo -e "\nUpdating Opcache Status"
    cd /srv/www/default/opcache-status
    git pull --rebase origin master
  fi

  # Webgrind install (for viewing callgrind/cachegrind files produced by
  # xdebug profiler)
  if [[ ! -d /srv/www/default/webgrind ]]; then
    echo -e "\nDownloading webgrind, see https://github.com/jokkedk/webgrind"
    git clone git://github.com/jokkedk/webgrind.git /srv/www/default/webgrind
  else
    echo -e "\nUpdating webgrind..."
    cd /srv/www/default/webgrind
    git pull --rebase origin master
  fi

  # PHP_CodeSniffer (for running WordPress-Coding-Standards)
  if [[ ! -d /srv/www/phpcs ]]; then
    echo -e "\nDownloading PHP_CodeSniffer (phpcs), see https://github.com/squizlabs/PHP_CodeSniffer"
    git clone git://github.com/squizlabs/PHP_CodeSniffer.git /srv/www/phpcs
  else
    cd /srv/www/phpcs
    # Only auto-update when the checkout is on master to avoid clobbering work.
    if [[ $(git rev-parse --abbrev-ref HEAD) == 'master' ]]; then
      echo -e "\nUpdating PHP_CodeSniffer (phpcs)..."
      git pull --no-edit origin master
    else
      echo -e "\nSkipped updating PHP_CodeSniffer since not on master branch"
    fi
  fi

  # Sniffs WordPress Coding Standards
  if [[ ! -d /srv/www/phpcs/CodeSniffer/Standards/WordPress ]]; then
    echo -e "\nDownloading WordPress-Coding-Standards, sniffs for PHP_CodeSniffer, see https://github.com/WordPress-Coding-Standards/WordPress-Coding-Standards"
    git clone git://github.com/WordPress-Coding-Standards/WordPress-Coding-Standards.git /srv/www/phpcs/CodeSniffer/Standards/WordPress
  else
    cd /srv/www/phpcs/CodeSniffer/Standards/WordPress
    if [[ $(git rev-parse --abbrev-ref HEAD) == 'master' ]]; then
      echo -e "\nUpdating PHP_CodeSniffer WordPress Coding Standards..."
      git pull --no-edit origin master
    else
      echo -e "\nSkipped updating PHPCS WordPress Coding Standards since not on master branch"
    fi
  fi

  # Install the standards in PHPCS
  /srv/www/phpcs/scripts/phpcs --config-set installed_paths ./CodeSniffer/Standards/WordPress/
  /srv/www/phpcs/scripts/phpcs -i

  # Install and configure the latest stable version of WordPress
  if [[ ! -d /srv/www/wordpress-default ]]; then
    echo "Downloading WordPress Stable, see http://wordpress.org/"
    cd /srv/www/
    curl -L -O https://wordpress.org/latest.tar.gz
    tar -xvf latest.tar.gz
    mv wordpress wordpress-default
    rm latest.tar.gz
    cd /srv/www/wordpress-default
    echo "Configuring WordPress Stable..."
    wp core config --dbname=wordpress_default --dbuser=wp --dbpass=wp --quiet --extra-php <<PHP
define( 'WP_DEBUG', true );
PHP
    wp core install --url=local.wordpress.dev --quiet --title="Local WordPress Dev" --admin_name=admin --admin_email="admin@local.dev" --admin_password="password"
  else
    echo "Updating WordPress Stable..."
    cd /srv/www/wordpress-default
    wp core upgrade
  fi

  # Test to see if an svn upgrade is needed
  svn_test=$( svn status -u /srv/www/wordpress-develop/ 2>&1 );
  if [[ $svn_test == *"svn upgrade"* ]]; then
    # If the wordpress-develop svn repo needed an upgrade, they probably all need it
    for repo in $(find /srv/www -maxdepth 5 -type d -name '.svn'); do
      svn upgrade "${repo/%\.svn/}"
    done
  fi;

  # Checkout, install and configure WordPress trunk via core.svn
  if [[ ! -d /srv/www/wordpress-trunk ]]; then
    echo "Checking out WordPress trunk from core.svn, see http://core.svn.wordpress.org/trunk"
    svn checkout http://core.svn.wordpress.org/trunk/ /srv/www/wordpress-trunk
    cd /srv/www/wordpress-trunk
    echo "Configuring WordPress trunk..."
    wp core config --dbname=wordpress_trunk --dbuser=wp --dbpass=wp --quiet --extra-php <<PHP
define( 'WP_DEBUG', true );
PHP
    wp core install --url=local.wordpress-trunk.dev --quiet --title="Local WordPress Trunk Dev" --admin_name=admin --admin_email="admin@local.dev" --admin_password="password"
  else
    echo "Updating WordPress trunk..."
    cd /srv/www/wordpress-trunk
    svn up --ignore-externals
  fi

  # Checkout, install and configure WordPress trunk via develop.svn
  if [[ ! -d /srv/www/wordpress-develop ]]; then
    echo "Checking out WordPress trunk from develop.svn, see http://develop.svn.wordpress.org/trunk"
    svn checkout http://develop.svn.wordpress.org/trunk/ /srv/www/wordpress-develop
    cd /srv/www/wordpress-develop/src/
    echo "Configuring WordPress develop..."
    wp core config --dbname=wordpress_develop --dbuser=wp --dbpass=wp --quiet --extra-php <<PHP
// Allow (src|build).wordpress-develop.dev to share the same database
if ( 'build' == basename( dirname( __FILE__) ) ) {
define( 'WP_HOME', 'http://build.wordpress-develop.dev' );
define( 'WP_SITEURL', 'http://build.wordpress-develop.dev' );
}
define( 'WP_DEBUG', true );
PHP
    wp core install --url=src.wordpress-develop.dev --quiet --title="WordPress Develop" --admin_name=admin --admin_email="admin@local.dev" --admin_password="password"
    cp /srv/config/wordpress-config/wp-tests-config.php /srv/www/wordpress-develop/
    cd /srv/www/wordpress-develop/
    npm install &>/dev/null
  else
    echo "Updating WordPress develop..."
    cd /srv/www/wordpress-develop/
    if [[ -e .svn ]]; then
      svn up
    else
      if [[ $(git rev-parse --abbrev-ref HEAD) == 'master' ]]; then
        git pull --no-edit git://develop.git.wordpress.org/ master
      else
        echo "Skip auto git pull on develop.git.wordpress.org since not on master branch"
      fi
    fi
    npm install &>/dev/null
  fi

  if [[ ! -d /srv/www/wordpress-develop/build ]]; then
    echo "Initializing grunt in WordPress develop... This may take a few moments."
    cd /srv/www/wordpress-develop/
    grunt
  fi

  # Download phpMyAdmin
  if [[ ! -d /srv/www/default/database-admin ]]; then
    echo "Downloading phpMyAdmin 4.2.11..."
    cd /srv/www/default
    wget -q -O phpmyadmin.tar.gz 'http://sourceforge.net/projects/phpmyadmin/files/phpMyAdmin/4.2.11/phpMyAdmin-4.2.11-all-languages.tar.gz/download'
    tar -xf phpmyadmin.tar.gz
    mv phpMyAdmin-4.2.11-all-languages database-admin
    rm phpmyadmin.tar.gz
  else
    echo "PHPMyAdmin already installed."
  fi
  cp /srv/config/phpmyadmin-config/config.inc.php /srv/www/default/database-admin/
else
  echo -e "\nNo network available, skipping network installations"
fi
# Find new sites to setup.
# Kill previously symlinked Nginx configs
# We can't know what sites have been removed, so we have to remove all
# the configs and add them back in again.
find /etc/nginx/custom-sites -name 'vvv-auto-*.conf' -exec rm {} \;

# Look for site setup scripts
for SITE_CONFIG_FILE in $(find /srv/www -maxdepth 5 -name 'vvv-init.sh'); do
  DIR="$(dirname $SITE_CONFIG_FILE)"
  (
    # Run each vvv-init.sh from its own directory in a subshell so its `cd`
    # and variable changes do not leak into this script.
    cd $DIR
    source vvv-init.sh
  )
done

# Look for Nginx vhost files, symlink them into the custom sites dir
for SITE_CONFIG_FILE in $(find /srv/www -maxdepth 5 -name 'vvv-nginx.conf'); do
  # Derive a unique destination name: strip /srv/www/, turn slashes into
  # dashes, drop the -vvv-nginx.conf suffix, then append an md5 of the
  # full source path to avoid collisions.
  DEST_CONFIG_FILE=${SITE_CONFIG_FILE//\/srv\/www\//}
  DEST_CONFIG_FILE=${DEST_CONFIG_FILE//\//\-}
  DEST_CONFIG_FILE=${DEST_CONFIG_FILE/%-vvv-nginx.conf/}
  DEST_CONFIG_FILE="vvv-auto-$DEST_CONFIG_FILE-$(md5sum <<< $SITE_CONFIG_FILE | cut -c1-32).conf"
  # We allow the replacement of the {vvv_path_to_folder} token with
  # whatever you want, allowing flexible placement of the site folder
  # while still having an Nginx config which works.
  DIR="$(dirname $SITE_CONFIG_FILE)"
  sed "s#{vvv_path_to_folder}#$DIR#" $SITE_CONFIG_FILE > /etc/nginx/custom-sites/$DEST_CONFIG_FILE
done
# RESTART SERVICES AGAIN
#
# Make sure the services we expect to be running are running.
echo -e "\nRestart Nginx..."
service nginx restart

# Parse any vvv-hosts file located in www/ or subdirectories of www/
# for domains to be added to the virtual machine's host file so that it is
# self aware.
#
# Domains should be entered on new lines.
echo "Cleaning the virtual machine's /etc/hosts file..."
sed -n '/# vvv-auto$/!p' /etc/hosts > /tmp/hosts
mv /tmp/hosts /etc/hosts
echo "Adding domains to the virtual machine's /etc/hosts file..."
find /srv/www/ -maxdepth 5 -name 'vvv-hosts' | \
while read hostfile; do
  # Read every non-comment line of each vvv-hosts file, preserving a final
  # line that lacks a trailing newline ([ -n "$line" ]).
  while IFS='' read -r line || [ -n "$line" ]; do
    if [[ "#" != ${line:0:1} ]]; then
      # Only add the domain when it is not already present.
      # Fix: `grep -q` prints nothing, so the previous
      # `[[ -z "$(grep -q ...)" ]]` test was always true and re-appended
      # every domain on each provision; use grep's exit status instead.
      if ! grep -q "^127.0.0.1 $line$" /etc/hosts; then
        echo "127.0.0.1 $line # vvv-auto" >> /etc/hosts
        echo " * Added $line from $hostfile"
      fi
    fi
  done < $hostfile
done

# Report total provisioning time and whether the network was available.
end_seconds="$(date +%s)"
echo "-----------------------------"
echo "Provisioning complete in "$(expr $end_seconds - $start_seconds)" seconds"
if [[ $ping_result == "Connected" ]]; then
  echo "External network connection established, packages up to date."
else
  echo "No external network available. Package installation and maintenance skipped."
fi
echo "For further setup instructions, visit http://vvv.dev"
|
#!/bin/bash
# Builds the pl2 binary, runs it over an input file and writes the result
# under output/, then cleans the build artifacts.
#
# Usage:
#   ./script.sh <input-file>       # run pl2 directly on <input-file>
#   ./script.sh -p <input-file>    # preprocess first, then run on the fixed copy
make
if [[ $1 == "-p" ]]
then
    ./preprocessa.sh $2
    # Extract the file extension: reverse, take the first dot-field, reverse
    # back. Fix: the trailing `rev` was missing, so EXT held the REVERSED
    # extension (e.g. "cba" for ".abc") and basename failed to strip it.
    EXT=$(echo $2 | rev | cut -f 1 -d '.' | rev)
    NAME=$(basename $2 .${EXT})
    ./pl2 < input/${NAME}_corrigido.txt > output/${NAME}_corrigido.txt
else
    NAME=$(basename $1)
    ./pl2 < $1 > output/$NAME
fi
make clean
|
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

# Release CI script: builds and tests the TensorFlow macOS CPU pip package
# for Python 3.8 via pip_new.sh.

# Abort on the first failing command and echo each command for the CI log.
set -e
set -x

source tensorflow/tools/ci_build/release/common.sh
install_bazelisk

# Pick a more recent version of xcode
export DEVELOPER_DIR=/Applications/Xcode_11.3.app/Contents/Developer
export MACOSX_DEPLOYMENT_TARGET=10.14
sudo xcode-select -s "${DEVELOPER_DIR}"

# Install macos pip dependencies
install_macos_pip_deps sudo pip3.8

# Export required variables for running pip_new.sh
export OS_TYPE="MACOS"
export CONTAINER_TYPE="CPU"
export TF_PYTHON_VERSION='python3.8'
export TF_BUILD_BOTH_CPU_PACKAGES=1

# Run configure.
export PYTHON_BIN_PATH=$(which ${TF_PYTHON_VERSION})
# Feed empty answers so configure.py runs non-interactively with defaults.
yes "" | "$PYTHON_BIN_PATH" configure.py

# Export optional variables for running pip.sh
export TF_BUILD_FLAGS="--config=release_cpu_macos"
export TF_TEST_FLAGS="--define=no_tensorflow_py_deps=true --test_lang_filters=py --test_output=errors --verbose_failures=true --keep_going --test_env=TF2_BEHAVIOR=1"
export TF_TEST_TARGETS="//tensorflow/python/..."
export TF_PIP_TESTS="test_pip_virtualenv_non_clean test_pip_virtualenv_clean"
export TF_TEST_FILTER_TAGS='-nomac,-no_mac,-no_oss,-oss_serial,-no_oss_py38,-v1only,-gpu,-tpu,-benchmark-test'
#export IS_NIGHTLY=0 # Not nightly; uncomment if building from tf repo.
export TF_PROJECT_NAME="tensorflow"
export TF_PIP_TEST_ROOT="pip_test"

./tensorflow/tools/ci_build/builds/pip_new.sh
|
<reponame>ebjerkelund/azure-iot-sdk-node
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// UPLOAD TO BLOB ADVANCED SAMPLE
// This is a new api for upload to blob that allows for greater control over the blob upload calls.
// Instead of a single API call that wraps the Storage SDK, the user in this sample retrieves the linked
// Storage Account SAS Token from IoT Hub using a new API call, uses the Azure Storage Blob package to upload the local file to blob storage.
// Additionally - it exposes two new APIs:
//
// getBlobSharedAccessSignature
// > Using a HTTP POST, retrieve a SAS Token for the Storage Account linked to your IoT Hub.
//
// notifyBlobUploadStatus
// > Using HTTP POST, notify IoT Hub of the status of a finished file upload (success/failure).
//
// More information on Uploading Files with IoT Hub can be found here:
// https://docs.microsoft.com/en-us/azure/iot-hub/iot-hub-devguide-file-upload
import { Client } from 'azure-iot-device';
import { Mqtt as Protocol } from 'azure-iot-device-mqtt';
import { errors } from 'azure-iot-common';
import { AnonymousCredential, BlobUploadCommonResponse, BlockBlobClient, newPipeline, Pipeline } from '@azure/storage-blob';
// make sure you set these environment variables prior to running the sample.
const deviceConnectionString: string = process.env.DEVICE_CONNECTION_STRING || '';
const filePath: string = process.env.PATH_TO_FILE || '';
// Name the uploaded file will be stored under in the blob container.
const storageBlobName: string = 'testblob.txt';

// check for connection string; exit with a non-zero code when missing.
if (deviceConnectionString === '') {
  console.log('device connection string not set');
  process.exit(-1);
}

// check for file path; same early-exit treatment.
if (filePath === '') {
  console.log('file path is not set');
  process.exit(-1);
}
/**
 * Uploads the file at `localFilePath` to the Storage Account linked to the
 * IoT Hub, then reports the outcome back to the hub.
 *
 * Steps:
 *  1. Ask IoT Hub for blob info + SAS token scoped to `storageBlobName`.
 *  2. Upload the file with the Azure Storage BlockBlobClient.
 *  3. Call notifyBlobUploadStatus with the success/failure details.
 *
 * @param localFilePath - path of the local file to upload.
 * @param client - connected azure-iot-device Client.
 * @throws errors.ArgumentError when the hub returns no blob info.
 */
async function uploadToBlob(localFilePath: string, client: Client): Promise<void> {
  const blobInfo = await client.getBlobSharedAccessSignature(storageBlobName);
  if (!blobInfo) {
    throw new errors.ArgumentError('Invalid upload parameters');
  }

  const pipeline: Pipeline = newPipeline(new AnonymousCredential(), {
    retryOptions: { maxTries: 4 },
    // telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
    keepAliveOptions: { enable: false }
  });

  // Construct the blob URL to construct the blob client for file uploads
  const { hostName, containerName, blobName, sasToken } = blobInfo;
  const blobUrl: string = `https://${hostName}/${containerName}/${blobName}${sasToken}`;

  // Create the BlockBlobClient for file upload to the Blob Storage Blob
  const blobClient = new BlockBlobClient(blobUrl, pipeline);

  // Setup blank status notification arguments to be filled in on success/failure
  let isSuccess: boolean;
  let statusCode: number;
  let statusDescription: string;

  try {
    const uploadStatus: BlobUploadCommonResponse = await blobClient.uploadFile(localFilePath);
    console.log('uploadStreamToBlockBlob success');

    // Save successful status notification arguments
    isSuccess = true;
    statusCode = uploadStatus._response.status;
    statusDescription = 'upload success';

    // Notify IoT Hub of upload to blob status (success)
    // NOTE(review): this logs before notifyBlobUploadStatus below actually runs.
    console.log('notifyBlobUploadStatus success');
  } catch (err) {
    // Save failure status notification arguments from the thrown error.
    isSuccess = false;
    statusCode = err.code;
    statusDescription = err.message;
    console.log('notifyBlobUploadStatus failed');
    console.log(err);
  }

  // Report the final status (success or failure) back to IoT Hub.
  await client.notifyBlobUploadStatus(blobInfo.correlationId, isSuccess, statusCode, statusDescription);
}
// Create a client device from the connection string and upload the local file to blob storage.
const deviceClient = Client.fromConnectionString(deviceConnectionString, Protocol);

uploadToBlob(filePath, deviceClient)
  .catch((err) => {
    // Surface any unhandled upload/notification failure before exiting.
    console.log(err);
  })
  .finally(() => {
    // Exit explicitly so the open device connection does not keep the process alive.
    process.exit();
  });
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package utils_test
import (
"strconv"
"testing"
"github.com/apache/arrow/go/v7/arrow/bitutil"
"github.com/apache/arrow/go/v7/parquet/internal/testutils"
"github.com/apache/arrow/go/v7/parquet/internal/utils"
)
// randomBitsBuffer returns a bitmap of nbits bits. For setPct in [0, 100]
// the bits are drawn randomly (fixed seed 23) with probability setPct/100 of
// being set; for setPct == -1 the buffer is overwritten with an alternating
// set/clear pattern.
func randomBitsBuffer(nbits, setPct int64) []byte {
	gen := testutils.NewRandomArrayGenerator(23)

	setProbability := float64(0)
	if setPct != -1 {
		setProbability = float64(setPct) / 100.0
	}

	out := make([]byte, int(bitutil.BytesForBits(nbits)))
	gen.GenerateBitmap(out, nbits, setProbability)

	if setPct != -1 {
		return out
	}

	// Deterministic alternating pattern: even positions set, odd cleared.
	wr := bitutil.NewBitmapWriter(out, 0, int(nbits))
	for pos := int64(0); pos < nbits; pos++ {
		if pos%2 == 0 {
			wr.Set()
		} else {
			wr.Clear()
		}
		wr.Next()
	}
	return out
}
// testBitRunReader drains rdr until an empty run is returned and reports the
// total number of set bits observed across all runs.
func testBitRunReader(rdr utils.BitRunReader) (setTotal int64) {
	for run := rdr.NextRun(); run.Len != 0; run = rdr.NextRun() {
		if run.Set {
			setTotal += run.Len
		}
	}
	return setTotal
}
// BenchmarkBitRunReader measures run-reading throughput over a 4096-bit
// bitmap at several set-bit densities, comparing a naive linear reader
// (linearBitRunReader, defined elsewhere in this package) with the
// optimized implementation in the utils package.
func BenchmarkBitRunReader(b *testing.B) {
	const numBits = 4096
	for _, pct := range []int64{1, 0, 10, 25, 50, 60, 75, 99} {
		buf := randomBitsBuffer(numBits, pct)
		b.Run("set pct "+strconv.Itoa(int(pct)), func(b *testing.B) {
			b.Run("linear", func(b *testing.B) {
				// Report throughput in bytes of bitmap processed.
				b.SetBytes(numBits / 8)
				for i := 0; i < b.N; i++ {
					rdr := linearBitRunReader{bitutil.NewBitmapReader(buf, 0, numBits)}
					testBitRunReader(rdr)
				}
			})
			b.Run("internal", func(b *testing.B) {
				b.SetBytes(numBits / 8)
				for i := 0; i < b.N; i++ {
					rdr := utils.NewBitRunReader(buf, 0, numBits)
					testBitRunReader(rdr)
				}
			})
		})
	}
}
// testSetBitRunReader drains rdr until a zero-length run is returned and
// reports the total length of all set-bit runs (set bits only, by contract
// of SetBitRunReader).
func testSetBitRunReader(rdr utils.SetBitRunReader) (setTotal int64) {
	for run := rdr.NextRun(); run.Length != 0; run = rdr.NextRun() {
		setTotal += run.Length
	}
	return setTotal
}
// BenchmarkSetBitRunReader measures the forward and reverse SetBitRunReader
// implementations over a 4096-bit bitmap at several set-bit densities.
func BenchmarkSetBitRunReader(b *testing.B) {
	const numBits = 4096
	for _, pct := range []int64{1, 0, 10, 25, 50, 60, 75, 99} {
		buf := randomBitsBuffer(numBits, pct)
		b.Run("set pct "+strconv.Itoa(int(pct)), func(b *testing.B) {
			b.Run("reader", func(b *testing.B) {
				// Throughput is reported in bytes of bitmap consumed.
				b.SetBytes(numBits / 8)
				for i := 0; i < b.N; i++ {
					rdr := utils.NewSetBitRunReader(buf, 0, numBits)
					testSetBitRunReader(rdr)
				}
			})
			b.Run("reverse rdr", func(b *testing.B) {
				b.SetBytes(numBits / 8)
				for i := 0; i < b.N; i++ {
					rdr := utils.NewReverseSetBitRunReader(buf, 0, numBits)
					testSetBitRunReader(rdr)
				}
			})
		})
	}
}
|
<filename>childpages/basepage.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
from qframer.qt import QtGui
from qframer.qt import QtCore
class BasePage(QtGui.QWidget):
    """Common base widget for the child pages; keeps a reference to the
    widget that created it."""

    def __init__(self, parent=None):
        # Pass parent through to QWidget so Qt ownership/parenting works.
        super(BasePage, self).__init__(parent)
        # NOTE(review): this attribute shadows QtGui.QWidget.parent() (the
        # method). Subclasses must read `self.parent` as an attribute, not
        # call it — confirm this shadowing is intentional.
        self.parent = parent
|
<reponame>izikaj/meta_manager
# Controller/view helper mixin that renders a record's stored meta tags
# and resolves the page <title>.
module MetaManager
  module Helper
    include ::ActionView::Helpers::TagHelper

    # On inclusion into a controller, expose both render helpers to views.
    def self.included(base)
      base.send :helper_method, :render_meta_tags, :render_page_title
    end

    # Renders every meta tag attached to +record+ except the title tag,
    # joined for direct output in the layout. Open Graph tags (og:*) use
    # the `property` attribute; all others use `name`. Returns nil when
    # record is nil.
    def render_meta_tags(record)
      return if record.nil?
      dynamic = self.instance_variable_get("@meta_dynamic")
      rendered = get_actual_meta_tags(record, dynamic).reject { |meta_tag| meta_tag.name == 'title' }.map do |meta_tag|
        attr_name = meta_tag.name =~ /og:/ ? 'property' : 'name'
        tag(:meta, attr_name => meta_tag.name, :content => render_meta_tag_content(meta_tag))
      end
      rendered.join("\n\s\s")
    end

    # Resolves the page title: a stored 'title' meta tag wins; otherwise
    # the title is assembled from the record and options (see get_page_title).
    def render_page_title(record=nil, options = {})
      dynamic = self.instance_variable_get("@meta_dynamic")
      title_tag = get_actual_meta_tags(record, dynamic).detect { |t| t.name == 'title' }
      title_tag.try(:get_content, self) || get_page_title(record, options)
    end

    protected

    # Renders a single tag's content. Before rendering, a controller hook
    # named render_meta_tag_content_<name> is invoked when defined (private
    # methods count, hence respond_to?(..., true)) so the controller can
    # mutate the tag first, e.g.:
    #
    #   def render_meta_tag_content_description(meta_tag)
    #     if !params[:page].blank? && params[:page] != '1'
    #       meta_tag.content += " - page #{params[:page].to_i}"
    #     end
    #   end
    #
    def render_meta_tag_content(meta_tag)
      hook = "render_meta_tag_content_#{meta_tag.name}".to_sym
      send(hook, meta_tag) if respond_to?(hook, true)
      meta_tag.get_content(self)
    end

    private

    # Fallback title assembled from options[:title], the record's own
    # title/name, and the localized site title, joined by options[:spliter].
    def get_page_title(record, options)
      options = { :spliter => ' - ', :append_title => true }.merge(options)
      parts = []
      parts << options[:title] if options.key?(:title)
      unless record.nil?
        parts << (record.respond_to?(:title) ? record.title : (record.respond_to?(:name) ? record.name : nil))
      end
      parts << I18n.t("page.title") if options[:append_title]
      parts.flatten.compact.uniq.join(options[:spliter])
    end

    # For each distinct tag name, prefer the dynamic variant when dynamic
    # rendering is enabled; otherwise fall back to the static variant.
    def get_actual_meta_tags(record, dynamic)
      source = record && record.respond_to?(:meta_tags) ? record.meta_tags : []
      source.group_by(&:name).map do |name, items|
        items.detect { |t| t.is_dynamic && dynamic } || items.detect { |t| !t.is_dynamic }
      end.compact
    end
  end
end
|
def calculate_total_profit(chosen_offers):
    """Accumulate the profit from selling into the chosen offers, ask the
    user to confirm, and return the computed profit.

    Relies on module-level state that is not visible here —
    ``total_amount_to_sell``, ``materials_names``, ``resource_type``,
    ``addDot``, ``read``, ``event`` and ``_`` (translation) — TODO confirm
    they are all defined in the enclosing module.
    """
    profit = 0
    left_to_sell = total_amount_to_sell  # Initialize total amount left to sell
    for offer in chosen_offers:
        # Each offer is a 6-tuple; dist and destination_city_id are unpacked
        # but unused in this function.
        cityname, username, amount, price, dist, destination_city_id = offer
        cityname = cityname.strip()
        # Amounts arrive as formatted strings (e.g. "1.234,567") — strip both
        # thousands separators before converting.
        amount = int(amount.replace(',', '').replace('.', ''))
        price = int(price)
        # Sell as much as this offer accepts, capped by what we still have.
        sell = min(amount, left_to_sell)
        left_to_sell -= sell
        profit += sell * price
    # NOTE(review): the prompt reports ``left_to_sell`` (the unsold
    # remainder) as the amount to sell — it looks like it should report the
    # quantity actually sold (total_amount_to_sell - left_to_sell); confirm.
    prompt_message = _('Sell {} of {} for a total of {}? [Y/n]').format(addDot(left_to_sell), materials_names[resource_type], addDot(profit))
    # 'n'/'N' aborts by signalling the shared event; empty input means yes.
    rta = read(values=['y', 'Y', 'n', 'N', ''])
    if rta.lower() == 'n':
        event.set()
    return profit
#!/bin/bash
# Shared environment for the CONFETTI (ICSE'22 artifact) scripts: download
# and install locations, plus pinned repo URLs/branches for every tool.
# Intended to be sourced by the other scripts, not executed directly.
#export GIT_SSH_COMMAND="/usr/bin/ssh -i /vagrant/knarr.pem"
# wget command to download programs/data (-nc skips files already present)
export WGET="$(which wget) -nc"
# Root dir
export ROOT=/home/icse22ae/confetti-artifact
# Location of fuzz_output
export FUZZ_OUTPUT=/home/icse22ae/confetti-artifact/icse_22_fuzz_output
# NOTE(review): dead guard as written — ROOT is assigned unconditionally just
# above, so this branch only fires if that assignment is commented out.
if [ -z "$ROOT" ]; then
    echo "Please set project ROOT on file scripts/env.sh";
    exit 1
fi
# Where everything is downloaded
export DOWNLOAD_DIR=$ROOT/downloads
# Where everything is installed
export INSTALL_DIR=$ROOT/software
#export JAVA_HOME=$INSTALL_DIR/jvm
# JAVA_HOME must come from the caller's environment (or be set just above).
if [ -z "$JAVA_HOME" ]; then
    echo "Please export JAVA_HOME (or set it on file scripts/env.sh)";
    exit 1
fi
export PATH=$JAVA_HOME/bin:$PATH
# Results directory
export RESULTS_DIR=$ROOT/results
# Plots directory
export PLOTS_DIR=$ROOT/plots
# Reports directory
export REPORTS_DIR=$ROOT/reports
# Data directory
export DATA_DIR=$ROOT/data
# Patch directory
export PATCH_DIR=$ROOT/patches
# Cloned repos dir
export REPOS_DIR=$ROOT/repos
# Green constraint solver
export GREEN_REPO_URL=https://github.com/gmu-swe/green-solver.git
export GREEN_BRANCH=master
export GREEN_DIR=$INSTALL_DIR/green
# Knarr, pinned to the evaluation branch
export KNARR_REPO_URL="https://github.com/gmu-swe/knarr.git" ;
export KNARR_BRANCH=icse-22-confetti-evaluation
export KNARR_DIR=$INSTALL_DIR/knarr
export Z3_DIR=$INSTALL_DIR/z3
# NOTE(review): SSH clone URL — requires GitHub SSH credentials, unlike the
# HTTPS URLs used for every other repo in this file; confirm intentional.
export CLOSURE_REPO_URL=git@github.com:google/closure-compiler.git
export CLOSURE_VERSION=v20190415
export CLOSURE_BRANCH=closure-compiler-parent-$CLOSURE_VERSION
export CLOSURE_DIR=$INSTALL_DIR/closure
export RHINO_DIR=$INSTALL_DIR/rhino
# Build tooling and benchmark subjects
export MAVEN_DIR=$INSTALL_DIR/maven
export ANT_DIR=$INSTALL_DIR/ant
export BCEL_DIR=$INSTALL_DIR/bcel
export BATIK_DIR=$INSTALL_DIR/batik
# CONFETTI itself (a JQF fork) and the baseline JQF for comparison
export JQF_REPO_URL=https://github.com/neu-se/confetti
export JQF_BRANCH=icse-22-evaluation
export JQF_DIR=$INSTALL_DIR/jqf
export JQF_VANILLA_REPO_URL=https://github.com/neu-se/jqf-non-colliding-coverage.git
export JQF_VANILLA_BRANCH=jqf-1.1-with-non-colliding-coverage
export JQF_VANILLA_DIR=$INSTALL_DIR/jqf-vanilla
|
#!/bin/sh
# Point .NET builds at the Mono-installed 4.5 reference assemblies so
# net4x target frameworks resolve on non-Windows hosts, then delegate to
# the PowerShell build script (requires `mono` and `pwsh` on PATH).
FrameworkPathOverride=$(dirname "$(which mono)")/../lib/mono/4.5/
export FrameworkPathOverride
pwsh ./build.ps1
|
<filename>src/models/posts/postQuery.spec.ts<gh_stars>10-100
import test from 'ava';
import WPGraphQL from '../../index';
// Integration tests for post queries. NOTE(review): these run against a
// live WordPress instance at localhost:8080 with its default seed data
// ("Hello world!" post, id 1) — they are not isolated unit tests.
const transport = new WPGraphQL('http://localhost:8080/wp-json', { auth: { username: 'root', password: '<PASSWORD>' } });

// Bare `posts` query: only the selected fields should come back.
test('/posts with no arguments', async t => {
  const expected = {
    posts: [
      {
        id: 1,
        guid: {
          rendered: 'http://localhost:8080/?p=1',
        },
      },
    ],
  };
  const actual = await transport.send(`
    {
      posts {
        id
        guid {
          rendered
        }
      }
    }
  `);
  t.deepEqual(actual, expected);
});

// `posts` with ordering, paging and a status list — exercises argument
// serialization including enum arrays.
test('/posts with several arguments', async t => {
  const expected = {
    posts: [
      {
        id: 1,
        slug: 'hello-world',
        title: {
          rendered: 'Hello world!',
        },
      },
    ],
  };
  const actual = await transport.send(`
    {
      posts(orderby: id, order: asc, per_page: 1, status: [pending, publish]) {
        id
        slug
        title {
          rendered
        }
      }
    }
  `);
  t.deepEqual(actual, expected);
});

// Single-post lookup by id.
test('/post/<id> with no arguments', async t => {
  const expected = {
    post: {
      id: 1,
      slug: 'hello-world',
    },
  };
  const actual = await transport.send(`
    {
      post(id: 1) {
        id
        slug
      }
    }
  `);
  t.deepEqual(actual, expected);
});

// Single-post lookup with an extra `context` argument — result should be
// identical to the plain lookup for `view` context.
test('/post/<id> with 1 argument', async t => {
  const expected = {
    post: {
      id: 1,
      slug: 'hello-world',
    },
  };
  const actual = await transport.send(`
    {
      post(id: 1, context: view) {
        id
        slug
      }
    }
  `);
  t.deepEqual(actual, expected);
});
|
aalength="30" #minimum aminoacid sequence length
search_type="single" #single or set
gene="r_opsin" #gene(set) name
evalue="0.00000000000000000001" #E-value threshold for BLAST search
blasthits="100" #Number of BLAST hits to retain for the analysis

# Run the PIA (Phylogenetically Informed Annotation) pipeline once per
# FASTA file in the current directory; results land in pia/results_<file>/.
for file in *.fasta ; do
    # -p: pia/ already exists from the second iteration on — plain `mkdir`
    # would print an error on every subsequent file.
    mkdir -p pia
    cd pia
    mkdir -p "results_${file}"
    cd "results_${file}"
    # Quote the filename so FASTA files with spaces survive word splitting.
    cp "../../${file}" ./
    # 1) Extract ORFs/CDSs (nucleotide + protein) from the input FASTA.
    python ~/apps/pia/get_orfs_or_cdss.py "$file" fasta 1 ORF open top $aalength both ORF_nuc.fasta ORF_prot.fasta > stdout 2>&1
    # 2) BLAST + alignment (mafft) + placement against the gene set.
    perl ~/apps/pia/pia.pl ORF_prot.fasta $search_type $gene mafft $evalue $blasthits
    # 3) Generate an R script that draws the resulting trees, then run it.
    perl ~/apps/pia/phylographics/makeRtrees.pl treeout.tab trees.pdf phylogram no None Rfile yes no >tree.R
    R --vanilla < tree.R 2>log.txt
    # 4) Post-processing/cleanup.
    ~/apps/pia/post_pia.sh
    # Return from pia/results_<file>/ to the starting directory.
    cd ../../
done
|
"use strict";

// NOTE(review): this looks like Babel-compiled CommonJS output of an ES
// module (`__esModule` marker, string-keyed "default" export) — edits
// probably belong in the original source file, not this artifact.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

// Design-system palette: flat named color tokens plus per-variant button
// color sets (border/text/background for each button state).
var colors = {
  blue: '#007EFF',
  darkBlue: '#1C5DE7',
  mediumBlue: '#005EEA',
  lightBlue: '#0097F6',
  blueTxt: '#333740',
  blueGradient: 'linear-gradient(to right top, #306ded, #2f78f1, #3283f3, #3a8ef6, #4598f7)',
  orangeGradient: 'linear-gradient(to bottom right, #F65A1D, #F68E0E)',
  brightGrey: '#9ea7b8',
  lightGrey: '#E3E9F3',
  lightGreyAlpha: 'rgba(0,0,0,0.1)',
  orange: '#F64D0A',
  darkOrange: '#F64D0A',
  orangeBorder: 'rgba(255, 0, 0, 0.2)',
  orangeBkgd: 'rgba(255, 0, 0, 0.15)',
  black: '#000000',
  white: '#ffffff',
  green: '#6DBB1A',
  greyPlaceholder: '#919BAE',
  greyIconBkgd: '#FAFAFB',
  greyIconColor: '#B3B5B9',
  blueBorder: '#78caff',
  greyToggle: '#CED3DB',
  greyHeader: '#F3F3F4',
  greySubtitle: '#787E8F',
  greyHover: '#F7F8F8',
  greySeparator: '#f1f1f2',
  blueIconTxt: '#367CF7',
  blueIconBkgd: '#E8F0FA',
  greyTabBkgd: '#F2F2F2',
  blueTabBorder: '#1C5DE7',
  greyLink: '#F5F5F5',
  // Button color variants keyed by semantic state.
  button: {
    primary: {
      borderColor: '#007EFF',
      color: '#ffffff',
      backgroundColor: '#007EFF'
    },
    secondary: {
      borderColor: '#007EFF',
      color: '#007EFF',
      backgroundColor: '#ffffff'
    },
    cancel: {
      borderColor: '#9ea7b8',
      color: '#9ea7b8',
      backgroundColor: 'transparent'
    },
    // Quoted because `delete` is a reserved word.
    "delete": {
      borderColor: '#F64D0A',
      color: '#ffffff',
      backgroundColor: '#F64D0A'
    },
    success: {
      borderColor: '#6DBB1A',
      color: '#ffffff',
      backgroundColor: '#6DBB1A'
    },
    disabled: {
      borderColor: '#E9EAEB',
      color: '#B4B6BA',
      backgroundColor: '#E9EAEB'
    }
  }
};
var _default = colors;
exports["default"] = _default;
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.