text stringlengths 1 1.05M |
|---|
<reponame>AndriodDream/AndroidLogin<filename>MyDemo/xiaoxin/src/main/java/com/xiaoxin/dao/SginIn.java<gh_stars>1-10
package com.xiaoxin.dao;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.view.View;
import android.widget.EditText;
import android.widget.Toast;
import com.xiaoxin.db.MyDBOpenHelper;
import com.xiaoxin.feng.R;
import com.xiaoxin.util.UserInfoUtils;
/**
* Created by Administrator on 2016/3/26.
*/
public class SginIn extends AppCompatActivity {
EditText addName;
EditText addPwd;
private ContactInfoDao contactInfoDao;
public SginIn() {}
//数据库打开的帮助类
private MyDBOpenHelper helper;
//在构造方法里面完成类的初始化
public SginIn(Context context) {
helper = new MyDBOpenHelper(context);
}
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.add_name_activity);
contactInfoDao = new ContactInfoDao(this);
//声明数据库的文件类
MyDBOpenHelper helper = new MyDBOpenHelper(this);
//关键代码,必须通过帮助类。获取一个数据库文件
helper.getWritableDatabase();
addName = (EditText) findViewById(R.id.et_add_user_name);
addPwd = (EditText)findViewById(R.id.et_add_user_pwd);
}
public void addUserAndPwd(View view) {
String phone = addName.getText().toString().trim();
String pwd = addPwd.getText().toString().trim();
//判断name和pwd是否为空
if(TextUtils.isEmpty(phone) || TextUtils.isEmpty(pwd)) {
Toast.makeText(SginIn.this, "用户名或密码为空", Toast.LENGTH_SHORT).show();
}else {
boolean result = contactInfoDao.add(phone,pwd);
if(result) {
Toast.makeText(SginIn.this, "注册成功", Toast.LENGTH_SHORT).show();
//1.声明一个意图对象
Intent intent = new Intent();
//2.指定清单文件中声明的动作
intent.setAction("com.xiaoxin.ui.openWelCome");
//3.指定清单文件中声明的类别
intent.addCategory("android.intent.category.DEFAULT");
//开启界面
startActivity(intent);
}else {
Toast.makeText(SginIn.this, "注册失败", Toast.LENGTH_SHORT).show();
}
}
}
}
|
#!/bin/bash
# This script ensures that the Prow configuration checked into git has
# deterministic ordering so that bots who modify them submit reasonably
# readable diffs
set -o errexit
set -o nounset
set -o pipefail

workdir="$( mktemp -d )"
# Bug fix: the cleanup trap was commented out, leaking a temp dir per run.
trap 'rm -rf "${workdir}"' EXIT

jobs_dir="$( dirname "${BASH_SOURCE[0]}" )/../ci-operator/jobs"
# Re-order a copy, then diff it against the checked-in tree (ignoring comments).
cp -r "${jobs_dir}" "${workdir}"
"$( dirname "${BASH_SOURCE[0]}" )/order-prow-job-config.sh"
if ! diff -Naupr -I '^[[:space:]]*#.*' "${workdir}/jobs" "${jobs_dir}"> "${workdir}/diff"; then
cat << EOF
ERROR: This check enforces Prow Job configuration YAML file format (ordering,
ERROR: linebreaks, indentation) to be consistent over the whole repository. We have
ERROR: automation in place that manipulates these configs and consistent formatting
ERROR: helps reviewing the changes the automation does.
ERROR: Run the following command to re-format the Prow jobs:
ERROR: $ docker run -it -v \$(pwd)/ci-operator/jobs:/jobs:z registry.svc.ci.openshift.org/ci/determinize-prow-jobs:latest --prow-jobs-dir /jobs
ERROR: The following errors were found:
EOF
cat "${workdir}/diff"
exit 1
fi
|
from sklearn.metrics import accuracy_score
def evaluate_model_accuracy(actual_labels, predicted_labels):
    """Compute classification accuracy for a set of predictions.

    Args:
        actual_labels (list): Ground-truth class labels.
        predicted_labels (list): Labels predicted by the model.

    Returns:
        float: Fraction of predictions that match the ground truth.
    """
    return accuracy_score(actual_labels, predicted_labels)
#!/usr/bin/env sh
# Generate an API Platform client for the "adres" resource from the local
# Laravel swagger docs, then run the post-generation edit script.
export APP_NAME=my-app

# Robustness fix: abort if the app directory is missing instead of running the
# generator from the wrong working directory; quote to avoid word splitting.
cd "${APP_NAME}" || exit 1

# @todo shell function with parameters
npx @api-platform/client-generator http://laravel.localhost/docs src/ --format swagger --resource adres
# npx @api-platform/client-generator http://laravel.localhost/docs src/ --format swagger --resource politiebureaus
# npx @api-platform/client-generator http://laravel.localhost/docs src/ --format swagger --resource links
cd .. && sbin/edited.sh
#!/bin/bash
# Repeatedly request a non-existent page (404demo.php) on the target host and
# report HTTP status, byte count and timing — a simple 404-traffic simulator.
# Uncomment set command below for code debuging bash
# set -x
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Default to the demo host when no target is given on the command line.
if [ -z "$1" ]; then
url="site16.example.com"
else
# Quoting fix: prevent word splitting/globbing of the argument.
url="$1"
fi

echo -e "\nHTTP app only.\nUsage: $0 site16.example.com"
echo -e "\nTarget:${GREEN} $url ${NC}\n"

# Ten probes, ten seconds apart.
for i in {1..10};
do
curl -s -o /dev/null -w '404demo.php\tstatus: %{http_code}\tbytes: %{size_download}\ttime: %{time_total}\n' "http://$url/404demo.php"
sleep 10;
done
echo -e "\n${BLUE}Simulator 404 completed.${NC}\n"
/*
* drivers/char/sunxi-scr/smartcard.h
*
* Copyright (C) 2016 Allwinner.
* fuzhaoke <<EMAIL>>
*
* Header file ISO7816 smart card
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
*/
#ifndef __SMARTCARD_H__
#define __SMARTCARD_H__
#include "sunxi-scr-user.h"
/*
 * Decode a raw ISO7816 ATR (Answer-To-Reset) byte stream into the parsed
 * ATR and PPS parameter structures.
 *
 * @pscatr:   output, decoded ATR parameters
 * @psmc_pps: output, decoded PPS (protocol/parameter selection) values
 * @pdata:    input, raw ATR bytes as received from the card
 * @with_ts:  presumably whether pdata includes the initial TS byte —
 *            TODO confirm against the implementation
 */
void smartcard_atr_decode(struct smc_atr_para *pscatr, struct smc_pps_para *psmc_pps,
uint8_t *pdata, uint8_t with_ts);
#endif
|
<gh_stars>0
package Inheritance;
// Base class of the small inheritance demo; field `a` is read by the
// subclasses' main methods.
public class P9 {
int a=10;
}
// Standalone sibling class; not referenced by the other demo classes here.
class P13{
int e=50;
}
// First child of P9: demonstrates reading an inherited field alongside its own.
class P10 extends P9 {
    int b=20;

    public static void main(String[] args) {
        // int + int is evaluated first, then concatenated with the label.
        int total = new P9().a + new P10().b;
        System.out.println(total + " From first child");
    }
}
// Second child of P9: same demonstration with its own field `c`.
class P11 extends P9 {
    int c=30;

    public static void main(String[] args) {
        // int + int is evaluated first, then concatenated with the label.
        int total = new P9().a + new P11().c;
        System.out.println(total + " From second child");
    }
}
class P12 extends P10{
int d=40;
public static void main(String[] args) {
System.out.println(new P12().d + new P10().b + " P10's child");
}
} |
require_relative 'base'
module PushFormats
# Export format descriptor for BibTeX pushes; Base presumably consumes the
# instance variables set here — confirm against Base#initialize.
class Bibtex < Base
def initialize
# Default download filename for the exported file.
@filename = 'export.bib'
# Identifier of the target conversion format.
@to_format = 'bibtex'
@mimetype = 'application/x-bibtex'
# Human-readable name shown to users.
@name = 'BibTeX'
super()
end
end
end
|
#!/usr/bin/env bash
# Do NOT set -v or -x or your GitHub API token will be leaked!
set -ue # exit with nonzero exit code if anything fails

echo "Parse memote.ini for values."
# Read `deployment` and `location` keys, skipping commented (;) lines.
deployment=$(awk -F '=' '{if (! ($0 ~ /^;/) && $0 ~ /deployment/) print $2}' memote.ini | tr -d ' ')
location=$(awk -F '=' '{if (! ($0 ~ /^;/) && $0 ~ /location/) print $2}' memote.ini | tr -d ' ')

echo "Configure Travis git user."
git config --global user.email "deploy@travis-ci.org"
git config --global user.name "Travis CI Deployment Bot"

# Pull requests and forks run the tests but never deploy.
if [[ "${TRAVIS_PULL_REQUEST}" != "false" || "${TRAVIS_REPO_SLUG}" != "jonovik/memote-salarecon-7" ]]; then
echo "Untracked build."
memote run --ignore-git
echo "Skip deploy."
exit 0
else
# Always need the deployment branch available locally for storing results.
git checkout "${deployment}"
git checkout "${TRAVIS_BRANCH}"
echo "Tracked build."
memote run
echo "Start deploy to ${deployment}..."
fi

# Generate the history report on the deployment branch.
output="index.html"
git checkout "${deployment}"
echo "Generating updated history report '${output}'."
memote report history --filename="${output}"

# Add, commit and push the files.
git add "${output}"
git commit -m "Travis report #${TRAVIS_BUILD_NUMBER}"
# Bug fix: removed a leftover debug echo of unquoted asterisks (which would
# also undergo pathname expansion and print directory contents).
git pull -s recursive -X ours
git push --quiet "https://${GITHUB_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git" "${deployment}" > /dev/null
echo "Your new report will be visible at https://jonovik.github.io/memote-salarecon-7 in a moment."
<reponame>taowu750/LeetCodeJourney<gh_stars>0
package training.graph;
import org.junit.jupiter.api.Test;
import java.util.*;
import java.util.function.ToIntBiFunction;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* 您前面有 4 个圆形轮盘的锁。每个轮盘有10个插槽:“0”,“1”,“2”,“3”,“4”,“5”,“6”,“7”,“8”,“9”。
* 轮盘可以自由旋转:例如,我们可以将“9”转为“0”,或者将“0”转为“9”。每一步将一个轮盘转动一个槽。
* <p>
* 锁最初从'0000'开始,代表 4 个轮盘的状态的字符串。
* <p>
* 给定“deadend”列表,这意味着如果锁显示列表中的任何一个字符串,则轮盘将停止转动,
* <p>
* 给定一个解锁的目标字符串,返回打开锁所需的最小总转数,如果不可能则返回 -1。
* <p>
* 例 1:
* Input: deadends = ["0201","0101","0102","1212","2002"], target = "0202"
* Output: 6
* Explanation:
* "0000" -> "1000" -> "1100" -> "1200" -> "1201" -> "1202" -> "0202"。
* 需要注意的是,"0000" -> "0001" -> "0002" -> "0102" -> "0202" 是不行的,
* 因为 "0102" 是 deadend。
* <p>
* 例 2:
* Input: deadends = ["8888"], target = "0009"
* Output: 1
* Explanation:
* "0000" -> "0009"
* <p>
* 例 3:
* Input: deadends = ["8887","8889","8878","8898","8788","8988","7888","9888"], target = "8888"
* Output: -1
* <p>
* 例 4:
* Input: deadends = ["0000"], target = "8888"
* Output: -1
* <p>
* 约束:
* - 1 <= deadends.length <= 500
* - deadends[i].length == 4
* - target.length == 4
* - target 不会出现在 deadend 列表中
* - target 和 deadends[i] 只由数字组成
*/
public class E752_Medium_OpenTheLock {

    /**
     * Shared test cases: runs the given solution against the examples from
     * the problem statement plus one extra unsolvable case.
     * NOTE(review): assertEquals arguments are (actual, expected) here — the
     * reverse of the JUnit convention. Equality checks still pass/fail
     * correctly, but failure messages would label the values the wrong way.
     */
    static void test(ToIntBiFunction<String[], String> method) {
        String[] deadends = {"0201", "0101", "0102", "1212", "2002"};
        String target = "0202";
        assertEquals(method.applyAsInt(deadends, target), 6);
        deadends = new String[]{"8888"};
        target = "0009";
        assertEquals(method.applyAsInt(deadends, target), 1);
        deadends = new String[]{"8887", "8889", "8878", "8898", "8788", "8988", "7888", "9888"};
        target = "8888";
        assertEquals(method.applyAsInt(deadends, target), -1);
        deadends = new String[]{"0000"};
        target = "8888";
        assertEquals(method.applyAsInt(deadends, target), -1);
        deadends = new String[]{"5557", "5553", "5575", "5535", "5755", "5355", "7555", "3555",
        "6655", "6455", "4655", "4455", "5665", "5445", "5645", "5465", "5566", "5544",
        "5564", "5546", "6565", "4545", "6545", "4565", "5656", "5454", "5654", "5456",
        "6556", "4554", "4556", "6554"};
        target = "5555";
        assertEquals(method.applyAsInt(deadends, target), -1);
    }

    /**
     * Standard BFS: starting from "0000", traverse the state space level by
     * level until the lock opens or no further progress can be made.
     * Returns the minimum number of wheel turns, or -1 when unreachable.
     */
    public int openLock(String[] deadends, String target) {
        Set<String> deadendSet = new HashSet<>(Arrays.asList(deadends));
        Set<String> visited = new HashSet<>();
        Queue<String> queue = new LinkedList<>();
        visited.add("0000");
        queue.offer("0000");
        StringBuilder codeBuilder = new StringBuilder("0000");
        int level = 0;
        while (!queue.isEmpty()) {
            for (int size = queue.size(); size > 0; size--) {
                String s = queue.remove();
                // Dead end — skip (this also covers "0000" itself being one)
                if (deadendSet.contains(s))
                    continue;
                if (s.equals(target))
                    return level;
                // Reuse codeBuilder to construct the neighbouring wheel states
                for (int i = 0; i < 4; i++) {
                    codeBuilder.setCharAt(i, s.charAt(i));
                }
                for (int i = 0; i < 4; i++) {
                    char ch = s.charAt(i);
                    // Enqueue the state with wheel i turned up by one (9 wraps to 0)
                    codeBuilder.setCharAt(i, ch == '9' ? '0' : (char) (ch + 1));
                    String nextCode = codeBuilder.toString();
                    if (!visited.contains(nextCode) && !deadendSet.contains(nextCode)) {
                        queue.add(nextCode);
                        visited.add(nextCode);
                    }
                    // Enqueue the state with wheel i turned down by one (0 wraps to 9)
                    codeBuilder.setCharAt(i, ch == '0' ? '9' : (char) (ch - 1));
                    nextCode = codeBuilder.toString();
                    if (!visited.contains(nextCode) && !deadendSet.contains(nextCode)) {
                        queue.add(nextCode);
                        visited.add(nextCode);
                    }
                    // Restore wheel i before mutating the next position
                    codeBuilder.setCharAt(i, ch);
                }
            }
            level++;
        }
        return -1;
    }

    @Test
    public void testOpenLock() {
        test(this::openLock);
    }

    // Powers of ten: bases[i] isolates wheel i of a 4-digit code stored as an int
    int[] bases = {1, 10, 100, 1000};

    /**
     * Bidirectional BFS: search from the start state and the target state
     * toward the middle; the lock opens when the two frontiers meet.
     * Compared with plain BFS this stores far fewer states, greatly reducing
     * space and time requirements.
     *
     * With branching factor b and shortest distance d, plain BFS costs
     * O(b**d) while bidirectional BFS costs O(b**(d/2) + b**(d/2)).
     *
     * For details see:
     * https://www.geeksforgeeks.org/bidirectional-search/
     *
     * States are encoded as ints, avoiding costly string manipulation.
     *
     * LeetCode runtime: 17ms - 98.48%
     */
    public int bidirectionalBFS(String[] deadends, String target) {
        // `visited` doubles as the dead-end set: seed it with the dead ends so
        // they are never expanded.
        Set<Integer> visited = new HashSet<>();
        for (String deadend : deadends) {
            visited.add(Integer.parseInt(deadend));
        }
        Set<Integer> begin = new HashSet<>();
        Set<Integer> end = new HashSet<>();
        begin.add(0);
        end.add(Integer.parseInt(target));
        Set<Integer> temp;
        int level = 0;
        while (!begin.isEmpty() && !end.isEmpty()) {
            // Always expand the smaller frontier to further reduce the work
            if (begin.size() > end.size()) {
                temp = begin;
                begin = end;
                end = temp;
            }
            temp = new HashSet<>();
            for (int code : begin) {
                if (end.contains(code))
                    return level;
                if (visited.contains(code))
                    continue;
                // Prevent states that were already expanded from being expanded again
                visited.add(code);
                // Collect the next level of states
                for (int i = 0; i < 4; i++) {
                    // State with wheel i turned up by one (9 wraps to 0)
                    int d = (code / bases[i]) % 10;
                    int nextCode = d == 9 ? code - 9 * bases[i] : code + bases[i];
                    if (!visited.contains(nextCode))
                        temp.add(nextCode);
                    // State with wheel i turned down by one (0 wraps to 9)
                    nextCode = d == 0 ? code + 9 * bases[i] : code - bases[i];
                    if (!visited.contains(nextCode))
                        temp.add(nextCode);
                }
            }
            level++;
            begin = temp;
        }
        return -1;
    }

    @Test
    public void testBidirectionalBFS() {
        test(this::bidirectionalBFS);
    }
}
|
ActiveSupport::Reloader.to_prepare do
  # Monkey-patch: Hyrax::Dashboard::CollectionMembersController#update_members
  # assigns #reindex_extent= on the collection; PcdmCollection has no such
  # attribute, so provide a warning no-op to keep that controller working.
  Hyrax::PcdmCollection.class_eval do
    def reindex_extent=(_extent)
      Hyrax.logger.warn("#reindex_extent called on PcdmCollection")
      # Typo fix in the log message: "satify" -> "satisfy".
      Hyrax.logger.warn("This is a noop to satisfy Hyrax::Dashboard::CollectionMembersController#update_members")
    end
  end
end
|
#!/bin/bash
# Smoke test: run the given executable over ip_filter.tsv and compare the md5
# of its output against a known-good reference value. Exits nonzero on mismatch.
set -eu
set -o pipefail

exe=$1
DATA_FILE=ip_filter.tsv
REFERENCE_VALUE=24e7a7b2270daee89c64d3ca5fb3da1a

echo testing with "$(which "$exe")"

# Quoting fix: be robust against spaces in the executable/data-file paths.
md5=$("$exe" < "$DATA_FILE" | md5sum | cut -d' ' -f1)
test "$md5" == "$REFERENCE_VALUE"
echo OK
#!/bin/bash
# Update helper: stash local edits, pull the latest code, make the sensor
# script executable again, and restore the local config from its backup.
git stash
git pull
chmod u+x mqtt-dht.py
# NOTE(review): presumably config.ini is overwritten/ignored upstream, so the
# local copy is restored from the .bak backup — confirm this is the intended
# direction of the copy.
cp config.ini.bak config.ini
#!/bin/bash
# Container entrypoint for the Hadoop master node: install the master config,
# the optional serf agent config and masters file, then start supervisord.
mv /root/conf-hadoop-master/* $HADOOP_HOME/etc/hadoop/
# Serf cluster-membership config is optional.
if [ -f /root/serf-master.json ]; then
mv /root/serf-master.json /etc/serf/serf-master.json
fi
# The "masters" host list is optional as well.
if [ -f /root/masters ]; then
mv /root/masters $HADOOP_HOME/etc/hadoop/
fi
# Run supervisord as the container's long-lived foreground process.
/usr/bin/supervisord -c /etc/supervisor/supervisord-master.conf
|
// Triangle
// 2021.10.17
#include <algorithm>
#include <vector>
/**
 * Codility "Triangle": return 1 if A contains a triangular triplet
 * (P < Q < R with A[P] + A[Q] > A[R] and permutations), else 0.
 * Sorts A in place (allowed by the task).
 *
 * Fix: the original relied on the judge harness for `vector`/`sort`
 * (no <vector> include, unqualified names); names are now std-qualified.
 */
int solution(std::vector<int>& A)
{
    const int size = static_cast<int>(A.size());
    // Fewer than three sides can never form a triangle.
    if (size < 3)
    {
        return 0;
    }
    // After sorting, only adjacent triplets need checking: for sorted values
    // the binding condition is A[i] + A[i+1] > A[i+2].
    std::sort(A.begin(), A.end());
    for (int i = 0; i < size - 2; i++)
    {
        // 64-bit addition guards against int overflow for large elements.
        if (static_cast<long long>(A[i]) + A[i + 1] > A[i + 2])
        {
            return 1;
        }
    }
    return 0;
}
|
import React from 'react';
import { isEqual, has, find } from "lodash/fp";
// Single-character move identifiers used in game state.
export const ACTIVE = "0";
export const THRILLER = "t";
export const DISCO = "d";
export const CHICKEN = "c";
export const BREAKDANCE = "b";
export const MOONWALK = "m";
// Human-readable names keyed by move identifier; also serves as the set of
// valid moves (see compare()).
export const NAMES = {
[THRILLER]: "thriller",
[DISCO]: "disco",
[CHICKEN]: "chicken",
[BREAKDANCE]: "breakdance",
[MOONWALK]: "moonwalk",
};
// Ordered [winner, loser] pairs: each entry means the first move beats the
// second. Every move beats exactly two others and loses to the other two.
const STRENGTHS = [
[CHICKEN, DISCO],
[DISCO, THRILLER],
[THRILLER, BREAKDANCE],
[BREAKDANCE, MOONWALK],
[MOONWALK, CHICKEN],
[CHICKEN, BREAKDANCE],
[BREAKDANCE, DISCO],
[DISCO, MOONWALK],
[MOONWALK, THRILLER],
[THRILLER, CHICKEN]
];
/**
 * Return true when player 1's move `a` beats player 2's move `b`,
 * i.e. when [a, b] appears as a winner/loser pair in STRENGTHS.
 * Throws for any move identifier not present in NAMES.
 */
export function compare(a, b) {
  if (!has(a, NAMES)) {
    throw new Error(`Invalid move "${a}" for player 1`);
  }
  if (!has(b, NAMES)) {
    throw new Error(`Invalid move "${b}" for player 2`);
  }
  return STRENGTHS.some(([winner, loser]) => winner === a && loser === b);
}
#!/bin/bash
# Thin launcher: forward all command-line arguments to the Python entry point.
python3 ./src/main.py "$@"
<gh_stars>0
package com.esri.ges.manager.stops;
import java.util.ArrayList;
import java.util.List;
/**
 * A named parameter passed back through a callback: carries an identifier,
 * a display name, an optional list of candidate values and a default value.
 * Not thread-safe; intended for single-threaded configuration use.
 */
public class CallbackParameter
{
  private String name;
  private String displayName;
  private List<Object> values;
  private Object defaultValue;

  /** @return the parameter's identifier */
  public String getName() { return name; }
  public void setName(String name) { this.name = name; }

  /** @return the human-readable name */
  public String getDisplayName() { return displayName; }
  public void setDisplayName(String displayName) { this.displayName = displayName; }

  /** @return candidate values, or null if none were ever added */
  public List<Object> getValues() { return values; }
  public void setValues(List<Object> values) { this.values = values; }

  /**
   * Append a candidate value; nulls are silently ignored. The backing list
   * is created lazily on first use.
   */
  public void addValue(Object value)
  {
    if (value == null)
      return;
    if (values == null)
      values = new ArrayList<>();  // diamond operator instead of explicit type arg
    values.add(value);
  }

  public Object getDefaultValue() { return defaultValue; }
  public void setDefaultValue(Object defaultValue) { this.defaultValue = defaultValue; }

  /**
   * Debug representation: {@code Parameter(name,displayName,[ v1 v2],default)}.
   */
  @Override
  public String toString()
  {
    // StringBuilder replaces the original StringBuffer: no synchronization is
    // needed for a method-local buffer.
    StringBuilder sb = new StringBuilder();
    sb.append("Parameter(");
    sb.append(name);
    sb.append(",");
    sb.append(displayName);
    sb.append(",[");
    if (values != null)
      for (Object v : values)
        if (v != null)
          sb.append(' ').append(v);  // avoids the temporary " "+v.toString() string
    sb.append("],");
    sb.append(defaultValue);
    sb.append(")");
    return sb.toString();
  }
}
|
package eu.rasus.fer.rasus.contactList;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.squareup.picasso.Picasso;
import java.util.Collections;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import eu.rasus.fer.rasus.R;
/**
 * ListView adapter showing chat users (avatar + username), kept sorted via
 * the ChatUser natural ordering (Collections.sort).
 */
public class ChatUserAdapter extends BaseAdapter {
    private final Context context;
    private final List<ChatUser> users;

    public ChatUserAdapter(final Context context, final List<ChatUser> users) {
        this.context = context;
        // The caller's list is used directly (not copied) and sorted in place.
        this.users = users;
        Collections.sort(users);
    }

    @Override
    public View getView(final int position, View view, final ViewGroup parent) {
        ChatUser user = users.get(position);
        ViewHolder viewHolder;
        // View-holder pattern: inflate a row once, then recycle it via the tag.
        if (view == null) {
            LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            view = inflater.inflate(R.layout.user_preview_item, parent, false);
            viewHolder = new ViewHolder(view, context);
            view.setTag(viewHolder);
        } else {
            viewHolder = (ViewHolder) view.getTag();
        }
        viewHolder.fillView(user);
        return view;
    }

    @Override
    public int getCount() {
        return users.size();
    }

    @Override
    public Object getItem(final int position) {
        return users.get(position);
    }

    @Override
    public long getItemId(final int position) {
        // List position doubles as the item id; no stable database ids here.
        return position;
    }

    // NOTE(review): does not re-sort or notify; callers must invoke
    // notifyDataSetChanged() after adding — confirm this is intended.
    public void add(ChatUser object) {
        users.add(object);
    }

    @Override
    public void notifyDataSetChanged() {
        // Keep the list ordered every time the data set is refreshed.
        Collections.sort(users);
        super.notifyDataSetChanged();
    }

    /** Caches a row's child views to avoid repeated findViewById lookups. */
    static class ViewHolder {
        @BindView(R.id.user_image)
        ImageView image;
        @BindView(R.id.user_username)
        TextView username;
        Context context;

        public ViewHolder(View view, Context context) {
            ButterKnife.bind(this, view);
            this.context = context;
        }

        /** Bind a user's name and avatar (placeholder fallback) to the row. */
        public void fillView(ChatUser user) {
            username.setText(user.username);
            // Cancel any in-flight image load for this recycled ImageView first.
            Picasso.with(context).cancelRequest(image);
            if (user.image != null && !user.image.equals("")) {
                Picasso.with(context).load(user.image).placeholder(R.drawable.placeholder).error(R.drawable.placeholder).fit().centerCrop().noFade().into(image);
            }
            else image.setImageResource(R.drawable.placeholder);
        }
    }
}
|
"use strict";
const express = require('express');
const helmet = require('helmet');
const app = express();
var cors = require('cors')
const fs = require('fs');
var Web3 = require('web3');
// Index into web3.eth.getAccounts() used as the local relayer/"miner" account.
const DESKTOPMINERACCOUNT = 3
// Loaded contract artifact (JSON) and the instantiated web3 contract object;
// both are set by ContractLoader below.
let AnatokenContract;
let contract;
/**
 * Load compiled contract artifacts by name and instantiate web3 contract
 * objects for them. Also sets the module-level `contract` to the last
 * instance created.
 *
 * Bug fix: the original never pushed the created instance into `contracts`,
 * so the function always returned an empty array; the instance is now
 * collected and returned as well.
 */
const ContractLoader = (contractList, web3) => {
  let contracts = []
  for (let c in contractList) {
    try {
      AnatokenContract = require(`../client/src/contracts/${contractList[c]}.json`)
      // NOTE(review): web3.eth.net.getId() returns a Promise in web3 1.x, so
      // networkId is never awaited/used; the deployment is looked up via the
      // hard-coded Ganache network id 5777 instead — confirm intended.
      const networkId = web3.eth.net.getId();
      const deployedToken = AnatokenContract.networks[5777];
      const instance = new web3.eth.Contract(
        AnatokenContract.abi,
        deployedToken && deployedToken.address,
      );
      console.log(deployedToken.address)
      contract = instance;
      contracts.push(instance)
    } catch (e) {
      console.log(e)
    }
  }
  return contracts
}
// Body parsing, security headers and CORS for all routes.
var bodyParser = require('body-parser')
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use(helmet());
app.use(cors())
let contracts;
// Connect to the local Ganache node.
var web3 = new Web3();
web3.setProvider(new web3.providers.HttpProvider('http://0.0.0.0:7545'));
// In-memory map: lower-cased account address -> list of relayed tx records.
let transactions = {}
let accounts;
// Loaded asynchronously; routes read `accounts` after this resolves.
web3.eth.getAccounts().then((_accounts) => {
accounts = _accounts
console.log("ACCOUNTS", accounts)
})
console.log("LOADING CONTRACTS")
contracts = ContractLoader(["AnaToken"], web3);
// Health-check endpoint.
app.get('/', (req, res) => {
res.setHeader('Access-Control-Allow-Origin', '*');
console.log("/")
res.set('Content-Type', 'application/json');
res.end(JSON.stringify({ hello: "world" }));
});
// Expose the address of the local account used to relay meta-transactions.
app.get('/miner', (req, res) => {
res.setHeader('Access-Control-Allow-Origin', '*');
console.log("/miner")
res.set('Content-Type', 'application/json');
res.end(JSON.stringify({ address: accounts[DESKTOPMINERACCOUNT] }));
});
// Return the recent (< 2 minutes old) relayed transactions for an account.
app.get('/txs/:account', (req, res) => {
  res.setHeader('Access-Control-Allow-Origin', '*');
  console.log("/txs/" + req.params.account)
  // Transactions are keyed by lower-cased address (see the /tx handler).
  let thisTxsKey = req.params.account.toLowerCase()
  console.log("Getting Transactions for ", thisTxsKey)
  let allTxs = transactions[thisTxsKey]
  let recentTxns = []
  // Keep only transactions younger than 120 seconds.
  for (let a in allTxs) {
    let age = Date.now() - allTxs[a].time
    if (age < 120000) {
      recentTxns.push(allTxs[a])
    }
  }
  res.set('Content-Type', 'application/json');
  // Bug fix: the original sent allTxs, discarding the age filter computed
  // above. Sending recentTxns also yields "[]" (instead of an empty body)
  // for unknown accounts.
  res.end(JSON.stringify(recentTxns));
});
// Meta-transaction relay: verify the user's signature, then forward the signed
// call to the contract from the local miner account (which pays the gas).
app.post('/tx', async (req, res) => {
res.setHeader('Access-Control-Allow-Origin', '*');
console.log("/tx", req.body)
console.log("RECOVER:", req.body.message, req.body.sig)
// Recover the signer's address from the signed message.
let account = web3.eth.accounts.recover(req.body.message, req.body.sig).toLowerCase()
console.log("RECOVERED:", account)
// NOTE(review): the whitelist check is commented out — any valid signature is
// relayed and its gas paid for. Confirm this is acceptable beyond local dev.
//if(whitelist.indexOf(account)>=0){
console.log("Correct sig (whitelisted) ... relay transaction to contract... might want more filtering here, but just blindly do it for now")
console.log("Forwarding tx to yyyy with local account ", accounts[DESKTOPMINERACCOUNT])
// The relay account sends the tx; gas limit comes from the client request.
let txparams = {
from: accounts[DESKTOPMINERACCOUNT],
gas: req.body.gas,
gasPrice: Math.round(4 * 1000000000)
}
console.log("calling method", req.body.method, "on contract")
console.log("TX", req.body.sig, ...req.body.args)
console.log("PARAMS", txparams)
// NOTE(review): req.body.method selects the contract method by name without
// validation — any contract method can be invoked this way.
contract.methods["" + req.body.method](req.body.sig, ...req.body.args).send(
txparams, (error, transactionHash) => {
console.log("TX CALLBACK", error, transactionHash)
res.set('Content-Type', 'application/json');
res.end(JSON.stringify({ transactionHash: transactionHash }));
// Record the relayed tx under the *signer's* address for /txs lookups.
let fromAddress = account
if (!transactions[fromAddress]) {
transactions[fromAddress] = []
}
if (transactions[fromAddress].indexOf(transactions) < 0) {
transactions[fromAddress].push({ hash: transactionHash, time: Date.now(), metatx: true, miner: accounts[DESKTOPMINERACCOUNT] })
}
}
)
.on('error', (err, receiptMaybe) => {
console.log("TX ERROR", err, receiptMaybe)
})
.on('transactionHash', (transactionHash) => {
console.log("TX HASH", transactionHash)
})
.on('receipt', (receipt) => {
console.log("TX RECEIPT", receipt)
})
.then((receipt) => {
console.log("TX THEN", receipt)
})
});
app.listen(9999);
console.log(`http listening on 9999`);
#!/bin/bash
# Conda post-link script: download the ragene20stprobeset.db Bioconductor
# annotation tarball (trying mirrors in order), verify its md5 checksum, and
# install it into the environment's R library.
FN="ragene20stprobeset.db_8.7.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.10/data/annotation/src/contrib/ragene20stprobeset.db_8.7.0.tar.gz"
  "https://bioarchive.galaxyproject.org/ragene20stprobeset.db_8.7.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-ragene20stprobeset.db/bioconductor-ragene20stprobeset.db_8.7.0_src_all.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-ragene20stprobeset.db/bioconductor-ragene20stprobeset.db_8.7.0_src_all.tar.gz"
)
MD5="dd4e8701fd716e8564da2581d52ce1db"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"

SUCCESS=0
for URL in "${URLS[@]}"; do
  curl "$URL" > "$TARBALL"
  [[ $? == 0 ]] || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # Bug fix: GNU `md5sum -c` requires two separator characters between the
    # hash and the file name ("HASH  FILE"); the original single space is
    # rejected as "no properly formatted checksum lines" by strict coreutils.
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  # Bug fix: the original used `else if ...; then ... fi fi` (an accidental
  # nested if); `elif` expresses the intent directly.
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
#!/bin/bash
# Run the api-proxy test-suite container, mounting the env certs directory and
# a logs volume. In CI, tenant (-t) arguments are derived from the env's
# creds.json unless the caller already supplied them.
function main {
local CERTS_DIR=`docker_image_env api-proxy-tests CERTS_DIR`
local LOGS_DIR=`docker_image_env api-proxy-tests LOGS_DIR`
local ENV_NAME=`docker_image_env api-proxy-tests ENV_NAME`
local ENVS_DIR="$HOME/.openradiant/envs"
local tenants=()
# Only auto-derive tenants when running under CI and none were passed in.
if [[ `echo "$CI" | tr '[:upper:]' '[:lower:]'` == true && `check_for_tenants "$@"` == false ]]; then
local CREDS_DIR="$ENVS_DIR/$ENV_NAME/creds.json"
# Extract TLS_path / Space_id values from creds.json and rewrite them into
# tenant arguments. NOTE(review): the rev|sed|rev pipeline edits the strings
# from the right-hand end; verify against a sample creds.json before changing.
IFS=$'\n' tenants=(`cat "$CREDS_DIR" \
| grep -oP "((?<=\"TLS_path\":\")|(?<=\"Space_id\":\"))[^\"]+" \
| rev \
| sed 'N;s/\(.*\)\n\([^/]*\)\(\/\)\([^/]*\)\(.*\)/t-\n\2\:\1\:\4/' \
| rev`)
fi
docker run -v "$ENVS_DIR":"$CERTS_DIR":ro -v api-proxy-tests-logs:"$LOGS_DIR" --net="host" api-proxy-tests "${tenants[@]}" "$@"
}
# Read a single environment variable baked into a docker image ($1=image, $2=var).
function docker_image_env {
docker inspect -f "{{ .Config.Env }}" "$1" | grep -oP "(?<=$2\=)([^\s\]]+)"
}
# Print "true" if a -t flag appears anywhere in the arguments, else "false".
function check_for_tenants {
local have_tenant=false
while [[ $# -gt 0 && $have_tenant == false ]]; do
case "$1" in
-t)
shift
have_tenant=true
;;
*)
shift
;;
esac
done
echo "$have_tenant"
}
main "$@"
|
<gh_stars>0
package doodle
package examples
import doodle.core._
import doodle.core.Image._
import doodle.syntax._
import doodle.random._
import cats.instances.all._
import cats.syntax.all._
object BrownianMotion {
  // One step of drifting Brownian motion: random jitter around `start` plus a
  // constant drift vector.
  def brownianMotion(start: Point, drift: Vec): Random[Point] =
    jitter(start) map { pt => pt + drift }

  // Perturb a point with Gaussian noise (mean 0, sd 5) on each axis; the two
  // samples are drawn independently by the Random applicative.
  def jitter(point: Point): Random[Point] = {
    val noise = Random.normal(0, 5.0)
    (noise, noise) mapN { (dx, dy) =>
      Point.cartesian(point.x + dx, point.y + dy)
    }
  }

  val start = Point.zero
  val drift = Vec(3, 0)

  // A random star-shaped outline with translucent colouring — the "smoke"
  // particle stamped along each walk.
  val smoke: Random[Image] = {
    val alpha = Random.normal(0.5, 0.1) map (a => a.normalized)
    // Hue confined to roughly 0.7–0.8 turns.
    val hue = Random.double.map(h => (h * 0.1 + 0.7).turns)
    val saturation = Random.double.map(s => (s * 0.8).normalized)
    val lightness = Random.normal(0.4, 0.1) map (a => a.normalized)
    val color =
      (hue, saturation, lightness, alpha) mapN {
        (h, s, l, a) => Color.hsla(h, s, l, a)
      }
    val points = Random.int(3,7)
    val radius = Random.normal(2, 1)
    val rotation = Random.double.map { r => r.turns }
    // Star with 3–7 points; inner radius is half the outer radius.
    val shape = (points, radius, rotation).mapN { (pts, r, rot) =>
      star(pts, r, r * 0.5, rot)
    }
    (shape, color).mapN { (shape, line) =>
      shape.lineColor(line).lineWidth(2).noFill
    }
  }

  // One particle's path: `steps` stamps of the same random shape along a walk.
  def walk(steps: Int): Random[Image] = {
    // Build the list of per-step images; each step's position feeds the next.
    def iter(step: Int, start: Random[Point], shape: Image): List[Random[Image]] =
      step match {
        case 0 =>
          Nil
        case n =>
          val here = start.map (shape at _.toVec)
          val next = start flatMap (pt => brownianMotion(pt, drift))
          here :: iter(step-1, next, shape)
      }
    smoke.flatMap { shape =>
      iter(steps, Random.always(start), shape).sequence.map { imgs =>
        imgs.foldLeft(Image.empty){ _ on _ }
      }
    }
  }

  // Overlay `nParticles` independent walks of `steps` steps each.
  def walkParticles(nParticles: Int, steps: Int): Random[Image] =
    (1 to nParticles).toList.map { _ => walk(steps) }.sequence.map { imgs =>
      imgs.foldLeft(Image.empty){ _ on _ }
    }

  // The final composition: ten particles, one hundred steps each.
  val image: Random[Image] =
    walkParticles(10, 100)
}
|
#!/bin/sh
# Local CI helper: rebuild and restart the webhook container, then start a
# Jekyll dev server for the site, both mounting the current checkout.
set -x
# Stop any previous instances and drop the stale webhook image.
docker container stop rutherford-site-webhook rutherford-site-server
docker image rm rutherford-site-webhook
cd scripts/local-ci
docker build . -t rutherford-site-webhook
cd ../../
# Webhook listener: host port 58778 -> container port 9000.
docker run --rm -d -p 58778:9000 \
--name=rutherford-site-webhook \
-v `pwd`:/site \
--env-file=/keybase/team/rutherford_nj.website/local_ci.env \
rutherford-site-webhook
# Jekyll server with live rebuild: host port 58777 -> container port 8777.
docker run -d --rm -p 0.0.0.0:58777:8777 \
--name=rutherford-site-server \
-v `pwd`:/srv/jekyll \
jekyll/builder:latest jekyll serve --watch -P 8777 -p /dev/null
<filename>src/main/java/org/slos/permission/SetActivePlayRequest.java
package org.slos.permission;
/**
 * Request payload toggling whether play is active.
 */
public class SetActivePlayRequest {

    private Boolean active;

    /** @return the requested active state; null when unset */
    public Boolean getActive() {
        return this.active;
    }

    /** @param active the requested active state */
    public void setActive(Boolean active) {
        this.active = active;
    }

    /** Adapter extracting the boolean flag from a request instance. */
    public static Boolean adaptToBoolean(SetActivePlayRequest setActivePlayRequest) {
        return setActivePlayRequest.getActive();
    }
}
|
#!/bin/bash
# Generates the docker-library style manifest for the docker-pm2 images:
# one Tags/GitCommit/Directory stanza per node-version/variant combination.
# NOTE(review): the `hash git || { echo ...; }` guard prints a message but
# does not exit — confirm whether it should also `exit 1`.
hash git 2>/dev/null || { echo >&2 "git not found, exiting."; }
# get the most recent commit which modified any of "$@"
function fileCommit() {
git log -1 --format='format:%H' HEAD -- "$@"
}
tpath='tags'
url='https://github.com/keymetrics/docker-pm2'
self="$(basename "${BASH_SOURCE[0]}")"
date=$(date +'%Y-%m-%d %H:%M:%S')
# Map: node major version -> '|'-separated list of image variants.
declare -A versions
versions['latest']='alpine|stretch|jessie|slim'
versions['12']='alpine|stretch|jessie|slim'
versions['10']='alpine|stretch|jessie|slim|wheezy'
versions['8']='alpine|stretch|jessie|slim|wheezy'
versions['6']='alpine|stretch|jessie|slim|wheezy'
versions['4']='alpine|stretch|jessie|slim|wheezy'
# Manifest header: provenance, maintainer and repo information.
echo "# This file is generated via $url/blob/$(fileCommit "$self")/$self"
echo "# $date"
echo
echo "Maintainers: Keymetrics.io <$url> (@keymetrics)"
echo "GitRepo: $url.git"
echo
for version in "${!versions[@]}"
do
# Split the '|'-separated variant list into an array.
variants=(${versions[$version]//|/ })
for i in "${!variants[@]}"
do
variant=${variants[i]}
# Pin each stanza to the last commit touching that tag directory.
commit="$(fileCommit "$tpath/$version/$variant")"
echo "Tags: $version"-"$variant"
echo "GitCommit: $commit"
echo "Directory: $tpath/$version/$variant"
echo
done
done
|
namespace CodenameGenerator
{
    /// <summary>
    /// Generates unique two-word spy-style codenames ("Swift Shadow", ...).
    /// Uniqueness is tracked per instance. Not thread-safe.
    /// </summary>
    public class CodenameGenerator
    {
        private List<string> adjectives = new List<string> { "Swift", "Stealthy", "Cunning", "Sneaky", "Silent" };
        private List<string> nouns = new List<string> { "Shadow", "Phantom", "Ghost", "Whisper", "Specter" };
        private HashSet<string> usedCodenames = new HashSet<string>();

        // Bug fix: a single Random instance instead of `new Random()` per draw.
        // Rapidly re-created Random instances can share a time-based seed
        // (pre-.NET 6), making the rejection loop spin on identical values.
        private readonly Random random = new Random();

        /// <summary>
        /// Returns a codename this instance has not produced before.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when every adjective/noun combination has been used.
        /// </exception>
        public string GenerateCodename()
        {
            if (usedCodenames.Count == adjectives.Count * nouns.Count)
            {
                throw new InvalidOperationException("All possible codenames have been exhausted.");
            }
            string codename;
            // Rejection-sample until an unused combination is found.
            do
            {
                string adjective = adjectives[random.Next(adjectives.Count)];
                string noun = nouns[random.Next(nouns.Count)];
                codename = $"{adjective} {noun}";
            } while (usedCodenames.Contains(codename));
            usedCodenames.Add(codename);
            return codename;
        }
    }
}
<reponame>NajibAdan/kitsu-server
# GraphQL object type for Manga records; all fields come from the shared
# Media interface.
class Types::Manga < Types::BaseObject
implements Types::Media
end
|
#!/usr/bin/env bash
# shellcheck disable=SC2230
# shellcheck disable=SC1090
# This is the primary driver of the GitLab CI infrastructure.
# Run `ci.sh usage` for usage information.
set -Eeuo pipefail
# Configuration:
HACKAGE_INDEX_STATE="2020-12-21T14:48:20Z" # TODO dedup with yaml's def
MIN_HAPPY_VERSION="1.20"
MIN_ALEX_VERSION="3.2.6"
TOP="$(pwd)"
# Sanity check: warn when not run from the repo root.
# NOTE(review): this only prints — it does not abort; confirm that is intended.
if [ ! -d "$TOP/.gitlab" ]; then
echo "This script expects to be run from the root of a ghc checkout"
fi
CABAL_CACHE="$TOP/${CABAL_CACHE:-cabal-cache}"
source "$TOP/.gitlab/common.sh"
# Run a command, report how long it took, and record the timing in ci-timings.
# $1 is a label; the remaining arguments are the command to run. Returns the
# command's exit status.
function time_it() {
  local name="$1"
  shift
  local start=$(date +%s)
  local res=0
  set +e
  # Quoting fix: "$@" keeps arguments containing spaces intact. The subshell
  # runs under `set -e` so a failure is captured rather than aborting us.
  ( set -e ; "$@" )
  res=$?
  set -e
  local end=$(date +%s)
  # Arithmetic expansion instead of spawning `expr`.
  local delta=$((end - start))
  echo "$name took $delta seconds"
  # Bug fix: the original printf had a %15s directive with no argument,
  # writing only the padded separator and duration; supply the label too.
  # NOTE(review): '>' overwrites ci-timings on every call — '>>' may be the
  # intent; kept as-is to preserve existing behavior.
  printf "%15s | %s" "$name" "$delta" > ci-timings
  return $res
}
# Print the CLI usage/help text. The heredoc below is user-facing output and
# is intentionally left byte-identical.
function usage() {
cat <<EOF
$0 - GHC continuous integration driver
Common Modes:
usage Show this usage message.
setup Prepare environment for a build.
configure Run ./configure.
clean Clean the tree
shell Run an interactive shell with a configured build environment.
save_cache Preserve the cabal cache
Make build system:
build_make Build GHC via the make build system
test_make Test GHC via the make build system
Hadrian build system
build_hadrian Build GHC via the Hadrian build system
test_hadrian Test GHC via the Hadrian build system
Environment variables affecting both build systems:
CROSS_TARGET Triple of cross-compilation target.
VERBOSE Set to non-empty for verbose build output
RUNTEST_ARGS Arguments passed to runtest.py
MSYSTEM (Windows-only) Which platform to build form (MINGW64 or MINGW32).
IGNORE_PERF_FAILURES
Whether to ignore perf failures (one of "increases",
"decreases", or "all")
HERMETIC Take measures to avoid looking at anything in \$HOME
CONFIGURE_ARGS Arguments passed to configure script.
ENABLE_NUMA Whether to enable numa support for the build (disabled by default)
INSTALL_CONFIGURE_ARGS
Arguments passed to the binary distribution configure script
during installation of test toolchain.
NIX_SYSTEM On Darwin, the target platform of the desired toolchain
(either "x86-64-darwin" or "aarch-darwin")
Environment variables determining build configuration of Make system:
BUILD_FLAVOUR Which flavour to build.
BUILD_SPHINX_HTML Whether to build Sphinx HTML documentation.
BUILD_SPHINX_PDF Whether to build Sphinx PDF documentation.
INTEGER_LIBRARY Which integer library to use (integer-simple or integer-gmp).
HADDOCK_HYPERLINKED_SOURCES
Whether to build hyperlinked Haddock sources.
TEST_TYPE Which test rule to run.
Environment variables determining build configuration of Hadrian system:
BUILD_FLAVOUR Which flavour to build.
Environment variables determining bootstrap toolchain (Linux):
GHC Path of GHC executable to use for bootstrapping.
CABAL Path of cabal-install executable to use for bootstrapping.
ALEX Path of alex executable to use for bootstrapping.
HAPPY Path of alex executable to use for bootstrapping.
Environment variables determining bootstrap toolchain (non-Linux):
GHC_VERSION Which GHC version to fetch for bootstrapping.
CABAL_INSTALL_VERSION
Cabal-install version to fetch for bootstrapping.
EOF
}
# Pick a usable UTF-8 locale and export it via LANG/LC_ALL.
# Tries the distro-specific spellings of C.UTF-8 first, then en_US; aborts
# the whole script (fail) if no UTF-8 locale is available.
function setup_locale() {
# Musl doesn't provide locale support at all...
if ! which locale > /dev/null; then
info "No locale executable. Skipping locale setup..."
return
fi
# BSD grep terminates early with -q, consequently locale -a will get a
# SIGPIPE and the pipeline will fail with pipefail.
shopt -o -u pipefail
if locale -a | grep -q C.UTF-8; then
# Debian
export LANG=C.UTF-8
elif locale -a | grep -q C.utf8; then
# Fedora calls it this
export LANG=C.utf8
elif locale -a | grep -q en_US.UTF-8; then
# Centos doesn't have C.UTF-8
export LANG=en_US.UTF-8
elif locale -a | grep -q en_US.utf8; then
# Centos doesn't have C.UTF-8
export LANG=en_US.utf8
else
error "Failed to find usable locale"
info "Available locales:"
locale -a
fail "No usable locale, aborting..."
fi
info "Using locale $LANG..."
export LC_ALL=$LANG
# Re-enable pipefail, which was disabled above for the grep pipelines.
shopt -o -s pipefail
}
# Windows/MSYS2-only initialisation: derive target/boot triples from
# $MSYSTEM and bring the mingw toolchain and Python into PATH.
function mingw_init() {
case "$MSYSTEM" in
MINGW32)
target_triple="i386-unknown-mingw32"
boot_triple="i386-unknown-mingw32" # triple of bootstrap GHC
;;
MINGW64)
target_triple="x86_64-unknown-mingw32"
boot_triple="x86_64-unknown-mingw32" # triple of bootstrap GHC
;;
*)
fail "win32-init: Unknown MSYSTEM $MSYSTEM"
;;
esac
# Bring mingw toolchain into PATH.
# This is extracted from /etc/profile since this script inexplicably fails to
# run under gitlab-runner.
# shellcheck disable=SC1091
source /etc/msystem
MINGW_MOUNT_POINT="${MINGW_PREFIX}"
PATH="$MINGW_MOUNT_POINT/bin:$PATH"
# We always use mingw64 Python to avoid path length issues like #17483.
export PYTHON="/mingw64/bin/python3"
}
# This will contain GHC's local native toolchain
toolchain="$TOP/toolchain"
mkdir -p "$toolchain/bin"
PATH="$toolchain/bin:$PATH"
export METRICS_FILE="$TOP/performance-metrics.tsv"
# Number of parallel build jobs, as detected by the in-tree helper script.
cores="$(mk/detect-cpu-count.sh)"
# Use a local temporary directory to ensure that concurrent builds don't
# interfere with one another
mkdir -p "$TOP/tmp"
export TMP="$TOP/tmp"
export TEMP="$TOP/tmp"
# Log the path and version of a toolchain tool whose name is held in the
# variable named by $1 (e.g. `show_tool GHC` prints and runs "$GHC").
function show_tool() {
local tool="$1"
info "$tool = ${!tool}"
${!tool} --version
}
# Decide where the bootstrap toolchain (GHC/cabal/happy/alex) comes from:
#   env       - Linux Docker images ship it pre-installed
#   nix       - Darwin builds it via .gitlab/darwin/toolchain.nix
#   extracted - everything else downloads it into $toolchain (see setup_toolchain)
# Exports GHC/CABAL/HAPPY/ALEX accordingly.
function set_toolchain_paths() {
case "$(uname -m)-$(uname)" in
# Linux toolchains are included in the Docker image
*-Linux) toolchain_source="env" ;;
# Darwin toolchains are provided via .gitlab/darwin/toolchain.nix
*-Darwin) toolchain_source="nix" ;;
*) toolchain_source="extracted" ;;
esac
case "$toolchain_source" in
extracted)
# These are populated by setup_toolchain
GHC="$toolchain/bin/ghc$exe"
CABAL="$toolchain/bin/cabal$exe"
HAPPY="$toolchain/bin/happy$exe"
ALEX="$toolchain/bin/alex$exe"
;;
nix)
if [[ ! -f toolchain.sh ]]; then
case "$NIX_SYSTEM" in
x86_64-darwin|aarch64-darwin) ;;
*) fail "unknown NIX_SYSTEM" ;;
esac
# toolchain.sh is a generated script that exports the tool paths.
nix build -f .gitlab/darwin/toolchain.nix --argstr system "$NIX_SYSTEM" -o toolchain.sh
cat toolchain.sh
fi
source toolchain.sh ;;
env)
# These are generally set by the Docker image but
# we provide these handy fallbacks in case the
# script isn't run from within a GHC CI docker image.
if [ -z "$GHC" ]; then GHC="$(which ghc)"; fi
if [ -z "$CABAL" ]; then CABAL="$(which cabal)"; fi
if [ -z "$HAPPY" ]; then HAPPY="$(which happy)"; fi
if [ -z "$ALEX" ]; then ALEX="$(which alex)"; fi
;;
*) fail "bad toolchain_source"
esac
export GHC
export CABAL
export HAPPY
export ALEX
}
# Refresh the cabal package index, pinned to $HACKAGE_INDEX_STATE for
# reproducible builds.
function cabal_update() {
"$CABAL" update --index="$HACKAGE_INDEX_STATE"
}
# Prepare the build environment: restore the cabal cache, extract the
# bootstrap toolchain if needed, refresh the package index, configure git
# identity, and log toolchain versions.
function setup() {
echo "=== TIMINGS ===" > ci-timings
if [ -d "$CABAL_CACHE" ]; then
info "Extracting cabal cache from $CABAL_CACHE to $CABAL_DIR..."
mkdir -p "$CABAL_DIR"
cp -Rf "$CABAL_CACHE"/* "$CABAL_DIR"
fi
# Only the "extracted" toolchain source needs an explicit download step.
case $toolchain_source in
extracted) time_it "setup" setup_toolchain ;;
*) ;;
esac
cabal_update
# Make sure that git works
git config user.email "ghc-ci@gitlab-haskell.org"
git config user.name "GHC GitLab CI"
info "====================================================="
info "Toolchain versions"
info "====================================================="
show_tool GHC
show_tool CABAL
show_tool HAPPY
show_tool ALEX
}
# Fetch and install a bootstrap GHC binary distribution into $toolchain,
# unless $GHC already points at an existing executable.
# Requires GHC_VERSION and boot_triple; uses $TAR and $MAKE.
function fetch_ghc() {
if [ ! -e "$GHC" ]; then
local v="$GHC_VERSION"
if [[ -z "$v" ]]; then
# Fixed double-negative in the error message ("are not set").
fail "neither GHC nor GHC_VERSION is set"
fi
start_section "fetch GHC"
url="https://downloads.haskell.org/~ghc/${GHC_VERSION}/ghc-${GHC_VERSION}-${boot_triple}.tar.xz"
info "Fetching GHC binary distribution from $url..."
curl "$url" > ghc.tar.xz || fail "failed to fetch GHC binary distribution"
$TAR -xJf ghc.tar.xz || fail "failed to extract GHC binary distribution"
case "$(uname)" in
MSYS_*|MINGW*)
# Windows bindists are relocatable: copy them straight into place.
cp -r "ghc-${GHC_VERSION}"/* "$toolchain"
;;
*)
pushd "ghc-${GHC_VERSION}"
./configure --prefix="$toolchain"
"$MAKE" install
popd
;;
esac
rm -Rf "ghc-${GHC_VERSION}" ghc.tar.xz
end_section "fetch GHC"
fi
}
# Fetch a cabal-install binary for bootstrapping, unless $CABAL already
# points at an existing executable. Requires CABAL_INSTALL_VERSION.
function fetch_cabal() {
if [ ! -e "$CABAL" ]; then
local v="$CABAL_INSTALL_VERSION"
if [[ -z "$v" ]]; then
# Fixed double-negative in the error message ("are not set").
fail "neither CABAL nor CABAL_INSTALL_VERSION is set"
fi
# Fixed copy-pasted section label: this fetches cabal, not GHC.
start_section "fetch cabal"
case "$(uname)" in
# N.B. Windows uses zip whereas all others use .tar.xz
MSYS_*|MINGW*)
case "$MSYSTEM" in
MINGW32) cabal_arch="i386" ;;
MINGW64) cabal_arch="x86_64" ;;
*) fail "unknown MSYSTEM $MSYSTEM" ;;
esac
url="https://downloads.haskell.org/~cabal/cabal-install-$v/cabal-install-$v-$cabal_arch-unknown-mingw32.zip"
info "Fetching cabal binary distribution from $url..."
curl "$url" > "$TMP/cabal.zip"
unzip "$TMP/cabal.zip"
mv cabal.exe "$CABAL"
;;
*)
local base_url="https://downloads.haskell.org/~cabal/cabal-install-$v/"
case "$(uname)" in
Darwin) cabal_url="$base_url/cabal-install-$v-x86_64-apple-darwin17.7.0.tar.xz" ;;
FreeBSD)
# The official FreeBSD bindist is unavailable; use a mirrored build.
#cabal_url="$base_url/cabal-install-$v-x86_64-portbld-freebsd.tar.xz" ;;
cabal_url="http://home.smart-cactus.org/~ben/ghc/cabal-install-3.0.0.0-x86_64-portbld-freebsd.tar.xz" ;;
*) fail "don't know where to fetch cabal-install for $(uname)"
esac
echo "Fetching cabal-install from $cabal_url"
curl "$cabal_url" > cabal.tar.xz
$TAR -xJf cabal.tar.xz
mv cabal "$toolchain/bin"
;;
esac
end_section "fetch cabal"
fi
}
# For non-Docker platforms we prepare the bootstrap toolchain
# here. For Docker platforms this is done in the Docker image
# build.
function setup_toolchain() {
fetch_ghc
fetch_cabal
cabal_update
# $cabal_install is deliberately a space-separated string so that the
# unquoted expansions below word-split back into a command line.
local cabal_install="$CABAL v2-install \
--with-compiler=$GHC \
--index-state=$HACKAGE_INDEX_STATE \
--installdir=$toolchain/bin \
--overwrite-policy=always"
# Avoid symlinks on Windows
case "$(uname)" in
MSYS_*|MINGW*) cabal_install="$cabal_install --install-method=copy" ;;
*) ;;
esac
info "Building happy..."
$cabal_install happy --constraint="happy>=$MIN_HAPPY_VERSION"
info "Building alex..."
$cabal_install alex --constraint="alex>=$MIN_ALEX_VERSION"
}
# Reset all git submodules to a pristine, fully-checked-out state.
function cleanup_submodules() {
start_section "clean submodules"
info "Cleaning submodules..."
# On Windows submodules can inexplicably get into funky states where git
# believes that the submodule is initialized yet its associated repository
# is not valid. Avoid failing in this case with the following insanity.
git submodule sync --recursive || git submodule deinit --force --all
git submodule update --init --recursive
git submodule foreach git clean -xdf
end_section "clean submodules"
}
# Generate mk/build.mk for the make build system from BUILD_FLAVOUR and
# friends, applying defaults for the optional knobs.
function prepare_build_mk() {
if [[ -z "$BUILD_FLAVOUR" ]]; then fail "BUILD_FLAVOUR is not set"; fi
if [[ -z ${BUILD_SPHINX_HTML:-} ]]; then BUILD_SPHINX_HTML=YES; fi
if [[ -z ${BUILD_SPHINX_PDF:-} ]]; then BUILD_SPHINX_PDF=YES; fi
if [[ -z ${BIGNUM_BACKEND:-} ]]; then BIGNUM_BACKEND=gmp; fi
cat > mk/build.mk <<EOF
BIGNUM_BACKEND=${BIGNUM_BACKEND}
include mk/flavours/${BUILD_FLAVOUR}.mk
GhcLibHcOpts+=-haddock
EOF
if [ -n "${HADDOCK_HYPERLINKED_SOURCES:-}" ]; then
echo "EXTRA_HADDOCK_OPTS += --hyperlinked-source --quickjump" >> mk/build.mk
fi
info "build.mk is:"
cat mk/build.mk
}
# Run ./boot and ./configure, threading through CONFIGURE_ARGS, the cross
# target triple and the NUMA switch. Dumps config.log on failure.
function configure() {
start_section "booting"
run python3 boot
end_section "booting"
read -r -a args <<< "${CONFIGURE_ARGS:-}"
if [[ -n "${target_triple:-}" ]]; then
args+=("--target=$target_triple")
fi
if [[ -n "${ENABLE_NUMA:-}" ]]; then
args+=("--enable-numa")
else
args+=("--disable-numa")
fi
start_section "configuring"
# See https://stackoverflow.com/questions/7577052 for a rationale for the
# args[@] symbol-soup below.
run ./configure \
--enable-tarballs-autodownload \
"${args[@]+"${args[@]}"}" \
GHC="$GHC" \
HAPPY="$HAPPY" \
ALEX="$ALEX" \
|| ( cat config.log; fail "configure failed" )
end_section "configuring"
}
# Build GHC and a binary distribution with the make build system.
# Honours VERBOSE and any externally supplied MAKE_ARGS.
function build_make() {
prepare_build_mk
if [[ -z "$BIN_DIST_PREP_TAR_COMP" ]]; then
fail "BIN_DIST_PREP_TAR_COMP is not set"
fi
# Split MAKE_ARGS into separate words. The previous code passed the
# quoted string "$MAKE_ARGS" as a single argument, so make received
# e.g. " V=1" as one malformed word instead of a variable assignment.
read -r -a make_args <<< "${MAKE_ARGS:-}"
if [[ -n "${VERBOSE:-}" ]]; then
make_args+=("V=1")
else
make_args+=("V=0")
fi
run "$MAKE" -j"$cores" "${make_args[@]+"${make_args[@]}"}"
run "$MAKE" -j"$cores" binary-dist-prep TAR_COMP_OPTS=-1
ls -lh "$BIN_DIST_PREP_TAR_COMP"
}
# Pull previously recorded performance metrics (git notes) for baselines.
function fetch_perf_notes() {
info "Fetching perf notes..."
"$TOP/.gitlab/test-metrics.sh" pull
}
# Push newly recorded performance metrics; skipped for cross builds since
# a cross-compiled testsuite cannot run.
function push_perf_notes() {
if [ -n "${CROSS_TARGET:-}" ]; then
info "Can't test cross-compiled build."
return
fi
info "Pushing perf notes..."
"$TOP/.gitlab/test-metrics.sh" push
}
# Figure out which commit should be used by the testsuite driver as a
# performance baseline. See Note [The CI Story].
function determine_metric_baseline() {
if [ -n "${CI_MERGE_REQUEST_DIFF_BASE_SHA:-}" ]; then
PERF_BASELINE_COMMIT="$CI_MERGE_REQUEST_DIFF_BASE_SHA"
export PERF_BASELINE_COMMIT
info "Using $PERF_BASELINE_COMMIT for performance metric baseline..."
fi
}
# Run the make-based testsuite against the prepared binary distribution.
function test_make() {
if [ -n "${CROSS_TARGET:-}" ]; then
info "Can't test cross-compiled build."
return
fi
run "$MAKE" test_bindist TEST_PREP=YES
# Unset GitLab CI_* variables which can destabilise performance tests.
# The pattern is now quoted and anchored: unquoted CI_* was subject to
# filename globbing, and as a regex "CI_*" means "CI" followed by any
# number of underscores, matching far more variables than intended.
(unset $(compgen -v | grep "^CI_");
run "$MAKE" V=0 VERBOSE=1 test \
THREADS="$cores" \
JUNIT_FILE=../../junit.xml \
EXTRA_RUNTEST_OPTS="${RUNTEST_ARGS:-}")
}
# Build GHC and a binary distribution with the Hadrian build system.
function build_hadrian() {
if [ -z "${BIN_DIST_NAME:-}" ]; then
fail "BIN_DIST_NAME not set"
fi
# N.B. First build Hadrian, unsetting MACOSX_DEPLOYMENT_TARGET which may warn
# if the bootstrap libraries were built with a different version expectation.
MACOSX_DEPLOYMENT_TARGET="" run_hadrian stage1:exe:ghc-bin
run_hadrian binary-dist -V
mv _build/bindist/ghc*.tar.xz "$BIN_DIST_NAME.tar.xz"
}
# Test a Hadrian build: verify static linkage when requested, install the
# bindist, then run the stage1 and installed-compiler testsuites.
function test_hadrian() {
if [ -n "${CROSS_TARGET:-}" ]; then
info "Can't test cross-compiled build."
return
fi
# Ensure that statically-linked builds are actually static
if [[ "${BUILD_FLAVOUR}" = *static* ]]; then
bad_execs=""
for binary in _build/stage1/bin/*; do
# ldd succeeding means the binary has dynamic-object references.
if ldd "${binary}" &> /dev/null; then
warn "${binary} is not static!"
ldd "${binary}"
echo
bad_execs="$bad_execs $binary"
fi
done
if [ -n "$bad_execs" ]; then
fail "the following executables contain dynamic-object references: $bad_execs"
fi
fi
# Install the binary distribution into $TOP/_build/install.
cd _build/bindist/ghc-*/
case "$(uname)" in
MSYS_*|MINGW*)
mkdir -p "$TOP"/_build/install
cp -a * "$TOP"/_build/install
;;
*)
read -r -a args <<< "${INSTALL_CONFIGURE_ARGS:-}"
run ./configure --prefix="$TOP"/_build/install "${args[@]}"
run "$MAKE" install
;;
esac
cd ../../../
# First run the stage1 (in-tree compiler) test subset...
run_hadrian \
test \
--test-root-dirs=testsuite/tests/stage1 \
--test-compiler=stage1 \
"runtest.opts+=${RUNTEST_ARGS:-}"
# ...then the full testsuite against the installed compiler.
run_hadrian \
test \
--summary-junit=./junit.xml \
--test-have-intree-files \
--test-compiler="$TOP/_build/install/bin/ghc$exe" \
"runtest.opts+=${RUNTEST_ARGS:-}"
}
# Compile the in-tree Cabal library with $HC as a compiler-performance
# benchmark, writing dumps, RTS stats and the build log under $OUT.
function cabal_test() {
if [ -z "$OUT" ]; then
fail "OUT not set"
fi
start_section "Cabal test: $OUT"
mkdir -p "$OUT"
run "$HC" \
-hidir tmp -odir tmp -fforce-recomp \
-dumpdir "$OUT/dumps" -ddump-timings \
+RTS --machine-readable "-t$OUT/rts.log" -RTS \
-ilibraries/Cabal/Cabal/src -XNoPolyKinds Distribution.Simple \
"$@" 2>&1 | tee "$OUT/log"
rm -Rf tmp
end_section "Cabal test: $OUT"
}
# Record compiler-performance data points: compile Cabal at -O0/-O1/-O2
# and note the compiler and Cabal commits the numbers belong to.
function run_perf_test() {
if [ -z "$HC" ]; then
fail "HC not set"
fi
mkdir -p out
git -C libraries/Cabal/ rev-parse HEAD > out/cabal_commit
$HC --print-project-git-commit-id > out/ghc_commit
OUT=out/Cabal-O0 cabal_test -O0
OUT=out/Cabal-O1 cabal_test -O1
OUT=out/Cabal-O2 cabal_test -O2
}
# Persist the cabal store/index for the next CI job.
function save_cache () {
info "Storing cabal cache from $CABAL_DIR to $CABAL_CACHE..."
cp -Rf "$CABAL_DIR" "$CABAL_CACHE"
}
# Remove build artifacts; make clean failures are tolerated (|| true).
function clean() {
rm -R tmp
run "$MAKE" --quiet clean || true
run rm -Rf _build
}
# Invoke Hadrian with the configured flavour, parallelism and extra args.
function run_hadrian() {
if [ -z "${BUILD_FLAVOUR:-}" ]; then
fail "BUILD_FLAVOUR not set"
fi
if [ -z "${BIGNUM_BACKEND:-}" ]; then BIGNUM_BACKEND="gmp"; fi
read -r -a args <<< "${HADRIAN_ARGS:-}"
if [ -n "${VERBOSE:-}" ]; then args+=("-V"); fi
# Before running the compiler, unset gitlab CI_* env vars as these
# can destabilise the performance tests (see #20341). The grep pattern
# is quoted and anchored: unquoted CI_* was glob-expandable and, as a
# regex, matched any variable containing "CI".
(unset $(compgen -v | grep "^CI_");
run "${HADRIAN_PATH:-hadrian/build-cabal}" \
--flavour="$BUILD_FLAVOUR" \
-j"$cores" \
--broken-test="${BROKEN_TESTS:-}" \
--bignum="$BIGNUM_BACKEND" \
"${args[@]+"${args[@]}"}" \
"$@")
}
# A convenience function to allow debugging in the CI environment.
# Runs the given command, or an interactive bash when none is given.
function shell() {
# The original read `local cmd="*@"` - a literal string, not an
# expansion - so the empty check never fired and the literal word
# "*@" was executed. Use the positional parameters directly instead.
if [ $# -eq 0 ]; then
run bash -i
else
run "$@"
fi
}
setup_locale
# Platform-specific environment initialization
if [ -n "${HERMETIC:-}" ]; then
export CABAL_DIR="$TOP/cabal"
# We previously set HOME=/nonexistent but apparently nix wants $HOME to exist
# so sadly we must settle for someplace writable.
export HOME="$TOP/tmp-home"
else
BIN_DIST_NAME="${BIN_DIST_NAME:-}"
case "$(uname)" in
MSYS_*|MINGW*) CABAL_DIR="$APPDATA/cabal" ;;
*) CABAL_DIR="$HOME/.cabal" ;;
esac
fi
# Executable suffix and Windows-specific CABAL_DIR path translation.
case "$(uname)" in
MSYS_*|MINGW*)
exe=".exe"
# N.B. cabal-install expects CABAL_DIR to be a Windows path
CABAL_DIR="$(cygpath -w "$CABAL_DIR")"
;;
*)
exe=""
;;
esac
# Per-OS tool names and bootstrap triples (FreeBSD needs GNU make/tar).
MAKE="make"
TAR="tar"
case "$(uname)" in
MSYS_*|MINGW*) mingw_init ;;
Darwin) boot_triple="x86_64-apple-darwin" ;;
FreeBSD)
boot_triple="x86_64-portbld-freebsd"
MAKE="gmake"
TAR="gtar"
;;
Linux) ;;
*) fail "uname $(uname) is not supported" ;;
esac
if [ -n "${CROSS_TARGET:-}" ]; then
info "Cross-compiling for $CROSS_TARGET..."
target_triple="$CROSS_TARGET"
fi
echo "Branch name ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-}"
# Ignore performance improvements in @marge-bot batches.
# See #19562.
if [ "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-}" == "wip/marge_bot_batch_merge_job" ]; then
if [ -z "${IGNORE_PERF_FAILURES:-}" ]; then
IGNORE_PERF_FAILURES="decreases"
echo "Ignoring perf failures"
fi
fi
echo "CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH:-}"
echo "CI_PROJECT_PATH: ${CI_PROJECT_PATH:-}"
# Likewise ignore perf decreases on the upstream master branch.
if [ "${CI_COMMIT_BRANCH:-}" == "master" ] && [ "${CI_PROJECT_PATH:-}" == "ghc/ghc" ]; then
if [ -z "${IGNORE_PERF_FAILURES:-}" ]; then
IGNORE_PERF_FAILURES="decreases"
echo "Ignoring perf failures"
fi
fi
if [ -n "${IGNORE_PERF_FAILURES:-}" ]; then
RUNTEST_ARGS="--ignore-perf-failures=$IGNORE_PERF_FAILURES"
fi
determine_metric_baseline
set_toolchain_paths
# Dispatch on the requested mode (first script argument).
case $1 in
usage) usage ;;
setup) setup && cleanup_submodules ;;
configure) time_it "configure" configure ;;
build_make) time_it "build" build_make ;;
test_make)
fetch_perf_notes
res=0
time_it "test" test_make || res=$?
push_perf_notes
exit $res ;;
build_hadrian) time_it "build" build_hadrian ;;
# N.B. Always push notes, even if the build fails. This is okay to do as the
# testsuite driver doesn't record notes for tests that fail due to
# correctness.
test_hadrian)
fetch_perf_notes
res=0
time_it "test" test_hadrian || res=$?
push_perf_notes
exit $res ;;
run_hadrian) shift; run_hadrian "$@" ;;
perf_test) run_perf_test ;;
cabal_test) cabal_test ;;
clean) clean ;;
save_cache) save_cache ;;
# Drop the mode word before forwarding, mirroring the run_hadrian case;
# previously the literal word "shell" was passed through as the first
# argument of the command to run.
shell) shift; shell "$@" ;;
*) fail "unknown mode $1" ;;
esac
|
#!/bin/bash -e
# hello-go-deploy-gce remove-github-ssh-keys.sh
echo " "
echo "************************************************************************"
echo "************************************ remove-github-ssh-keys.sh (START) *"
echo "You are root in /home/packer"
echo " "
echo "cd /root/.ssh"
cd /root/.ssh
echo "Remove keys"
# -f keeps this step idempotent: under "bash -e" a plain rm would abort
# the whole provisioning run if a key file is already absent.
rm -f id_rsa id_rsa.pub
echo " "
echo "************************************** remove-github-ssh-keys.sh (END) *"
echo "************************************************************************"
echo " " |
# Dataset fetch script. Only MNIST is active; the CIFAR-10/100 and SVHN
# sections are intentionally kept commented out for optional use.
# mkdir -p cifar10
# #CIFAR-10 dataset
# wget -O cifar10/cifar.tar.gz http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
# tar -xzvf cifar10/cifar.tar.gz -C cifar10 --strip-components=1
# rm cifar10/cifar.tar.gz
# #CIFAR-100 dataset
# mkdir -p cifar100
# wget -O cifar100/cifar100.tar.gz https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz
# tar -xzvf cifar100/cifar100.tar.gz -C cifar100 --strip-components=1
# rm cifar100/cifar100.tar.gz
mkdir -p mnist
#MNIST dataset
# NOTE(review): downloads are not checksum-verified and wget failures are
# not handled - confirm whether that is acceptable for this pipeline.
wget -O mnist/train-images-idx3-ubyte.gz http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
wget -O mnist/train-labels-idx1-ubyte.gz http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz
wget -O mnist/t10k-images-idx3-ubyte.gz http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
wget -O mnist/t10k-labels-idx1-ubyte.gz http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz
# mkdir -p svhn
# #SVHN dataset
# wget -O svhn/train_32x32.mat http://ufldl.stanford.edu/housenumbers/train_32x32.mat
# wget -O svhn/test_32x32.mat http://ufldl.stanford.edu/housenumbers/test_32x32.mat
# wget -O svhn/extra_32x32.mat http://ufldl.stanford.edu/housenumbers/extra_32x32.mat
|
<gh_stars>0
// Snapshot tests for NetworkCommunities and its named sub-components,
// rendered shallowly with enzyme.
import NetworkCommunities, { Banner, SearchBar, CommunitiesList, CommunityCard } from './NetworkCommunities'
import { shallow } from 'enzyme'
import React from 'react'
describe('NetworkCommunities', () => {
it('renders correctly', () => {
const network = { id: 78 }
const communities = [
{ id: 9 }, { id: 8 }, { id: 7 }
]
// Action props are stubbed; only the rendered output is asserted.
const wrapper = shallow(<NetworkCommunities
network={network}
communities={communities}
search='fo'
sortBy='name'
fetchNetwork={jest.fn()}
setSearch={() => {}}
setSort={() => {}}
fetchMoreCommunities={() => {}} />)
expect(wrapper).toMatchSnapshot()
})
})
describe('Banner', () => {
it('renders correctly', () => {
const network = {
id: 78,
memberCount: 123
}
const wrapper = shallow(<Banner
network={network}
communitiesTotal='12' />)
expect(wrapper).toMatchSnapshot()
})
})
describe('SearchBar', () => {
it('renders correctly', () => {
const wrapper = shallow(<SearchBar
sortBy='name'
search='fo'
setSearch={() => {}}
setSort={() => {}} />)
expect(wrapper).toMatchSnapshot()
})
})
describe('CommunitiesList', () => {
it('renders correctly', () => {
const communities = [
{ id: 9 }, { id: 8 }, { id: 7 }
]
const wrapper = shallow(<CommunitiesList
communities={communities}
fetchMoreCommunities={() => {}} />)
expect(wrapper).toMatchSnapshot()
})
})
describe('CommunitiesList', () => {
it('renders correctly', () => {
const community = {
name: 'Foom',
avatarUrl: 'foom.png',
numMembers: 77
}
const wrapper = shallow(<CommunityCard
community={community} />)
expect(wrapper).toMatchSnapshot()
})
})
|
#!/usr/bin/env bash
# Run the test suite; -rA prints a short summary line for every test outcome.
pytest -rA
|
// Parsing, serialisation and query-manipulation tests for pc.URI,
// based on the RFC 3986 example URI "http://a/b/c/d;p?q=r#l".
describe('pc.URI', function () {
it("Parsed, all sections", function () {
var s = "http://a/b/c/d;p?q=r#l";
var uri = new pc.URI(s);
expect(uri.scheme).to.equal("http");
expect(uri.authority).to.equal("a");
expect(uri.path).to.equal("/b/c/d;p");
expect(uri.query).to.equal("q=r");
expect(uri.fragment).to.equal("l");
});
it("Parse, no scheme", function () {
var s = "//a/b/c/d;p?q=r#l";
var uri = new pc.URI(s);
// Missing components are expected to be undefined, not empty strings.
var undef;
expect(uri.scheme).to.equal(undef);
expect(uri.authority).to.equal("a");
expect(uri.path).to.equal("/b/c/d;p");
expect(uri.query).to.equal("q=r");
expect(uri.fragment).to.equal("l");
});
it("Parse, no authority", function () {
var s = "/b/c/d;p?q=r#l";
var uri = new pc.URI(s);
var undef;
expect(uri.scheme).to.equal(undef);
expect(uri.authority).to.equal(undef);
expect(uri.path).to.equal("/b/c/d;p");
expect(uri.query).to.equal("q=r");
expect(uri.fragment).to.equal("l");
});
it("Parse, no query", function () {
var s = "http://a/b/c/d;p#l";
var uri = new pc.URI(s);
var undef;
expect(uri.scheme).to.equal("http");
expect(uri.authority).to.equal("a");
expect(uri.path).to.equal("/b/c/d;p");
expect(uri.query).to.equal(undef);
expect(uri.fragment).to.equal("l");
});
it("Parse, no fragment", function () {
var s = "http://a/b/c/d;p?q=r";
var uri = new pc.URI(s);
var undef;
expect(uri.scheme).to.equal("http");
expect(uri.authority).to.equal("a");
expect(uri.path).to.equal("/b/c/d;p");
expect(uri.query).to.equal("q=r");
expect(uri.fragment).to.equal(undef);
});
// Round-trip: parsing then serialising must reproduce the input.
it("toString", function () {
var s = "http://a/b/c/d;p?q=r#l";
var uri = new pc.URI(s);
var r = uri.toString();
expect(s).to.equal(r);
});
it("Edit query", function() {
var s = "http://example.com";
var uri = new pc.URI(s);
uri.query = "q=abc";
expect(uri.toString()).to.equal("http://example.com?q=abc");
// Clearing the query must also remove the "?" separator.
uri.query = "";
expect(uri.toString()).to.equal(s);
});
it("getQuery", function () {
var s = "http://example.com/test?a=1&b=string&c=something%20spaced";
var uri = new pc.URI(s);
var q = uri.getQuery();
expect(q.a).to.equal("1");
expect(q.b).to.equal("string");
// Percent-encoded values are expected to be decoded.
expect(q.c).to.equal("something spaced");
});
// Fix: test-name typo "emtpy" -> "empty".
it("getQuery: empty", function () {
    var s = "http://example.com/test";
    var uri = new pc.URI(s);
    var q = uri.getQuery();
    expect(Object.keys(q).length).to.equal(0);
});
it("setQuery", function () {
var uri = new pc.URI("http://example.com/test");
var q = {
key: "value",
"with space": "\""
};
// Keys and values are expected to be percent-encoded on serialisation.
uri.setQuery(q);
expect("key=value&with%20space=%22").to.equal(uri.query)
});
// createURI builds a URI string from scheme/authority/path, or from the
// host / hostpath shorthands.
it("createURI", function () {
var uri;
uri = pc.createURI({
scheme: "http",
authority: "example.com",
path: "/abc"
});
expect("http://example.com/abc").to.equal(uri);
uri = pc.createURI({
host: "http://example.com",
path: "/abc"
});
expect("http://example.com/abc").to.equal(uri);
uri = pc.createURI({
hostpath: "http://example.com/abc",
});
expect("http://example.com/abc").to.equal(uri);
uri = pc.createURI({
hostpath: "http://example.com/abc",
query: "a=b&c=d"
});
expect("http://example.com/abc?a=b&c=d").to.equal(uri);
});
// Mutually exclusive option combinations must throw.
it("createURI, exceptions", function () {
expect(function() {
pc.createURI({
scheme: "http",
host: "http://test.com"
});
}).to.throw();
expect(function() {
pc.createURI({
authority: "http",
host: "http://test.com"
});
}).to.throw();
expect(function() {
pc.createURI({
scheme: "http",
hostpath: "http://test.com"
});
}).to.throw();
expect(function() {
pc.createURI({
authority: "http",
hostpath: "http://test.com"
});
}).to.throw();
expect(function() {
pc.createURI({
scheme: "http",
authority: "e.com",
host: "http://test.com"
});
}).to.throw();
expect(function() {
pc.createURI({
scheme: "abc",
authority: "http",
hostpath: "http://test.com"
});
}).to.throw();
expect(function() {
pc.createURI({
host: "http://test.com",
hostpath: "http://test.com"
});
}).to.throw();
});
});
|
#!/usr/bin/env bash
set -euo pipefail
# Start local development services (Postgres, Pebble ACME server, Redis,
# fake SMTP) if they are not already running; logs go under $LOGS.
LOGS=${TMPDIR:-/app/logs}
if ! pgrep -x postgres > /dev/null; then
echo "Starting Postgresql"
(
cd "$PGDATA"
# Truncate the old log, then start Postgres and load both broker schemas.
echo > "$LOGS/postgres.log"
pg_ctl -l "$LOGS/postgres.log" start
psql -h localhost --dbname="local-development-cdn" -f /app/docker/cdn-broker-schema.sql
psql -h localhost --dbname="local-development-domain" -f /app/docker/domain-broker-schema.sql
)
fi
if ! pgrep -x pebble > /dev/null; then
echo "Starting Pebble"
(
cd /
# Always-valid validation and no nonce rejection keep local ACME flows simple.
PEBBLE_VA_ALWAYS_VALID=1 PEBBLE_WFE_NONCEREJECT=0 pebble \
-config="/test/config/pebble-config.json" \
-dnsserver="127.0.0.1:8053" \
-strict \
> "$LOGS/pebble.log" 2>&1 &
)
fi
if ! pgrep -x redis-server > /dev/null; then
echo "Starting Redis"
(
cd /app
redis-server tests/redis.conf \
> "$LOGS/redis.log" 2>&1 &
)
fi
if ! pgrep -f 'python -m smtpd' > /dev/null; then
echo "Starting fake smtpd"
(
# NOTE(review): the smtpd module was removed in Python 3.12 - confirm the
# pinned interpreter version or migrate to aiosmtpd.
python -m smtpd -n -c DebuggingServer localhost:1025 \
>> "$LOGS/smtpd.log" 2>&1 &
)
fi
|
n = 20


def calculate_sum_of_primes(n):
    """Return the sum of all prime numbers <= n (0 when n < 2)."""
    total = 0
    if n >= 2:
        primes = [2]
        # Even numbers > 2 are never prime, so only odd candidates are
        # trial-divided by the primes found so far.
        for candidate in range(3, n + 1, 2):
            if all(candidate % p != 0 for p in primes):
                primes.append(candidate)
        total = sum(primes)
    return total
print(calculate_sum_of_primes(n)) |
#!/bin/bash
# Resolve symbolic links and set the absolute path to $SOURCE variable
SOURCE="$(readlink -f "$0")"
# Set the necessary environment variables by sourcing the files ./files/common and ./out/common
# (paths are taken relative to the resolved script location, so the script
# works regardless of the caller's working directory).
source "$(dirname "$SOURCE")/files/common"
source "$(dirname "$SOURCE")/out/common"
# Change the directory to the resolved path stored in the $DIR variable
DIR="$(dirname "$SOURCE")"
cd "$DIR"
# Additional commands or operations as required for the script
# ...
# ... |
#!/bin/sh
set -e
BASEDIR=$(dirname "$0")
# POSIX test(1) marks -o as obsolescent; use two tests joined with ||.
if [ "$1" = "-f" ] || [ "$1" = "--force" ]; then
echo "Forced to replace Vagrantfile"
CMD="cp"
else
echo "Generate Vagrantfile if not exist"
CMD="cp -n"
fi
# $CMD stays unquoted on purpose so "cp -n" splits into command + flag;
# the file paths are now quoted so a $BASEDIR containing spaces works.
$CMD "$BASEDIR/Vagrantfile.bak" "$BASEDIR/Vagrantfile"
# Key points
# 1. set -e: exit immediately when any command fails.
# 2. $(dirname "$0"): $0 is the script's own path; dirname yields its directory.
# echo $0
# echo $BASEDIR
# printed results:
# runme.sh
# .
# 3. The invocation argument decides whether the file is force-copied or
# only copied when missing; effectively:
# cp ./Vagrantfile.bak ./Vagrantfile
# 4. cp -n
# -n, --no-clobber: do not overwrite an existing file (overrides -i)
|
<reponame>drorkrief/merkazakdusha2<gh_stars>0
const jwt = require('jsonwebtoken');
// NOTE(review): hard-coded signing secret committed to source control -
// consider loading it from configuration or the environment.
const secret = 'ys0$-@d';
// Create a signed JWT for the given user, carrying the email and an
// expiry 20 minutes from now.
function createToken(user){
    const validTimeSec = 20*60; // == token expires after 20 minutes
    // The exp claim is a NumericDate in whole seconds; floor the
    // millisecond division so a fractional timestamp is never emitted.
    const expirationDate = Math.floor(Date.now() / 1000) + validTimeSec;
    const token = jwt.sign({email : user.email , exp : expirationDate}, secret );
    return token;
}
module.exports = {
createToken: createToken ,
secret : secret
};
|
<filename>jsource/ideal/development/types/concrete_type_action.java<gh_stars>0
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.types;
import ideal.library.elements.*;
import ideal.runtime.elements.*;
import ideal.development.elements.*;
/**
 * A {@code type_action} that always yields one fixed, already-resolved type.
 */
public class concrete_type_action extends type_action {
// The type this action resolves to; never null (asserted in the constructor).
private final type the_type;
concrete_type_action(type the_type, position source) {
super(source);
assert the_type != null;
this.the_type = the_type;
// This may happen if we are creating a concrete type action in the import.
// TODO: cleaner way to handle this.
// NOTE(review): the "false &&" guard makes this panic unreachable; it is
// kept as a deliberately disabled debugging check per the comment above.
if (false && the_type == core_types.error_type()) {
utilities.panic("Error action that's not an error_signal");
}
}
@Override
public type get_type() {
return the_type;
}
}
|
#!/bin/bash
# Remove stale bytecode so coverage reflects the current sources only.
find . -name "*.pyc" -exec rm {} \;
# Test the command's exit status directly rather than inspecting $?
# afterwards, which silently breaks if a command is ever inserted between
# the run and the check.
if coverage run -p --source=tests,dmyplant -m unittest; then
coverage combine
echo -e "\n\n================================================"
echo "Test Coverage"
coverage report
echo -e "\nrun \"coverage html\" for full report"
echo -e "\n"
fi
|
import path from 'path'
import webpack from 'webpack'
import {ModuleMapPlugin, AssetListPlugin} from '@karma.run/webpack'
// Webpack configuration factory: `mode` is "production" or a dev mode and
// switches hashing, source maps and the public asset URL.
export default (mode: string) =>
//@ts-ignore
({
entry: {
client: './src/client/index.ts',
worker: './src/worker/index.ts'
},
output: {
// Content-hashed filenames in production enable long-term caching.
filename: mode === 'production' ? '[name].[chunkhash].js' : '[name].js',
path: path.resolve(__dirname, 'assets'),
publicPath: mode === 'production' ? '/assets' : 'http://localhost:5001/'
},
resolve: {
extensions: ['.ts', '.tsx', '.js'],
alias: {
// Required by react-hot-loader for hot reloading in development.
'react-dom': '@hot-loader/react-dom'
}
},
module: {
rules: [
{
test: /\.tsx?$/,
exclude: /node_modules/,
loader: 'babel-loader'
}
]
},
devtool: mode === 'production' ? 'source-map' : 'cheap-module-source-map',
plugins: [
// Emit module/asset manifests consumed by the server at runtime.
new ModuleMapPlugin({filename: './dist/moduleMap.json'}),
new AssetListPlugin({filename: './dist/assetList.json'})
],
devServer: {
writeToDisk: true,
public: 'http://localhost:5001/',
publicPath: 'http://localhost:5001/',
host: '0.0.0.0',
port: 5001,
headers: {'Access-Control-Allow-Origin': '*'}
}
} as webpack.Configuration)
|
<filename>components/sublanding/ArticleCard.tsx
import { ArticleGuide } from 'components/context/ProductSubLandingContext'
import { Label } from '@primer/components'
type Props = {
card: ArticleGuide
typeLabel: string
}
// Renders one article guide as a linked card with title, type label,
// intro text and topic labels.
// NOTE(review): card.title and card.intro are injected via
// dangerouslySetInnerHTML - they are assumed to be pre-sanitised HTML
// from a trusted pipeline; confirm upstream sanitisation.
export const ArticleCard = ({ card, typeLabel }: Props) => {
return (
<div data-testid="article-card" className="d-flex col-12 col-md-4 pr-0 pr-md-6 pr-lg-8">
<a className="no-underline d-flex flex-column py-3 border-bottom" href={card.href}>
<h4 className="h4 color-fg-default mb-1" dangerouslySetInnerHTML={{ __html: card.title }} />
<div className="h6 text-uppercase" data-testid="article-card-type">
{typeLabel}
</div>
<p className="color-fg-muted my-3" dangerouslySetInnerHTML={{ __html: card.intro }} />
{card.topics.length > 0 && (
<div>
{card.topics.map((topic) => {
return (
<Label
key={topic}
data-testid="article-card-topic"
variant="small"
sx={{ bg: 'accent.emphasis', mr: 1 }}
>
{topic}
</Label>
)
})}
</div>
)}
</a>
</div>
)
}
|
# ! download
# Copy generated PNGs for each listed StyleGAN2 model from the remote
# Biowulf/Helix host into a matching local directory tree.
localdir=/cygdrive/c/Users/duongdb/Documents/FH_OCT_08172021/Stylegan2
mkdir $localdir
maindir=/data/duongdb/FH_OCT_08172021/Stylegan2
for mod in 00000-Tf256RmFold3+EyePos+FH-paper256-kimg3000-ada-target0.8-resumeffhq256-divlabel4
do
mkdir $localdir/$mod
scp duongdb@helix.nih.gov:$maindir/$mod/*png $localdir/$mod
done
|
#!/usr/bin/env bash
# Edit this file to use your 16-color configuration. The colors are chosen in the style of a
# 16-color terminal.
# Naming: colorN is the normal variant, color(N+8) (8..F) the bright variant.
# black
color0="#282a2e"
color8="#373b41"
# red
color1="#a54242"
color9="#cc6666"
# green
color2="#8c9440"
colorA="#b5bd68"
# yellow
color3="#de935f"
colorB="#f0c674"
# blue
color4="#5f819d"
colorC="#81a2be"
# magenta
color5="#85678f"
colorD="#b294bb"
# cyan
color6="#5e8d87"
colorE="#8abeb7"
# white
color7="#707880"
colorF="#c5c8c6"
|
package uk.co.cpascoe.huffman;
import java.lang.*;
import java.util.*;
/**
 * A node in a Huffman coding tree, ordered by symbol frequency.
 */
public abstract class Node implements Comparable<Node> {
/** Returns the total frequency of the symbols under this node. */
public abstract int getFrequency();
/** Decodes one symbol by walking the tree driven by {@code bm}. */
public abstract byte decode(BitManager bm);
public void print(StringBuilder str) {
this.print(str, "");
}
public abstract void generateEncoding(HuffmanEncodingTables encoding, List<Byte> currentEncoding);
public abstract void print(StringBuilder str, String line);
@Override
public int compareTo(Node other) {
// Integer.compare avoids the overflow of the subtraction idiom when the
// frequency difference exceeds Integer.MAX_VALUE.
return Integer.compare(this.getFrequency(), other.getFrequency());
}
}
|
#!/bin/bash
set -e
# Release pipeline for sitespeed.io: publish to npm via np, then build and
# push the Docker image variants, then regenerate version-dependent docs.
# Remove the local sitespeed-result dir and node modules to start clean
rm -fR sitespeed-result
# Login early
docker login
# Super simple release script for sitespeed.io
# Lets use it it for now and make it better over time :)
# You need np for this to work
# npm install --global np
np $* --no-yarn --branch main
# Read the freshly published version from package.json.
PACKAGE_VERSION=$(node -e 'console.log(require("./package").version)')
docker build --no-cache -t sitespeedio/sitespeed.io:$PACKAGE_VERSION -t sitespeedio/sitespeed.io:latest .
docker push sitespeedio/sitespeed.io:$PACKAGE_VERSION
docker push sitespeedio/sitespeed.io:latest
docker build --no-cache -t sitespeedio/sitespeed.io:$PACKAGE_VERSION-slim --file Dockerfile-slim .
docker push sitespeedio/sitespeed.io:$PACKAGE_VERSION-slim
docker build -t sitespeedio/sitespeed.io:$PACKAGE_VERSION-plus1 --build-arg version=$PACKAGE_VERSION --file docker/Dockerfile-plus1 .
docker push sitespeedio/sitespeed.io:$PACKAGE_VERSION-plus1
docker build -t sitespeedio/sitespeed.io:$PACKAGE_VERSION-webpagetest --build-arg version=$PACKAGE_VERSION --file docker/Dockerfile-webpagetest .
docker push sitespeedio/sitespeed.io:$PACKAGE_VERSION-webpagetest
# Update to latest version in the docs
bin/sitespeed.js --version | tr -d '\n' > docs/_includes/version/sitespeed.io.txt
# Generate the help for the docs
bin/sitespeed.js --help > docs/documentation/sitespeed.io/configuration/config.md
# Generate friendly names from code
node release/friendlyNames.js > docs/documentation/sitespeed.io/configure-html/friendlynames.md
node release/friendlyNamesBudget.js > docs/documentation/sitespeed.io/performance-budget/friendlynames.md
# Generate the RSS feeds
node release/feed.js |
<filename>2021-05-09/打卡签到/pages/code/code.js
// pages/code/code.js
var Api = require('../../utils/api.js')
Page({
data:{},
onLoad:function(options){
// Page init; options carries the parameters from the page navigation.
this.setData({
encrypt: options.encrypt
})
wx.getStorage({
key: 'token',
success: (res) => {
// success
this.setData({
token: res.data
})
},
// No stored token: send the user to the login page.
fail: function() {
wx.redirectTo({ url: '/pages/login/login' })
}
})
},
onReady:function(){
// Page finished rendering; kick off the location-based punch-in.
this.getLocation()
},
onShow:function(){
// Page shown
},
onHide:function(){
// Page hidden
},
onUnload:function(){
// Page closed
},
// Fetch the device location and punch in with it; on failure, navigate
// to the failure page.
getLocation: function(cb) {
wx.getLocation({
type: 'wgs84', // default wgs84 returns GPS coordinates; gcj02 returns coordinates usable with wx.openLocation
success: (res) => {
// success
this.punch(res.latitude, res.longitude)
wx.showToast({
title: '正在打卡',
icon: 'loading',
duration: 2000
})
},
fail: function() {
wx.redirectTo({
url: '/pages/fail/fail',
})
}
})
},
// POST the punch-in (location + timestamp) to the backend and route to
// the success/fail page according to the HTTP status code.
punch: function (latitude, longitude) {
var now = new Date()
wx.request({
url: Api.punch + 'encrypt=' + this.data.encrypt + '&token=' + this.data.token,
data: {
latitude: latitude,
longitude: longitude,
time: now
},
method: 'POST',
success: (res) => {
// success
if (res.statusCode == 201) {
wx.navigateTo({
url: '/pages/success/success?place=' + res.data.place + '&time=' + res.data.h_m_s + '&status=' + res.data.owner.status,
})
}
else if (res.statusCode == 403) {
// 403: outside the allowed punch-in area.
wx.navigateTo({
url: '/pages/fail/fail?info=' + '超出范围',
})
}
else {
wx.navigateTo({
url: '/pages/fail/fail',
})
}
}
})
},
}) |
<filename>encoding/encode.go
// Copyright (c) 2019 Cisco and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Mapping funtions between objects and key-value pairs.
package encoding
import (
"encoding/json"
"errors"
"fmt"
"reflect"
"strings"
)
// Sentinel errors returned by the encode/find/set/delete operations below.
// Callers compare them by identity (err == ErrX).
var ErrFirstSlash = errors.New("Key must start with /")
var ErrWrongFieldType = errors.New("Provided field is of wrong type")
var ErrWrongFieldName = errors.New("Provided field does not exist")
var ErrNotImplemented = errors.New("Not implemented")
var ErrUnsupportedType = errors.New("Object type not supported")
var ErrFindPathPastObject = errors.New("Provided path goes past an encoded object")
var ErrFindObjectNotFound = errors.New("Object was not found")
var ErrFindKeyNotFound = errors.New("Key was not found in map")
var ErrFindKeyInvalid = errors.New("Invalid key for this object")
var ErrFindPathNotFound = errors.New("Object not found at specified path")
var ErrFindSetNoExists = errors.New("Cannot set non existent object")
var ErrFindSetWrongType = errors.New("The provided object is of wrong type")
var ErrScalarType = errors.New("Cannot recursively store scalar type")
var ErrTagFirstSlash = errors.New("Structure field tag cannot start with /")
var ErrFindKeyWrongType = errors.New("Provided map key field is of wrong type")
var ErrNotMapIndex = errors.New("Specified object is not a map index")

// State storing keys and values before they get stored, for one or multiple objects.
type encodeState struct {
    // Accumulated key -> serialized-value pairs produced by encode().
    kvs map[string]string
}

// State representing an object as well as its path in some parent objects.
// This path is not absolute, and this struct does not keep memory of
// the path root.
type objectPath struct {
    // A value pointing to the current object.
    // It may be non-addressable or contain a Zero value if the object does not exist currently.
    value reflect.Value
    // The type of the current object, used to go through the structure hierarchy even when
    // no value is found.
    vtype reflect.Type
    // The key path used to reach the current object, but not referring to the
    // object path itself (see 'format').
    keypath []string
    // The set of specific fields (attributes names, keys and indexes) used
    // to arrive to this object.
    fields []interface{}
    // The key format to be used to encode children of this object
    // (attributes, map values, array values, etc...).
    // e.g. [], This object is stored as a JSON blob.
    // e.g. [""], Attributes of the struct are stored directly following the struct path.
    // e.g. [ <prefix>... , ""], <prefix> will prefix all attribute paths.
    // e.g. [ <prefix>... , "{key}", <suffix>... ], map values are stored at "prefix/<key>" using <suffix> format.
    // e.g. [ <prefix>... , "{index}", <suffix>... ], array or slice values are stored at "prefix/<index>" using <suffix> format.
    format []string
    // When setting a value, traversing a map will make a value non-addressable.
    // We have to remember which is the last crossed map, so as to make the traversal addressable if necessary.
    lastMapIndirection *objectPath
}

// Options driving the findBy* traversals.
type findOptions struct {
    // Creates the searched object if it does not exist yet.
    Create bool
    // When non-nil, sets the searched object by deserializing the string into the searched object.
    SetValue *string
    // If Create and SetValue are set, and the provided string fails unmarshaling,
    // the default (zero) value is used instead.
    IgnoreUnmarshalFailure bool
    // When non-nil, sets the searched object with the given value.
    SetObject interface{}
    // Next time a map entry is crossed, it will be made addressable for the rest of the way.
    MakeMapAddressable bool
}
// Returns the format
// getStructFieldFormat returns the key format of a struct field: the field's
// `kvs` tag split on '/', or just the field name when no tag is present.
// A tag starting with '/' is rejected with ErrTagFirstSlash.
func getStructFieldFormat(f reflect.StructField) ([]string, error) {
    tag := f.Tag.Get("kvs")
    switch {
    case tag == "":
        return []string{f.Name}, nil
    case strings.HasPrefix(tag, "/"):
        return nil, ErrTagFirstSlash
    default:
        return strings.Split(tag, "/"), nil
    }
}
func serializeValue(v reflect.Value) (string, error) {
if v.Type().Kind() == reflect.String {
return v.Interface().(string), nil
}
arr, err := json.Marshal(v.Interface())
if err != nil {
return "", err
}
return string(arr), nil
}
func unserializeValue(val string, t reflect.Type) (reflect.Value, error) {
v := reflect.New(t).Elem()
if t.Kind() == reflect.String {
v.Set(reflect.ValueOf(val))
return v, nil
}
err := json.Unmarshal([]byte(val), v.Addr().Interface())
if err != nil {
return reflect.Zero(t), err
}
return v, nil
}
func serializeMapKey(v reflect.Value) (string, error) {
if v.Type().Kind() == reflect.String {
return v.Interface().(string), nil
}
arr, err := json.Marshal(v.Interface())
if err != nil {
return "<ERROR>", err
}
return string(arr), nil
}
func unserializeMapKey(s string, t reflect.Type) (reflect.Value, error) {
v := reflect.New(t).Elem()
if t.Kind() == reflect.String {
v.Set(reflect.ValueOf(s))
return v, nil
}
err := json.Unmarshal([]byte(s), v.Addr().Interface())
if err != nil {
return reflect.Zero(t), err
}
return v, nil
}
// encodeStruct encodes every exported field of the struct held by o, using
// each field's `kvs` tag (or its name) as that field's sub-format.
func (state *encodeState) encodeStruct(o objectPath) error {
    v := o.value
    for i := 0; i < v.NumField(); i++ {
        f := v.Type().Field(i)
        if f.PkgPath != "" {
            // Attribute is not exported
            continue
        }
        format, err := getStructFieldFormat(f)
        if err != nil {
            return err
        }
        // Recurse with the field's value and format; the keypath is inherited.
        o.value = v.Field(i)
        o.format = format
        err = state.encode(o)
        if err != nil {
            return err
        }
    }
    return nil
}
// encodeMap encodes each map entry under "<keypath>/<serialized key>".
// The format must start with the "{key}" placeholder.
func (state *encodeState) encodeMap(o objectPath) error {
    if len(o.format) == 0 || o.format[0] != "{key}" {
        return fmt.Errorf("Map format must contain a '{key}' element")
    }
    o.format = o.format[1:] //Remove "{key}" from format
    v := o.value
    for _, k := range v.MapKeys() {
        key_string, err := serializeMapKey(k)
        if err != nil {
            return err
        }
        // Indirect dereferences pointer values stored in the map.
        o.value = reflect.Indirect(v.MapIndex(k))
        o.keypath = append(o.keypath, key_string)
        err = state.encode(o)
        if err != nil {
            return err
        }
        // Pop the key component before handling the next entry.
        o.keypath = o.keypath[:len(o.keypath)-1]
    }
    return nil
}
// encodeJson stores o's value as a single serialized blob under the joined
// keypath. Duplicate keys are rejected rather than silently overwritten.
func (state *encodeState) encodeJson(o objectPath) error {
    key := strings.Join(o.keypath, "/")
    if v, ok := state.kvs[key]; ok {
        return fmt.Errorf("Key '%s' is already used by value '%s'", key, v)
    }
    val, err := serializeValue(o.value)
    if err != nil {
        return err
    }
    state.kvs[key] = val
    return nil
}
// encode dispatches on the value's kind: pointers are dereferenced, literal
// format components are folded into the keypath, an exhausted format means
// the value is stored as one blob, and container kinds recurse.
func (state *encodeState) encode(o objectPath) error {
    if o.value.Type().Kind() == reflect.Ptr {
        // NOTE(review): a nil pointer yields an invalid Elem(); the recursive
        // call would then panic on o.value.Type() — confirm callers never
        // encode nil pointers.
        o.value = o.value.Elem()
        return state.encode(o)
    }
    // Consume literal format components up to "", "{key}" or "{index}".
    for len(o.format) != 0 {
        if o.format[0] == "" || o.format[0] == "{key}" || o.format[0] == "{index}" {
            break
        }
        o.keypath = append(o.keypath, o.format[0])
        o.format = o.format[1:]
    }
    if len(o.format) == 0 {
        // This element is stored as blob
        return state.encodeJson(o)
    }
    switch o.value.Type().Kind() {
    case reflect.Struct:
        return state.encodeStruct(o)
    case reflect.Map:
        return state.encodeMap(o)
    case reflect.Slice:
        return ErrNotImplemented
    case reflect.Array:
        return ErrNotImplemented
    case reflect.Chan, reflect.Func, reflect.Interface, reflect.Invalid, reflect.UnsafePointer:
        return ErrUnsupportedType
    default:
        return ErrScalarType
    }
}
// findByFieldsMap descends into the map entry selected by fields[0].
// With opt.Create it materializes missing maps/entries; with
// opt.MakeMapAddressable it traverses a copied (addressable) value and
// re-inserts it afterwards so that a deeper Set can succeed.
func findByFieldsMap(o objectPath, fields []interface{}, opt findOptions) (objectPath, error) {
    // Remember this map crossing so a later non-addressable Set can restart here.
    o2 := o
    o.lastMapIndirection = &o2
    if len(o.format) == 0 || o.format[0] != "{key}" {
        return o, fmt.Errorf("Map format must contain a '{key}' element")
    }
    o.format = o.format[1:] //Remove "{key}" from format
    key_type := o.vtype.Key()
    key := reflect.ValueOf(fields[0])
    if key.Type() != key_type {
        return o, ErrFindKeyWrongType
    }
    keystr, err := serializeMapKey(key)
    if err != nil {
        return o, err
    }
    m := o.value
    if o.value.IsValid() {
        if o.value.IsNil() && opt.Create {
            if !o.value.CanSet() {
                return findByFieldsRevertAddressable(o, fields, opt)
            }
            n := reflect.MakeMap(o.vtype) // Create new map
            o.value.Set(n) // Set the pointer value to the current value
            m = o.value
        }
        // Set object to inner object
        // Note: Use of indirect here is probably weird.
        // It is used to dereference pointers whenever the map stores pointers,
        // such that the object is addressable.
        // But it would also work if it does not.
        val := o.value.MapIndex(key)
        if val.IsValid() {
            o.value = val
        } else if opt.Create {
            val = reflect.New(o.vtype.Elem()) // Get pointer to a new value
            o.value.SetMapIndex(key, val.Elem()) // Set the value in the map
            o.value = o.value.MapIndex(key) // Get the value
        } else {
            o.value = val
        }
    }
    o.vtype = o.vtype.Elem() // Get type of the element
    o.keypath = append(o.keypath, keystr) // Add object key to keypath
    o.fields = append(o.fields, key.Interface()) // Set field to key object
    if opt.MakeMapAddressable {
        // Note that MakeMapAddressable requires the value to exist. We do not check here.
        val := reflect.New(o.vtype)
        val.Elem().Set(o.value) // Make a copy of the current value
        o.value = val.Elem()
        opt.MakeMapAddressable = false
        o, err = findByFields(o, fields[1:], opt) //Iterate on the addressable value
        if err != nil {
            return o, err
        }
        m.SetMapIndex(key, val.Elem()) // Set the addressable value in the map
        return o, err
    } else {
        // Iterate within the object
        return findByFields(o, fields[1:], opt)
    }
}
// findByFieldsStruct descends into the struct attribute named by fields[0]
// (which must be a string) and continues with that field's own format.
func findByFieldsStruct(o objectPath, fields []interface{}, opt findOptions) (objectPath, error) {
    name, ok := fields[0].(string)
    if !ok {
        return o, ErrWrongFieldType
    }
    fields = fields[1:]
    f, ok := o.vtype.FieldByName(name)
    if !ok {
        return o, ErrWrongFieldName
    }
    format, err := getStructFieldFormat(f)
    if err != nil {
        return o, err
    }
    // The value may be absent (type-only traversal); only follow it when valid.
    if o.value.IsValid() {
        o.value = o.value.FieldByIndex(f.Index)
    }
    o.vtype = o.vtype.FieldByIndex(f.Index).Type
    o.format = format
    o.fields = append(o.fields, name)
    return findByFields(o, fields, opt)
}
// findByFieldsPtr dereferences a pointer (allocating the pointee when
// opt.Create is set) before continuing the field search on the pointee.
func findByFieldsPtr(o objectPath, fields []interface{}, opt findOptions) (objectPath, error) {
    if o.value.IsValid() { // Value represents an actual pointer
        if o.value.Elem().IsValid() {
            // Pointer contains a valid value
            o.value = o.value.Elem() // Dereference
        } else if opt.Create {
            // Create object
            if !o.value.CanSet() {
                // But can't set !
                return findByFieldsRevertAddressable(o, fields, opt) // Revert to last addressable
            }
            n := reflect.New(o.vtype.Elem()) // Get pointer to a new value
            o.value.Set(n) // Set the pointer value to the current value
            o.value = o.value.Elem() // Dereference
        } else {
            o.value = o.value.Elem() // Just dereference
        }
    }
    o.vtype = o.vtype.Elem() // Dereference type
    return findByFields(o, fields, opt)
}
// findByFieldsFormat folds the leading literal components of o.format into
// o.keypath, stopping at a terminal "" or at a "{key}"/"{index}" placeholder
// (which a map/array handler must consume). Fields are passed through as-is.
func findByFieldsFormat(o objectPath, fields []interface{}) (objectPath, []interface{}, error) {
    for len(o.format) > 0 {
        head := o.format[0]
        if (head == "" && len(o.format) == 1) || head == "{key}" || head == "{index}" {
            // Terminal marker or placeholder: leave it for the caller.
            break
        }
        // Literal component: stack it up in the keypath.
        o.keypath = append(o.keypath, head)
        o.format = o.format[1:]
    }
    return o, fields, nil
}
// findByFieldsRevertAddressable restarts the search from the last crossed
// map, this time requesting an addressable traversal, so that a pending
// create/set deeper down can be applied.
func findByFieldsRevertAddressable(o objectPath, fields []interface{}, opt findOptions) (objectPath, error) {
    if o.lastMapIndirection == nil {
        return o, fmt.Errorf("Object is not addressable")
    }
    fields = append(o.fields[len(o.lastMapIndirection.fields):], fields...) // Reconstruct the fields before they were consumed
    o = *o.lastMapIndirection
    opt.MakeMapAddressable = true
    return findByFieldsMap(o, fields, opt)
}
// findByFieldsSetMaybe applies a pending SetValue/SetObject to the object
// found at the end of a field traversal. Returns unchanged when no set was
// requested; reverts to the last addressable map when the target can't be set.
func findByFieldsSetMaybe(o objectPath, fields []interface{}, opt findOptions) (objectPath, error) {
    // If no set is needed, return ok
    if opt.SetObject == nil && opt.SetValue == nil {
        return o, nil
    }
    // Can only set if the value exists (opt.Create should be set if intent is to create too)
    if !o.value.IsValid() {
        return o, ErrFindSetNoExists
    }
    // If object cannot be set, try to rollback
    if !o.value.CanSet() {
        return findByFieldsRevertAddressable(o, fields, opt)
    }
    var value reflect.Value
    var err error
    // If set by string, parse the string
    if opt.SetObject == nil {
        value, err = unserializeValue(*opt.SetValue, o.vtype)
        if err != nil {
            if opt.IgnoreUnmarshalFailure {
                // BUGFIX: reflect.New(o.vtype) alone yields a *T, which then
                // failed the type check below and returned ErrFindSetWrongType
                // instead of falling back to the zero value. .Elem() gives the
                // zero T itself (mirrors findByKeySetMaybe).
                value = reflect.New(o.vtype).Elem()
            } else {
                return o, err
            }
        }
    } else {
        value = reflect.ValueOf(opt.SetObject)
    }
    // Check the type
    if value.Type() != o.vtype {
        return o, ErrFindSetWrongType
    }
    // Set the value
    o.value.Set(value)
    return o, nil
}
// Goes directly down an object, consuming one field per traversal step,
// and applies any pending set once the fields are exhausted.
func findByFields(o objectPath, fields []interface{}, opt findOptions) (objectPath, error) {
    // First we always dereference pointers, even though the value may become invalid
    if o.vtype.Kind() == reflect.Ptr {
        return findByFieldsPtr(o, fields, opt)
    }
    o, fields, err := findByFieldsFormat(o, fields)
    if err != nil {
        return o, err
    }
    // Now we have removed all leading objects
    if len(fields) == 0 {
        // This is the end of the journey, buddy.
        return findByFieldsSetMaybe(o, fields, opt)
    }
    if len(o.format) == 0 {
        // The object is supposed to be encoded as a blob
        // NOTE: It would make sense to check if fields correspond to an inner object, and possibly
        // return it with the reduced key.
        // For now let's just return an error.
        if len(fields) != 0 {
            return o, ErrFindPathPastObject
        }
    }
    switch o.vtype.Kind() {
    case reflect.Struct:
        return findByFieldsStruct(o, fields, opt)
    case reflect.Map:
        return findByFieldsMap(o, fields, opt)
    case reflect.Slice:
        return o, ErrNotImplemented
    case reflect.Array:
        return o, ErrNotImplemented
    case reflect.Chan, reflect.Func, reflect.Interface, reflect.Invalid, reflect.UnsafePointer:
        return o, ErrUnsupportedType
    default:
        return o, ErrScalarType
    }
}
// Finds a sub-object based on the path of successive fields.
//
// Returns the found object (a pointer when addressable, a copy otherwise),
// its encoded key path, and possibly an error.
func FindByFields(object interface{}, format string, fields []interface{}) (interface{}, string, error) {
    o := objectPath{
        value: reflect.ValueOf(object),
        vtype: reflect.TypeOf(object),
        format: strings.Split(format, "/"),
    }
    o, err := findByFields(o, fields, findOptions{})
    if err != nil {
        return nil, "", err
    }
    if !o.value.IsValid() {
        return nil, "", ErrFindKeyNotFound
    }
    if !o.value.CanAddr() {
        // Returning a copy if the object is non-addressable
        return o.value.Interface(), strings.Join(append(o.keypath, o.format...), "/"), nil
    }
    // If the value is addressable, return a pointer
    return o.value.Addr().Interface(), strings.Join(append(o.keypath, o.format...), "/"), nil
}
// Encode part of the object stored at position key.
// The subfield is identified by a list of fields:
// structure attributes are identified by name (as a string),
// slice indexes are identified with integers, and
// map keys are identified by giving an object of the same type as the map key.
// Returns the resulting key -> serialized-value map.
func Encode(format string, object interface{}, fields ...interface{}) (map[string]string, error) {
    formatpath := strings.Split(format, "/")
    o := objectPath{
        value: reflect.ValueOf(object),
        vtype: reflect.TypeOf(object),
        format: formatpath,
        keypath: []string{},
    }
    // First locate the sub-object selected by the fields...
    o, err := findByFields(o, fields, findOptions{})
    if err != nil {
        return nil, err
    }
    if !o.value.IsValid() {
        return nil, ErrFindObjectNotFound
    }
    // ...then encode it into a fresh key-value state.
    state := &encodeState{
        kvs: make(map[string]string),
    }
    err = state.encode(o)
    if err != nil {
        return nil, err
    }
    return state.kvs, nil
}
// Find sub-object from struct per its key.
// Tries each exported attribute in turn and follows the first one whose
// format matches the beginning of the remaining path.
// Returns the found object and the consumed key path.
func findByKeyOneStruct(o objectPath, path []string, opt findOptions) (objectPath, error) {
    // A struct reached here must be encoded with the [""] format.
    // BUGFIX: this condition used '&&', which both accepted wrong formats
    // (e.g. ["{key}"] slipped through because len == 1) and evaluated
    // o.format[0] on an empty slice (index panic); '||' enforces the
    // stated "expect [\"\"]" contract.
    if len(o.format) != 1 || o.format[0] != "" {
        return o, fmt.Errorf("Struct object expect [\"\"] format")
    }
    v := o.value
    t := o.vtype
    for i := 0; i < t.NumField(); i++ {
        f := t.Field(i)
        if f.PkgPath != "" {
            // Attribute is not exported
            continue
        }
        format, err := getStructFieldFormat(f)
        if err != nil {
            return o, err
        }
        if v.IsValid() {
            o.value = v.Field(i) // Get field if value exists
        }
        o.vtype = f.Type // Get attribute type
        o.format = format
        // First see if the format corresponds
        o2, path2, err := findByKeyFormat(o, path)
        if err == nil {
            // We can fully look in there
            o2.fields = append(o2.fields, f.Name)
            return findByKey(o2, path2, opt)
        }
        // Let's continue searching
    }
    return o, ErrFindPathNotFound
}
// Finds a sub-object inside a map with the provided object format (e.g. {key}, {key}/, {key}/name).
// path[0] is the serialized map key; Create materializes missing maps and
// entries, MakeMapAddressable traverses a copy and re-inserts it afterwards.
func findByKeyOneMap(o objectPath, path []string, opt findOptions) (objectPath, error) {
    if o.value.IsValid() && o.value.IsNil() && opt.Create && !o.value.CanSet() {
        // Create MAP if necessary
        return findByKeyRevertAddressable(o, path, opt)
    }
    // Remember this map crossing so a later non-addressable Set can restart here.
    o2 := o
    o.lastMapIndirection = &o2
    if len(o.format) == 0 || o.format[0] != "{key}" {
        return o, fmt.Errorf("Map format must contain a '{key}' element")
    }
    o.format = o.format[1:] // Consume {key} format
    // Consume key
    keyvalue, err := unserializeMapKey(path[0], o.vtype.Key())
    if err != nil {
        return o, err
    }
    m := o.value
    if o.value.IsValid() {
        if o.value.IsNil() && opt.Create {
            n := reflect.MakeMap(o.vtype) // Create new map
            o.value.Set(n) // Set the pointer value to the current value
            m = o.value
        }
        // Set object to inner object
        // Note: Use of indirect here is probably weird.
        // It is used to dereference pointers whenever the map stores pointers,
        // such that the object is addressable.
        // But it would also work if it does not.
        val := o.value.MapIndex(keyvalue)
        if val.IsValid() {
            o.value = val
        } else if opt.Create {
            val = reflect.New(o.vtype.Elem()) // Get pointer to a new value
            o.value.SetMapIndex(keyvalue, val.Elem()) // Set the value in the map
            o.value = o.value.MapIndex(keyvalue) // Get the value
        } else {
            o.value = val
        }
    }
    o.fields = append(o.fields, keyvalue.Interface()) // Set field to key object
    o.vtype = o.vtype.Elem() // Get the map value type
    o.keypath = append(o.keypath, path[0]) // Add object key to keypath
    if opt.MakeMapAddressable {
        // Note that MakeMapAddressable requires the value to exist. We do not check here.
        val := reflect.New(o.vtype)
        val.Elem().Set(o.value) // Make a copy of the current value
        o.value = val.Elem()
        opt.MakeMapAddressable = false
        o, err := findByKey(o, path[1:], opt) //Iterate on the addressable value
        if err != nil {
            return o, err
        }
        m.SetMapIndex(keyvalue, val.Elem()) // Set the addressable value in the map
        return o, err
    } else {
        // Iterate within the object
        return findByKey(o, path[1:], opt)
    }
}
// findByKeyPtr dereferences a pointer (allocating the pointee when
// opt.Create is set) before continuing the key search on the pointee.
func findByKeyPtr(o objectPath, path []string, opt findOptions) (objectPath, error) {
    if o.value.IsValid() {
        if o.value.Elem().IsValid() {
            // Pointer holds a value: just dereference.
            o.value = o.value.Elem()
        } else if opt.Create {
            if !o.value.CanSet() {
                // Cannot allocate in place: restart from the last addressable map.
                return findByKeyRevertAddressable(o, path, opt)
            }
            n := reflect.New(o.vtype.Elem()) // Get pointer to a new value
            o.value.Set(n) // Set the pointer value to the current value
            o.value = o.value.Elem() // Dereference
        } else {
            o.value = o.value.Elem()
        }
    }
    o.vtype = o.vtype.Elem()
    return findByKey(o, path, opt)
}
// findByKeyFormat consumes matching literal components from both the format
// and the path, stopping at a terminal "" / "{key}" / "{index}" component or
// when the path is exhausted. A literal mismatch is ErrFindPathNotFound.
func findByKeyFormat(o objectPath, path []string) (objectPath, []string, error) {
    for len(o.format) != 0 {
        if o.format[0] == "" && len(o.format) == 1 {
            // This object is supposed to be encoded within the given key path
            break
        } else if o.format[0] == "{key}" || o.format[0] == "{index}" {
            //We stop here and can format a map, array or slice element
            break
        } else if len(path) == 0 {
            // We are going to stop now
            break
        } else if o.format[0] != path[0] {
            // Provided path does not match the expected format
            return o, path, ErrFindPathNotFound
        } else {
            // Pile-up key and continue
            o.keypath = append(o.keypath, path[0])
            path = path[1:]
            o.format = o.format[1:]
        }
    }
    return o, path, nil
}
// When some object must be changed but is not addressable, we revert to the last addressable object
// and restart while asking for the rest of the process to be addressable.
func findByKeyRevertAddressable(o objectPath, path []string, opt findOptions) (objectPath, error) {
    if o.lastMapIndirection == nil {
        // No map was crossed: there is nothing to revert to.
        return o, fmt.Errorf("Object is not addressable")
    }
    path = append(o.keypath[len(o.lastMapIndirection.keypath):], path...) // Reconstruct the keypath before it was consumed
    o = *o.lastMapIndirection
    opt.MakeMapAddressable = true
    return findByKeyOneMap(o, path, opt)
}
// findByKeySetMaybe applies a pending SetValue/SetObject to the object found
// at the end of a key traversal. Returns unchanged when no set was
// requested; reverts to the last addressable map when the target can't be set.
func findByKeySetMaybe(o objectPath, path []string, opt findOptions) (objectPath, error) {
    // If no set is needed, return ok
    if opt.SetObject == nil && opt.SetValue == nil {
        return o, nil
    }
    // Can only set if the value exists (opt.Create should be set if intent is to create too)
    if !o.value.IsValid() {
        return o, ErrFindSetNoExists
    }
    // If object cannot be set, try to rollback
    if !o.value.CanSet() {
        return findByKeyRevertAddressable(o, path, opt)
    }
    var value reflect.Value
    var err error
    // If set by string, parse the string
    if opt.SetObject == nil {
        value, err = unserializeValue(*opt.SetValue, o.vtype)
        if err != nil {
            if opt.IgnoreUnmarshalFailure {
                // Fall back to the zero value of the target type.
                value = reflect.New(o.vtype).Elem()
            } else {
                return o, err
            }
        }
    } else {
        // NOTE(review): unlike findByFieldsSetMaybe, SetObject is dereferenced
        // here, so callers on the key path are expected to pass a pointer —
        // confirm before reusing this path with a non-pointer SetObject.
        value = reflect.ValueOf(opt.SetObject).Elem()
    }
    // Check the type
    if value.Type() != o.vtype {
        return o, ErrFindSetWrongType
    }
    // Set the value
    o.value.Set(value)
    return o, nil
}
// findByKey resolves one step of the key path and recurses: pointers are
// dereferenced first, then literal format components are matched, then the
// type kind decides between blob decoding and struct/map traversal.
func findByKey(o objectPath, path []string, opt findOptions) (objectPath, error) {
    if o.vtype.Kind() == reflect.Ptr {
        // Let's first dereference (Before actually parsing the keys)
        return findByKeyPtr(o, path, opt)
    }
    // Go through format prefixing element (before "", "{key}" or "{index}")
    o, path, err := findByKeyFormat(o, path)
    if err != nil {
        return o, err
    }
    if len(o.format) == 0 {
        // The object is supposed to be encoded as a blob
        if len(path) != 0 {
            // Path is too specific and therefore does not correspond to an encoded object.
            return o, ErrFindPathPastObject
        }
        return findByKeySetMaybe(o, path, opt)
    }
    if len(path) == 0 || (path[0] == "" && len(path) != 1) {
        // We reached the end of the requested path but the object expects more.
        return o, ErrFindKeyInvalid
    }
    if path[0] == "" {
        // Trailing "/" in the requested path: this object is the target.
        return findByKeySetMaybe(o, path, opt)
    }
    switch o.vtype.Kind() {
    case reflect.Struct:
        return findByKeyOneStruct(o, path, opt)
    case reflect.Map:
        return findByKeyOneMap(o, path, opt)
    case reflect.Slice:
        return o, ErrNotImplemented
    case reflect.Array:
        return o, ErrNotImplemented
    case reflect.Chan, reflect.Func, reflect.Interface, reflect.Invalid, reflect.UnsafePointer:
        return o, ErrUnsupportedType
    default:
        return o, ErrScalarType
    }
}
// FindByKey returns a sub-object by following the provided path.
//
// 'format' is the provided object key formatting string,
// equivalent to the attribute 'kvs' tags from struct fields.
// For most types, providing a format is optional.
//
// Note that the provided path should include the format, or specific values
// used by the format. For instance, if the format is "here/{key}/there/", then
// the path should start with "here/<some-key-value>/there/".
//
// Returns the found object (a pointer when addressable, a copy otherwise)
// and the list of fields traversed to reach it.
func FindByKey(o interface{}, format string, path string) (interface{}, []interface{}, error) {
    op := objectPath{
        value: reflect.ValueOf(o),
        vtype: reflect.TypeOf(o),
        format: strings.Split(format, "/"),
    }
    op, err := findByKey(op, strings.Split(path, "/"), findOptions{})
    if err != nil {
        return nil, nil, err
    }
    if !op.value.IsValid() {
        return nil, nil, ErrFindKeyNotFound
    }
    if !op.value.CanAddr() {
        // If the value is not addressable, return a copy
        return op.value.Interface(), op.fields, nil
    }
    // If the value is addressable, return a pointer
    return op.value.Addr().Interface(), op.fields, nil
}
// Update transforms a (key,value) into an actually modified object.
//
// Given an object and its format, as well as a (key, value) pair (where key is relative to the object),
// Update modifies the object, returns the field path to the modified sub-object.
// Missing intermediate objects are created; a value that fails to unmarshal
// falls back to the target type's zero value (IgnoreUnmarshalFailure).
func UpdateKeyObject(object interface{}, format string, keypath string, value string) ([]interface{}, error) {
    o := objectPath{
        value: reflect.ValueOf(object),
        vtype: reflect.TypeOf(object),
        format: strings.Split(format, "/"),
    }
    opt := findOptions{
        Create: true,
        SetValue: &value,
        IgnoreUnmarshalFailure: true,
    }
    o, err := findByKey(o, strings.Split(keypath, "/"), opt)
    if err != nil {
        return nil, err
    }
    return o.fields, nil
}
// DeleteKeyObject removes the sub-object addressed by keypath (relative to
// object) and returns the field path of what was removed. The actual removal
// is delegated to DeleteByFields.
func DeleteKeyObject(object interface{}, format string, keypath string) ([]interface{}, error) {
    o := objectPath{
        value: reflect.ValueOf(object),
        vtype: reflect.TypeOf(object),
        format: strings.Split(format, "/"),
    }
    opt := findOptions{}
    path := strings.Split(keypath, "/")
    o, err := findByKey(o, path, opt)
    if err != nil && err != ErrFindKeyInvalid {
        // Getting ErrFindKeyInvalid means the key does not represent an encoded value, which is ok in this case
        return nil, err
    }
    // Delete by the resolved field list rather than by the raw key path.
    err, _ = DeleteByFields(object, format, o.fields...)
    return o.fields, err
}
// SetByFields sets the sub-object addressed by the successive fields
// (struct attribute names, map keys) to the given value, creating missing
// intermediate objects on the way.
func SetByFields(object interface{}, format string, value interface{}, fields ...interface{}) error {
    o := objectPath{
        value: reflect.ValueOf(object),
        vtype: reflect.TypeOf(object),
        format: strings.Split(format, "/"),
    }
    opt := findOptions{
        Create: true,
        SetObject: value,
    }
    // Idiom: return the error directly instead of the redundant
    // "if err != nil { return err }; return nil" dance.
    _, err := findByFields(o, fields, opt)
    return err
}
// Deletes an element from a map, which means the last element from the fields
// list must be a key, and the previous fields must reference a map object.
// Returns an error, or nil and the format string of the removed object
func DeleteByFields(object interface{}, format string, fields ...interface{}) (error, string) {
    if len(fields) < 1 {
        return ErrNotMapIndex, ""
    }
    o := objectPath{
        value: reflect.ValueOf(object),
        vtype: reflect.TypeOf(object),
        format: strings.Split(format, "/"),
    }
    opt := findOptions{}
    // Resolve the containing object first (all fields but the last)...
    o, err := findByFields(o, fields[0:len(fields)-1], opt)
    if err != nil {
        return err, ""
    }
    if o.vtype.Kind() != reflect.Map {
        return ErrNotMapIndex, ""
    }
    // ...then resolve the entry itself to make sure it exists.
    o2, err := findByFields(o, fields[len(fields)-1:], opt)
    if err != nil {
        return err, ""
    }
    if !o2.value.IsValid() {
        return ErrFindObjectNotFound, ""
    }
    key := reflect.ValueOf(fields[len(fields)-1])
    // Setting the zero Value (reflect.ValueOf(nil)) deletes the map entry.
    o.value.SetMapIndex(key, reflect.ValueOf(nil))
    keypath := strings.Join(o2.keypath, "/")
    if len(o2.format) != 0 { //More subkeys
        keypath = keypath + "/"
    }
    return nil, keypath
}
|
import { Component, OnInit, ViewChild, ElementRef } from '@angular/core';
import * as jspdf from 'jspdf';
import * as html2canvas from 'html2canvas'; // NOTE(review): only used by the commented-out export path below
import { Subscriber, Subscription } from 'rxjs'; // NOTE(review): Subscriber appears unused — confirm before removing
import { PrintService } from '../../services/print.service';
/**
 * Layout component used for printing / PDF export.
 * When PrintService.isExport is set, the element with id "printpdf" is
 * exported to "quality.pdf" on init via jspdf's addHTML.
 */
@Component({
  selector: 'app-print-layout',
  templateUrl: './print-layout.component.html',
  styleUrls: ['./print-layout.component.css']
})
export class PrintLayoutComponent implements OnInit {
  // NOTE(review): never assigned or unsubscribed in this file — possibly dead.
  d: Subscription;
  // Reference to the #content template element (unused in this file).
  @ViewChild('content') content: ElementRef
  constructor(private printService: PrintService) {
  }
  ngOnInit() {
    if (this.printService.isExport) {
      // NOTE(review): addHTML is a legacy jspdf API that requires html2canvas
      // to be globally available — confirm against the pinned jspdf version.
      var pdf = new jspdf('p', 'pt', 'a4');
      pdf.addHTML(document.getElementById('printpdf'), function() {
        pdf.save('quality.pdf');
      });
      // Alternative canvas-based export path, kept for reference:
      // html2canvas(document.getElementById('printpdf')).then(canvas => {
      //   var imgWidth = 208;
      //   var pageHeight = 295;
      //   var imgHeight = canvas.height * imgWidth / canvas.width;
      //   var heightLeft = imgHeight;
      //   const contentDataURL = canvas.toDataURL('image/png')
      //   let pdf = new jspdf('p', 'mm', 'a5'); // A4 size page of PDF
      //   var position = 0;
      //   pdf.addImage(contentDataURL, 'PNG', 0, position, imgWidth, imgHeight)
      //   pdf.save('MYPdf.pdf'); // Generated PDF
      //   // window.close();
      // });
    }
    // } else {
    //   this.flag = true;
    // }
  }
}
|
#!/usr/bin/env bash
# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES.
# ONLY EDIT THIS FILE FROM WITHIN THE 'LYFT/BOILERPLATE' REPOSITORY:
#
# TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst
#
# Builds the project image tagged with the current git SHA and, when REGISTRY
# is set, pushes it (plus a semver tag when HEAD is exactly on a release tag).
set -e
echo ""
echo "------------------------------------"
echo "         DOCKER BUILD"
echo "------------------------------------"
echo ""
if [ -n "$REGISTRY" ]; then
  # Do not push if there are unstaged git changes
  CHANGED=$(git status --porcelain)
  if [ -n "$CHANGED" ]; then
    echo "Please commit git changes before pushing to a registry"
    exit 1
  fi
fi
GIT_SHA=$(git rev-parse HEAD)
IMAGE_TAG_WITH_SHA="${IMAGE_NAME}:${GIT_SHA}"
# Only defined when HEAD carries an exact release tag ("|| true" keeps set -e happy).
RELEASE_SEMVER=$(git describe --tags --exact-match "$GIT_SHA" 2>/dev/null) || true
if [ -n "$RELEASE_SEMVER" ]; then
  IMAGE_TAG_WITH_SEMVER="${IMAGE_NAME}:${RELEASE_SEMVER}${IMAGE_TAG_SUFFIX}"
fi
# build the image
docker build -t "$IMAGE_TAG_WITH_SHA" --build-arg IMAGE_TAG="${IMAGE_TAG_WITH_SHA}" .
echo "${IMAGE_TAG_WITH_SHA} built locally."
# if REGISTRY specified, push the images to the remote registry
if [ -n "$REGISTRY" ]; then
  if [ -n "${DOCKER_REGISTRY_PASSWORD}" ]; then
    docker login --username="$DOCKER_REGISTRY_USERNAME" --password="$DOCKER_REGISTRY_PASSWORD"
  fi
  docker tag "$IMAGE_TAG_WITH_SHA" "${REGISTRY}/${IMAGE_TAG_WITH_SHA}"
  docker push "${REGISTRY}/${IMAGE_TAG_WITH_SHA}"
  echo "${REGISTRY}/${IMAGE_TAG_WITH_SHA} pushed to remote."
  # If the current commit has a semver tag, also push the images with the semver tag
  if [ -n "$RELEASE_SEMVER" ]; then
    docker tag "$IMAGE_TAG_WITH_SHA" "${REGISTRY}/${IMAGE_TAG_WITH_SEMVER}"
    docker push "${REGISTRY}/${IMAGE_TAG_WITH_SEMVER}"
    echo "${REGISTRY}/${IMAGE_TAG_WITH_SEMVER} pushed to remote."
  fi
fi
|
#!/bin/bash
# Unmounts the Ubuntu 16.04.3 server ISO and removes the extracted tree.
# GIT_HOME defaults to the parent directory when the caller did not set it.
# Fixes: $GIT_HOME expansions are now quoted (word-splitting/glob safety, SC2086).
if [ -z "$GIT_HOME" ]; then
  GIT_HOME=..
fi
sudo udevil unmount /media/ubuntu-16.04.3-server-amd64.iso
sudo rm -rf "$GIT_HOME/isofiles"
sudo rm "$GIT_HOME/ubuntu-16.04.3-server-amd64.iso"
unset GIT_HOME
|
// Prints the first `count` Fibonacci numbers (default 10, matching the
// original hard-coded behavior) and returns them as an array.
// Fixes: `c` was an undeclared implicit global (a ReferenceError in strict
// mode); the length is now a parameter; the sequence is returned so callers
// can use it (previous callers ignored the undefined return — compatible).
function FibonacciSequence(count = 10) {
    const sequence = [];
    let a = 0, b = 1;
    console.log(a);
    console.log(b);
    sequence.push(a, b);
    for (let i = 3; i <= count; i++) {
        const c = a + b;
        console.log(c);
        sequence.push(c);
        a = b;
        b = c;
    }
    return sequence;
}
FibonacciSequence();
# platform = multi_platform_rhel,multi_platform_ol,multi_platform_fedora
# Configure audispd's remote server from the var_audispd_remote_server
# XCCDF variable, writing to the product-specific audisp-remote.conf path.
. /usr/share/scap-security-guide/remediation_functions
populate var_audispd_remote_server
{{% if product in ["rhel8", "fedora"] %}}
# On RHEL 8 / Fedora the audisp plugin configuration lives under /etc/audit.
AUDITCONFIG=/etc/audit/audisp-remote.conf
{{% else %}}
AUDITCONFIG=/etc/audisp/audisp-remote.conf
{{% endif %}}
replace_or_append $AUDITCONFIG '^remote_server' "$var_audispd_remote_server" "@CCENUM@"
|
#!/bin/bash
# Installs the packages this project depends on (Debian/Ubuntu, run as root).
# Fixes: quote "$package" in the install loop (word-splitting safety, SC2086).
echo "Installing dependencies... Hold on."
echo "Updating system."
apt-get update
echo "Installing packages."
for package in adb curl grep tar openjdk-11-jdk p7zip-full
do
  apt-get install -y "$package"
done
echo "Done installing packages. Cheers."
# Do not edit this with any other OS than linux. It will edit the line endings and mess with interpreter and won't work.
|
import { User } from './User';

/** Declaration of a user-profile record linking a User to a profile id. */
export declare class UserProfile {
  /** Primary key of the user-profile row. */
  id: number;
  /** The owning user. */
  user: User;
  /** Profile identifier — NOTE(review): presumably a foreign key; confirm against the backing schema. */
  profile: number;
}
|
#include <stdio.h>

/* Writes the greeting "Hello World!" (no trailing newline) to stdout. */
void print_hello_world() {
    fputs("Hello World!", stdout);
}
class Cache:
    """In-memory key/value cache that reports simulated access latency.

    Latencies are only printed, never actually slept; the class is a
    demonstration of cache behavior, not a timing tool.
    """

    def __init__(self, read_latency, write_latency):
        # Latency (ms) reported for reads and writes respectively.
        self.read_latency = read_latency
        self.write_latency = write_latency
        # Backing store: key -> cached value.
        self.cache = {}

    def read(self, key):
        """Return the cached value for ``key``, faulting it in on a miss.

        On a miss the value is fabricated (``"Value for '<key>'"``) to stand
        in for a real backing data source, then stored through write() so the
        store path (message + insertion) lives in one place instead of being
        duplicated here as it was before.
        """
        if key in self.cache:
            print(f"Reading '{key}' from cache with latency {self.read_latency} ms")
            return self.cache[key]
        # Miss: simulate fetching from the underlying data source.
        print(f"Retrieving '{key}' from data source with latency {self.read_latency} ms")
        value = f"Value for '{key}'"
        self.write(key, value)
        return value

    def write(self, key, value):
        """Store ``value`` under ``key``, reporting the simulated write latency."""
        print(f"Storing '{key}' in cache with latency {self.write_latency} ms")
        self.cache[key] = value
# Usage demonstration: the first read misses and populates the cache, the
# second read hits, and an explicit write seeds a later hit.
cache = Cache(read_latency=10, write_latency=5)
print(cache.read("key1"))  # Retrieve from data source and store in cache
print(cache.read("key1"))  # Retrieve from cache
cache.write("key2", "Value for key2")  # Store in cache
print(cache.read("key2"))  # Retrieve from cache
#!/bin/bash
# remediation = bash
# platform = Fedora,Red Hat Enterprise Linux 7,Red Hat Enterprise Linux 8
# Audit every privileged invocation of sudo.
mkdir -p /etc/audit/rules.d
rule="-a always,exit -F path=/usr/bin/sudo -F auid>=1000 -F auid!=unset -F key=privileged"
# Fix: only append when the exact rule is not already present, so repeated
# remediation runs do not pile up duplicate audit rules.
grep -qxF -- "$rule" /etc/audit/rules.d/privileged.rules 2>/dev/null || \
    echo "$rule" >> /etc/audit/rules.d/privileged.rules
|
<reponame>tylerchen/foss-qdp-project-v4
/*******************************************************************************
* Copyright (c) 2017-11-09 @author <a href="mailto:<EMAIL>"><NAME></a>.
* All rights reserved.
*
* Contributors:
* <a href="mailto:<EMAIL>"><NAME></a> - initial API and implementation.
* Auto Generate By foreveross.com Quick Deliver Platform.
******************************************************************************/
package com.foreveross.qdp.infra.vo.system.common;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.Serializable;
import java.util.Date;
/**
* 脚本管理 - SysScriptVO
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @version 1.0.0
* auto generate by qdp v3.0.
* @since 2017-11-09
*/
@XmlRootElement(name = "SysScript")
@SuppressWarnings("serial")
public class SysScriptVO implements Serializable {
    /** Primary key. */
    private String id;
    /** Script name. */
    private String name;
    /** Script code (identifier). */
    private String code;
    /** Script content (the script body itself). */
    private String content;
    /** Script parameters. */
    private String parameter;
    /** Category 1. */
    private String type1;
    /** Category 2. */
    private String type2;
    /** Description. */
    private String description;
    /** Creation time. */
    private Date createTime;
    /** Last update time. */
    private Date updateTime;

    public SysScriptVO() {
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getParameter() {
        return parameter;
    }

    public void setParameter(String parameter) {
        this.parameter = parameter;
    }

    public String getType1() {
        return type1;
    }

    public void setType1(String type1) {
        this.type1 = type1;
    }

    public String getType2() {
        return type2;
    }

    public void setType2(String type2) {
        this.type2 = type2;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }
}
|
package org.museautomation.ui.taskinput;
import javafx.scene.*;
import net.christophermerrill.testfx.*;
import org.junit.jupiter.api.*;
import org.museautomation.*;
import org.museautomation.builtins.valuetypes.*;
import org.museautomation.core.context.*;
import org.museautomation.core.project.*;
import org.museautomation.core.task.*;
import org.museautomation.core.task.input.*;
import org.museautomation.core.values.*;
import static org.junit.jupiter.api.Assertions.*;
import java.util.*;
import java.util.concurrent.atomic.*;
/**
* @author <NAME> (see LICENSE.txt for license details)
*/
/**
 * UI tests for {@code TaskInputValuesEditor}: verifies that required and
 * optional task inputs are displayed, that the "satisfied" state tracks
 * field validity via the change listener, and that resolved values
 * (including defaults) are collected in order.
 * NOTE(review): "_satisifed" is a misspelling of "_satisfied" — kept as-is.
 */
public class TaskInputValuesEditorTests extends ComponentTest
    {
    @Test
    public void provideInputValues()
        {
        // Two required string inputs (one with a default) and two optional inputs.
        TaskInput input1 = createInput("name1", new StringValueType(), true, null);
        TaskInput input2 = createInput("name2", new StringValueType(), true, ValueSourceConfiguration.forValue("default2"));
        TaskInput input3 = createInput("name3", new IntegerValueType(), false, null);
        TaskInput input4 = createInput("name4", new BooleanValueType(), false, ValueSourceConfiguration.forValue(true));
        TaskInputSet inputs = new TaskInputSet();
        inputs.addInput(input1);
        inputs.addInput(input2);
        inputs.addInput(input3);
        inputs.addInput(input4);

        // set the inputs (listener should flip the flag to false: required fields are empty)
        _satisifed.set(true);
        _editor.addSatisfactionChangeListener(_listener);
        _editor.setInputs(inputs);
        waitForUiEvents();
        assertFalse(_satisifed.get());

        // verify each is displayed
        assertTrue(exists(input1.getName()));
        assertTrue(exists(input2.getName()));
        assertTrue(exists(input3.getName()));
        assertTrue(exists(input4.getName()));

        // verify not valid
        assertFalse(_editor.isSatisfied());

        // fill the required fields
        fillFieldAndTabAway(lookup(id(TaskInputValueEditorRow.VALUE_FIELD_ID)).nth(0).query(), quoted("val1"));
        clickOn(lookup(id(TaskInputValueEditorRow.USE_DEFAULT_ID)).nth(0).queryButton()); // in the second row

        // verify valid
        assertTrue(_editor.isSatisfied());
        assertTrue(_satisifed.get());

        // edit a non-required with invalid value (a string into the integer input)
        fillFieldAndTabAway(lookup(id(TaskInputValueEditorRow.VALUE_FIELD_ID)).nth(2).query(), quoted("val1"));
        assertFalse(_editor.isSatisfied()); // should now be invalid
        assertFalse(_satisifed.get());

        // edit a non-required with valid value
        fillFieldAndTabAway(lookup(id(TaskInputValueEditorRow.VALUE_FIELD_ID)).nth(2).query(), "123");
        assertTrue(_editor.isSatisfied()); // should now be valid
        assertTrue(_satisifed.get());

        // verify the values collected (input4 was left at no value, so only 3 resolve)
        List<ResolvedTaskInput> resolved_list = _editor.getResolvedInputs();
        assertEquals(input1.getName(), resolved_list.get(0).getName());
        assertEquals("val1", resolved_list.get(0).getValue());
        assertEquals(input2.getName(), resolved_list.get(1).getName());
        assertEquals("default2", resolved_list.get(1).getValue());
        assertEquals(input3.getName(), resolved_list.get(2).getName());
        assertEquals(123L, resolved_list.get(2).getValue());
        assertEquals(3, resolved_list.size());
        }

    /** Builds a TaskInput with the given type/required flag and optional default. */
    private TaskInput createInput(String name, MuseValueType type, boolean required, ValueSourceConfiguration default_val)
        {
        TaskInput input = new TaskInput(name, type.getId(), required);
        input.setDefault(default_val);
        return input;
        }

    @Override
    public Node createComponentNode()
        {
        _editor = new TaskInputValuesEditor(new ProjectExecutionContext(new SimpleProject()));
        return _editor.getNode();
        }

    private TaskInputValuesEditor _editor;
    private final AtomicBoolean _satisifed = new AtomicBoolean();
    private final TaskInputValuesEditor.InputsSatisfiedListener _listener = (old_value, new_value) -> _satisifed.set(new_value);
    }
<filename>mayan/apps/mayan_statistics/icons.py
from __future__ import absolute_import, unicode_literals

from appearance.classes import Icon

# Font Awesome "sort-numeric-up" icon for the statistics app.
icon_statistics = Icon(driver_name='fontawesome', symbol='sort-numeric-up')
|
# test builtin issubclass
class A:
    pass

# A class is a subclass of itself, and of a tuple containing itself.
print(issubclass(A, A))
print(issubclass(A, (A,)))

# A second argument that is not a class (or tuple of classes) raises
# TypeError, as does a non-class first argument.
for args in ((A, 1), ('a', 1)):
    try:
        issubclass(*args)
    except TypeError:
        print('TypeError')
|
#include <dirent.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include "parser.h"
#include "standard.h"
// Reads one full line from stdin and returns it as a heap-allocated string;
// getline() allocates the buffer, and the caller owns (and must free) it.
// NOTE(review): the getline() return value is not checked — on EOF/error the
// buffer contents are unspecified; confirm callers tolerate this.
char *read_input() {
    char *input = NULL;
    size_t input_buffer_size = 0;
    getline(&input, &input_buffer_size, stdin);
    return input;
}
// Splits the user's input into a NULL-terminated token array (command,
// arguments, redirects, pipes, ...) using the DELIMITER set from the headers.
// The returned array is heap-allocated; the tokens point into 'input' itself.
char **parse_input(char *input) {
    int capacity = BUFFER_SIZE;
    int index = 0;
    char **all_segments = malloc(capacity * sizeof(char *));
    if (all_segments == NULL) {
        fprintf(stderr, "parse_input: out of memory\n");
        exit(EXIT_FAILURE);
    }
    char *segment = strtok(input, DELIMITER);
    while (segment != NULL) {
        all_segments[index++] = segment;
        // Grow the token array when full (a slot is always kept for the NULL sentinel).
        if (index >= capacity) {
            capacity *= 2;
            char **grown = realloc(all_segments, capacity * sizeof(char *));
            if (grown == NULL) {
                // Fix: the original freed the NULL result, assigned NULL to
                // all_segments (leaking the real buffer), then wrote through it.
                free(all_segments);
                fprintf(stderr, "parse_input: out of memory\n");
                exit(EXIT_FAILURE);
            }
            all_segments = grown;
        }
        segment = strtok(NULL, DELIMITER);
    }
    // NULL terminated, execvp-style.
    all_segments[index] = NULL;
    return all_segments;
}
// Counts the entries of a NULL-terminated argument vector
// (the command name itself is included in the count).
int count_arguments(char **parsed_input) {
    int count = 0;
    while (parsed_input[count] != NULL)
        count++;
    return count;
}
// Runs the internal command: cd (change directory).
// With no argument it prints the shell name followed by the current working
// directory instead of changing anything.
int run_cd(char **parsed_input) {
    if (parsed_input[1] == NULL) {
        char buffer[BUFFER_SIZE];
        // NOTE(review): if getcwd() fails, buffer is printed uninitialized —
        // confirm print_error() aborts or that this path is acceptable.
        if (getcwd(buffer, BUFFER_SIZE) == NULL)
            print_error();
        printf("%s%s\n", SHELLNAME, buffer);
    }
    else if (chdir(parsed_input[1]) != 0) {
        print_error();
    }
    return 1; // 1 keeps the shell's main loop running
}
// Runs the internal command: clr (clears the terminal screen).
int run_clr() {
    // Delegate to the system's clear(1) utility; its status is irrelevant here.
    (void)system("clear");
    return 1;
}
// Runs the internal command: dir (lists a directory's contents).
// Defaults to the current directory when no argument is given.
int run_dir(char **parsed_input) {
    const char *target = (parsed_input[1] == NULL) ? "." : parsed_input[1];
    DIR *d = opendir(target);
    if (d == NULL) {
        print_error();
        return 1;
    }
    struct dirent *entry;
    while ((entry = readdir(d)) != NULL) {
        printf("\t%s\n", entry->d_name);
    }
    closedir(d); // fix: the directory stream was leaked on every call
    return 1;
}
// Runs the internal command: environ (lists selected environment variables).
// Fix: getenv() returns NULL for unset variables, and passing NULL to
// printf's %s is undefined behavior — substitute "(null)" explicitly.
int run_environ() {
    static const char *labels[] = {"USER", "HOME", "SHELL", "OS", "PWD", "USERNAME", "LANG"};
    static const char *vars[]   = {"USER", "HOME", "SHELL", "DESKTOP_SESSION", "PWD", "USERNAME", "LANG"};
    for (size_t i = 0; i < sizeof labels / sizeof labels[0]; i++) {
        const char *value = getenv(vars[i]);
        printf("\t%s: %s\n", labels[i], value ? value : "(null)");
    }
    return 1;
}
// Runs the internal command: echo (prints each argument, space-separated,
// followed by a newline).
int run_echo(char **parsed_input) {
    for (int i = 1; parsed_input[i] != NULL; i++)
        printf("%s ", parsed_input[i]);
    printf("\n");
    return 1;
}
// Runs the internal command: help
// (prints the user manual for the crb_shell program).
// NOTE(review): the > / >> descriptions below look swapped relative to the
// usual shell convention (> truncates, >> appends) — confirm against the
// redirect implementation before changing the text.
int run_help() {
    printf("crb_shell User Manual:\n");
    printf(" Internal Commands:\n");
    printf("\tcd <directory>: Change the directory to the given <directory>\n");
    printf("\tclr: clear the screen\n");
    printf("\tdir: list the contents of the current working directory\n");
    printf("\tenviron: list some of the local system environment variables\n");
    printf("\techo <arguments...>: print out to given arguments\n");
    printf("\thelp: list helpful user information for the crb_shell program\n");
    printf("\tpause: pause the shell until the 'enter' key is pressed\n");
    printf("\tquit: quit the crb_shell program, and return to calling shell program\n");
    printf(" External Commands:\n");
    printf("\tAll external commands are supported.\n");
    printf(" Input redirects: < or <<\n");
    printf("\tUsage: com1 args... < filename: This will use the contents of filename as the input for com1.\n");
    printf("\tUsage: com1 args... << filename: This will use the contents of filename as the input for com1.\n");
    printf(" Output redirects: > or >>\n");
    printf("\tUsage: com1 args... > filename: This will append the output of com1 to filename.\n");
    printf("\tUsage: com1 args... >> filename: This will truncate the output of com1 to filename.\n");
    printf(" Command piping: |\n");
    printf("\tUsage: com1 args... | com2 args...: This will link the output of com1 to the input of com2.\n");
    return 1;
}
// Runs the internal command: pause
// (blocks until the user presses 'enter').
int run_pause() {
    printf("Press 'enter' to continue");
    int c;
    // Fix: also stop on EOF — with a closed or redirected stdin the original
    // while (getchar() != '\n') spun forever.
    while ((c = getchar()) != '\n' && c != EOF)
        ;
    return 1;
}
// Runs the internal command: quit — terminates the entire shell process.
int run_quit() {
    exit(EXIT_SUCCESS);
    return 0; // unreachable; keeps the int signature consistent with the other run_* commands
}
|
/**
 * Main screen: fetches the weather for a hard-coded city ("Los Angeles")
 * and renders the current temperature plus a three-day forecast.
 */
public class MainActivity extends AppCompatActivity {
    // Views populated from the network response.
    private TextView temperatureTextView, forecastTextView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        temperatureTextView = findViewById(R.id.temperatureTextView);
        forecastTextView = findViewById(R.id.forecastTextView);
        // Make network request
        Api.getWeather("Los Angeles", new Callback<Weather>() {
            @Override
            public void onResponse(@NonNull Call<Weather> call, @NonNull Response<Weather> response) {
                // Handle successful response
                if (response.isSuccessful()) {
                    Weather weather = response.body();
                    // NOTE(review): response.body() can be null even when
                    // isSuccessful() — confirm Api guarantees a body here.
                    temperatureTextView.setText("Current Temperature: " + weather.getCurrentTemperature());
                    forecastTextView.setText("Forecast: \n"
                            + "Day 1: " + weather.getForecastTemp1() + "\n"
                            + "Day 2: " + weather.getForecastTemp2() + "\n"
                            + "Day 3: " + weather.getForecastTemp3());
                }
            }

            @Override
            public void onFailure(Call<Weather> call, Throwable t) {
                // Handle error
                // NOTE(review): failures are silently swallowed — the UI never
                // learns the request failed; consider surfacing an error state.
            }
        });
    }
}
import pyttsx3
import PySimpleGUI as sg
import wikipedia
from configs.gui import main

# "sapi5" selects the Windows Speech API backend for pyttsx3.
engine = pyttsx3.init("sapi5")
# Speech rate in words per minute; NOTE(review): assumes 100 is slower than
# the engine default — confirm for the deployed voice.
newVoiceRate = 100
engine.setProperty('rate',newVoiceRate)

def speak(text):
    # Queue the text and block until it has been fully spoken.
    engine.say(text)
    engine.runAndWait()

# Speak whatever the GUI's main() returns.
speak(main())
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-3120-1
#
# Security announcement date: 2015-01-06 00:00:00 UTC
# Script generation date: 2017-01-01 21:07:10 UTC
#
# Operating System: Debian 7 (Wheezy)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - mantis:1.2.18-1
#
# Last versions recommended by the security team:
# - mantis:1.2.18-1+deb7u1
#
# CVE List:
# - CVE-2014-6316
# - CVE-2014-7146
# - CVE-2014-8553
# - CVE-2014-8554
# - CVE-2014-8598
# - CVE-2014-8986
# - CVE-2014-8988
# - CVE-2014-9089
# - CVE-2014-9117
# - CVE-2014-9269
# - CVE-2014-9270
# - CVE-2014-9271
# - CVE-2014-9272
# - CVE-2014-9280
# - CVE-2014-9281
# - CVE-2014-9388
# - CVE-2014-9506
# - CVE-2014-6387
# - CVE-2013-4460
# - CVE-2013-1934
# - CVE-2013-1811
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Non-interactively upgrade mantis to the patched version listed above.
sudo apt-get install --only-upgrade mantis=1.2.18-1+deb7u1 -y
|
#!/bin/bash
# Deploy the shared Flink/Kafka/monitoring stack, then the query-1 experiment jobs.
cd ..
cd common-files
# Flink cluster configuration and services.
kubectl apply -f flink-configuration-configmap.yaml
kubectl apply -f jobmanager-rest-service.yaml
kubectl apply -f jobmanager-service.yaml
kubectl apply -f experiments-taskmanager.yaml
# Zookeeper + multi-broker Kafka.
kubectl apply -f zookeeper-service.yaml
kubectl apply -f zookeeper-deployment.yaml
kubectl apply -f kafka-multi-broker.yaml
# Monitoring: Prometheus and Grafana (with the two provisioned dashboards).
helm install prometheus prometheus --repo https://prometheus-community.github.io/helm-charts --values values-prometheus.yaml
helm install grafana grafana --repo https://grafana.github.io/helm-charts --values values-grafana.yaml --set-file dashboards.default.flink-dashboard.json=grafana-dashboard.json --set-file dashboards.default.scaling-dashboard.json=grafana-dashboard-auto.json
kubectl expose deployment prometheus-server --type=LoadBalancer --name=my-external-prometheus
kubectl expose deployment grafana --type=LoadBalancer --name=my-external-grafana
# Wait for everything to come up before creating the Kafka topic.
kubectl wait --timeout=3m --for=condition=ready pods --all
kubectl wait --timeout=2m --for=condition=ready statefulset --all
kubectl exec kafka-2 -- /opt/kafka/bin/kafka-topics.sh --create -zookeeper zoo1:2181 --replication-factor 1 --partitions 24 --topic bids_topic
# kubectl expose job flink-jobmanager
cd ..
cd query-1-experiments
kubectl apply -f experiments-jobmanager.yaml
kubectl apply -f workbench-deployment.yaml
# Remove the installed package and the previously-built distribution artifacts.
# Fix: pip3 uninstall prompts for confirmation without -y and would hang a
# non-interactive run.
pip3 uninstall -y rblk
git rm -r dist
git rm -r build
git rm -r rblk.egg-info
# The working-tree copies may already be gone after git rm; -f ignores that.
rm -rf dist
rm -rf build
rm -rf rblk.egg-info
git add .
git commit -m "remove old build"
|
# Rebuild the system dconf databases so keyfile changes take effect.
dconf update
|
#!/bin/bash
# Install the AWS CLI via pip, non-interactively.
echo "---installing awscli---"
export DEBIAN_FRONTEND=noninteractive
# setuptools is required before pip can build/install packages.
apt-get install -y --no-install-recommends python3-setuptools
python3 -m pip install --upgrade pip
pip3 install awscli
echo "---installing awscli done---"
|
<filename>src/dynamic_programming/Boj1463.java
package dynamic_programming;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
/**
*
* @author minchoba
* 백준 1463번: 1로 만들기
*
* @see https://www.acmicpc.net/problem/1463/
*
*/
/**
 * BOJ 1463 ("make it 1"): minimum number of operations (divide by 3,
 * divide by 2, subtract 1) needed to reduce X to 1.
 */
public class Boj1463 {
    // dp[i] = minimum number of operations to reduce i to 1.
    private static int[] dp;

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int X = Integer.parseInt(br.readLine());
        dp = new int[X + 1];
        System.out.println(recursion(X));
    }

    /**
     * Bottom-up DP.
     * Fix: the original top-down recursion recursed once per x-1 step and
     * overflows the call stack for large X; the iterative fill is O(x) with
     * constant stack depth and identical results.
     *
     * @param x target value (>= 1)
     * @return minimum operation count to reach 1
     */
    private static int recursion(int x) {
        dp[1] = 0;
        for (int i = 2; i <= x; i++) {
            int best = dp[i - 1] + 1;            // subtract 1
            if (i % 2 == 0) best = Math.min(best, dp[i >> 1] + 1); // divide by 2
            if (i % 3 == 0) best = Math.min(best, dp[i / 3] + 1);  // divide by 3
            dp[i] = best;
        }
        return dp[x];
    }
}
|
<gh_stars>1-10
"""set of common utilities"""
import os
import sys
import time
import uuid
import zipfile
import datetime
import tempfile
from contextlib import contextmanager
import six
import logging
# Sequence types accepted wherever a "list" is expected.
list_types = (list, tuple)
# Numeric scalar types; Python 2 additionally has the separate 'long' type.
if sys.version_info.major == 3:
    number_type = (int, float)
else:
    number_type = (int, float, long)
#----------------------------------------------------------------------
def create_uid():
    # Return a random UUID4 as a 32-character hex string.
    # On Python 2 the hex form is exposed via the get_hex() method,
    # on Python 3 via the .hex property.
    if six.PY2:
        return uuid.uuid4().get_hex()
    else:
        return uuid.uuid4().hex
#----------------------------------------------------------------------
def _date_handler(obj):
    # JSON serialization hook: datetimes become AGOL UTC millisecond
    # timestamps; anything else passes through unchanged.
    if isinstance(obj, datetime.datetime):
        return local_time_to_online(obj)
    else:
        return obj
#----------------------------------------------------------------------
def local_time_to_online(dt=None):
    """
    converts datetime object to a UTC timestamp for AGOL
    Inputs:
       dt - datetime object (defaults to the current local time when None)
    Output:
       Long value (milliseconds)
    """
    if dt is None:
        dt = datetime.datetime.now()
    # DST-aware local UTC offset, in seconds.
    # NOTE(review): the offset is *added* to the local epoch time — confirm
    # the intended sign for timezones east of UTC before reusing this math.
    is_dst = time.daylight and time.localtime().tm_isdst > 0
    utc_offset = (time.altzone if is_dst else time.timezone)
    return (time.mktime(dt.timetuple()) * 1000) + (utc_offset *1000)
#----------------------------------------------------------------------
def online_time_to_string(value,timeFormat):
"""
Converts a timestamp to date/time string
Inputs:
value - timestamp as long
timeFormat - output date/time format
Output:
string
"""
return datetime.datetime.fromtimestamp(value /1000).strftime(timeFormat)
#----------------------------------------------------------------------
def timestamp_to_datetime(timestamp):
    """Convert a millisecond timestamp (long) into a naive local datetime object.

    Inputs:
       timestamp - timestamp value as Long (milliseconds)
    output:
       datetime object
    """
    seconds = timestamp / 1000
    return datetime.datetime.fromtimestamp(seconds)
#--------------------------------------------------------------------------
def is_valid(value):
    # Local import avoids a circular dependency with the geometry module.
    from _geom import Point, Polygon, Polyline, MultiPoint, Envelope
    """checks if the value is valid"""
    # Points: either real x/y attributes, or a mapping whose 'x' is an
    # explicit null / serialized "NaN" (treated as a valid empty point).
    if isinstance(value, Point):
        if hasattr(value, 'x') and \
           hasattr(value, 'y') :
            return True
        elif 'x' in value and \
             (value['x'] is None or \
              value['x'] == "NaN"):
            return True
        return False
    # Envelopes: all four numeric extents present, or an explicitly
    # empty envelope (xmin of None / "NaN").
    elif isinstance(value, Envelope):
        if all(hasattr(value, a) for a in ('xmin', 'ymin',
                                           'xmax', 'ymax')) and \
           all(isinstance(getattr(value,a), number_type) for a in ('xmin', 'ymin',
                                                                   'xmax', 'ymax')):
            return True
        elif hasattr(value, "xmin") and \
             (value.xmin is None or value.xmin == "NaN"):
            return True
        else:
            return False
    # Multi-part geometries: validated by whichever coordinate container
    # ('paths', 'rings' or 'points') the value exposes; an empty container
    # counts as valid.
    elif isinstance(value, (MultiPoint,
                            Polygon,
                            Polyline)):
        if 'paths' in value:
            if len(value['paths']) == 0:
                return True
            else:
                return is_line(coords=value['paths'])
        elif 'rings' in value:
            if len(value['rings']) == 0:
                return True
            else:
                return is_polygon(coords=value['rings'])
        elif 'points' in value:
            if len(value['points']) == 0:
                return True
            else:
                return is_point(coords=value['points'])
        return False
    else:
        # Any other type is not a geometry we can validate.
        return False
    return False
#--------------------------------------------------------------------------
def is_polygon(coords):
    """True when every ring has at least 4 vertices, is closed
    (first vertex equals last) and consists of valid point lists."""
    for ring in coords:
        if len(ring) < 4:
            return False
        if ring[0] != ring[-1]:
            return False
        if not is_line(ring):
            return False
    return True
#--------------------------------------------------------------------------
def is_line(coords):
    """
    Checks that 'coords' describes a line: a non-empty list/tuple whose
    elements are all valid points.

    An empty or non-list input returns True, preserving the permissive
    behavior that is_valid() relies on.
    """
    if isinstance(coords, list_types) and \
       len(coords) > 0:  # list of lists
        return all(is_point(elem) for elem in coords)
    # Fix: removed an unreachable trailing 'return False' that followed this.
    return True
#--------------------------------------------------------------------------
def is_point(coords):
    """
    Checks that 'coords' is a coordinate tuple (at least 2 numbers) or a
    nested list whose first element leads to such a tuple.
    """
    if not isinstance(coords, (list, tuple)) or len(coords) <= 1:
        return False
    head = coords[0]
    if isinstance(head, number_type):
        # Flat coordinate: every entry must be numeric.
        return all(isinstance(v, number_type) for v in coords)
    # Nested structure: only the first element is inspected, matching the
    # original early-return behavior.
    return is_point(head)
###########################################################################
class Error(Exception): pass  # Base exception type for this module.
#--------------------------------------------------------------------------
@contextmanager
def _tempinput(data):
    # Write 'data' (a str, encoded as UTF-8) to a named temporary file and
    # yield the file's path; the file is removed when the block exits.
    # delete=False is needed so the file can be reopened by name while the
    # handle is closed (required on Windows).
    temp = tempfile.NamedTemporaryFile(delete=False)
    temp.write((bytes(data, 'UTF-8')))
    temp.close()
    yield temp.name
    os.unlink(temp.name)
#--------------------------------------------------------------------------
def _lazy_property(fn):
'''Decorator that makes a property lazy-evaluated.
'''
# http://stevenloria.com/lazy-evaluated-properties-in-python/
attr_name = '_lazy_' + fn.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
return _lazy_property
#--------------------------------------------------------------------------
def _is_shapefile(data):
if zipfile.is_zipfile(data):
zf = zipfile.ZipFile(data, 'r')
namelist = zf.namelist()
for name in namelist:
if name.endswith('.shp') or name.endswith('.SHP'):
return True
return False
#--------------------------------------------------------------------------
def rot13(s):
    """Apply the ROT13 substitution cipher to s.

    ASCII letters are rotated 13 places within their own case; every other
    character is passed through unchanged.
    """
    out = []
    for ch in s:
        code = ord(ch)
        if ord('a') <= code <= ord('z'):
            code = code - 13 if code > ord('m') else code + 13
        elif ord('A') <= code <= ord('Z'):
            code = code - 13 if code > ord('M') else code + 13
        out.append(chr(code))
    return ''.join(out)
#--------------------------------------------------------------------------
def _to_utf8(data):
    """ Converts strings and collections of strings from unicode to utf-8. """
    if isinstance(data, dict):
        # Recurse into keys and values; entries whose value is None are dropped.
        return {_to_utf8(key): _to_utf8(value) \
                for key, value in data.items() if value is not None}
    elif isinstance(data, list):
        return [_to_utf8(element) for element in data]
    elif isinstance(data, str):
        # Python 3 str (and Python 2 bytes) pass through unchanged; only the
        # six.text_type branch below actually encodes (Python 2 unicode).
        return data
    elif isinstance(data, six.text_type):
        return data.encode('utf-8')
    elif isinstance(data, (float, six.integer_types)):
        return data
    else:
        return data
#--------------------------------------------------------------------------
class _DisableLogger():
def __enter__(self):
logging.disable(logging.CRITICAL)
def __exit__(self, a, b, c):
logging.disable(logging.NOTSET)
|
# Evaluate the 512+0+512-shuffled-N language model on the wikitext-103 validation
# set with the global-trigram-shuffle augmentation, scoring the last quarter only.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-shuffled-N/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-shuffled-N/512+0+512-STG-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_trigrams_globally_first_half_quarter --eval_function last_quarter_eval
<filename>src/index.ts<gh_stars>0
import { HtmlTags, LoadContext, Plugin } from '@docusaurus/types';
/**
 * Docusaurus plugin that injects the GoatCounter analytics snippet
 * (in production builds only) based on themeConfig.goatcounter.code.
 */
export default function pluginGoatcounter({
  siteConfig: { themeConfig },
}: LoadContext): Plugin<void> {
  const { goatcounter }: any = themeConfig || {};

  // Validate the configuration up front so misconfiguration fails the build.
  if (!goatcounter) {
    throw new Error(
      `You need to specify 'goatcounter' object in 'themeConfig' with 'code' field in it to use docusaurus-plugin-goatcounter`
    );
  }
  const code = goatcounter?.code;
  if (!code) {
    throw new Error(
      'You specified the `goatcounter` object in `themeConfig` but the `code` field was missing. ' +
        'Please add it.'
    );
  }
  if (typeof code !== 'string') {
    throw new Error(
      'You specified the `goatcounter` object in `themeConfig` but the `code` field should be a string.'
    );
  }

  const isProd = process.env.NODE_ENV === 'production';
  const analyticsDomain = `https://${code}.goatcounter.com`;

  // A preconnect hint for the analytics host plus the counter script itself.
  const injectGoatcounterTags = (): { headTags: HtmlTags } => {
    const preconnect = {
      tagName: 'link',
      attributes: {
        rel: 'preconnect',
        href: analyticsDomain,
      },
    };
    const counterScript = {
      tagName: 'script',
      attributes: {
        async: true,
        src: '//gc.zgo.at/count.js',
        'data-goatcounter': `${analyticsDomain}/count`,
      },
    };
    return { headTags: [preconnect, counterScript] };
  };

  return {
    name: 'docusaurus-plugin-goatcounter',
    injectHtmlTags: isProd ? injectGoatcounterTags : undefined,
  };
}
|
'use strict';
exports.moveProjectiles = function(projectiles, projectilesSpeed) {
for(let i = projectiles.length - 1; i >= 0; i--) {
projectiles[i].y -= projectilesSpeed;
if (projectiles[i].y < 0) {
projectiles.splice(i, 1);
};
};
};
exports.moveStarsAndAsteroids = function(starsOrAsteroids, speed, dynWidth, dynHeight) {
for(let i = 0; i < starsOrAsteroids.length; i++) {
starsOrAsteroids[i].y += speed;
if (starsOrAsteroids[i].y > dynHeight) {
starsOrAsteroids[i].y = 0;
starsOrAsteroids[i].x = Math.floor(Math.random() * dynWidth);
};
};
};
exports.moveShip = function(playerShip, clientX) {
playerShip.currentPosition.x = Math.floor(clientX - playerShip.shipXCenter);
};
exports.fireLeftWeapon = function(gameData, playerShip) {
gameData.projectiles.push({"x" : playerShip.currentPosition.x + playerShip.weaponXCenter, "y" : playerShip.currentPosition.y});
};
exports.fireRightWeapon = function(gameData, playerShip) {
gameData.projectiles.push({"x" : playerShip.currentPosition.x + playerShip.size.x - playerShip.weaponXCenter, "y" : playerShip.currentPosition.y});
};
|
<reponame>ChristopherChudzicki/mathbox
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS206: Consider reworking classes to avoid initClass
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import * as UThree from "../../../util/three.js";
import { View } from "./view.js";
/**
 * Cartesian view: maps the configured per-axis ranges through a single 4x4
 * view matrix that shaders consume as the `viewMatrix` uniform.
 */
export class Cartesian extends View {
  static initClass() {
    // Traits this node type exposes to the model layer.
    this.traits = ["node", "object", "visible", "view", "view3", "vertex"];
  }

  make() {
    super.make();
    // Shader uniform holding the composed range + transform matrix.
    this.uniforms = { viewMatrix: this._attributes.make(this._types.mat4()) };
    this.viewMatrix = this.uniforms.viewMatrix.value;
    this.composer = UThree.transformComposer();
  }

  unmake() {
    super.unmake();
    delete this.viewMatrix;
    // NOTE(review): objectMatrix is never assigned in this class — confirm
    // whether this delete is a leftover from the base class.
    delete this.objectMatrix;
    delete this.uniforms;
  }

  change(changed, touched, init) {
    // Recompute only when view-related props changed (or on first init).
    if (!touched["view"] && !touched["view3"] && !init) {
      return;
    }
    const p = this.props.position;
    const s = this.props.scale;
    const q = this.props.quaternion;
    const r = this.props.rotation;
    const g = this.props.range;
    const e = this.props.eulerOrder;
    // Range start per axis; the d* spans fall back to 1 to avoid dividing by zero.
    const { x } = g[0];
    const y = g[1].x;
    const z = g[2].x;
    const dx = g[0].y - x || 1;
    const dy = g[1].y - y || 1;
    const dz = g[2].y - z || 1;
    // Forward transform: affine map sending each axis range [lo, hi] to [-1, 1].
    this.viewMatrix.set(2 / dx, 0, 0, -(2 * x + dx) / dx, 0, 2 / dy, 0, -(2 * y + dy) / dy, 0, 0, 2 / dz, -(2 * z + dz) / dz, 0, 0, 0, 1);
    // Apply position/rotation/scale on top of the range normalization.
    const transformMatrix = this.composer(p, r, q, s, null, e);
    this.viewMatrix.multiplyMatrices(transformMatrix, this.viewMatrix);
    if (changed["view.range"]) {
      this.trigger({
        type: "view.range",
      });
    }
  }

  vertex(shader, pass) {
    // Inject the cartesian position transform during pass 1.
    if (pass === 1) {
      shader.pipe("cartesian.position", this.uniforms);
    }
    return super.vertex(shader, pass);
  }
}
Cartesian.initClass();
|
-- Day difference between the two date columns (MySQL semantics: date1 - date2).
SELECT DATEDIFF(date1, date2) AS date_difference
FROM table_name;
#
# Docker context
#
# Show current remote Docker context
# ------------------------------------------------------------------------------
# Configuration
# ------------------------------------------------------------------------------
SPACESHIP_DOCKER_CONTEXT_SHOW="${SPACESHIP_DOCKER_CONTEXT_SHOW=true}"
SPACESHIP_DOCKER_CONTEXT_PREFIX="${SPACESHIP_DOCKER_CONTEXT_PREFIX=" ("}"
SPACESHIP_DOCKER_CONTEXT_SUFFIX="${SPACESHIP_DOCKER_CONTEXT_SUFFIX=")"}"
# ------------------------------------------------------------------------------
# Section
# ------------------------------------------------------------------------------
spaceship_docker_context() {
  [[ $SPACESHIP_DOCKER_CONTEXT_SHOW == false ]] && return
  local docker_remote_context
  # Docker has three different ways to work on remote Docker hosts:
  # 1. docker-machine
  # 2. DOCKER_HOST environment variable
  # 3. docker context (since Docker 19.03)
  if [[ -n $DOCKER_MACHINE_NAME ]]; then
    docker_remote_context="$DOCKER_MACHINE_NAME"
  elif [[ -n $DOCKER_HOST ]]; then
    # Remove protocol (tcp://) and port number from displayed Docker host
    docker_remote_context="$(basename $DOCKER_HOST | cut -d':' -f1)"
  else
    # Docker contexts can be set using either the DOCKER_CONTEXT environment variable
    # or the `docker context use` command. `docker context ls` will show the selected
    # context in both cases. But we are not interested in the local "default" context.
    # ("desktop-linux" is likewise excluded; stderr is discarded in case the
    # docker CLI is missing or the daemon is unreachable.)
    docker_remote_context=$(docker context ls --format '{{if .Current}}{{if and (ne .Name "default") (ne .Name "desktop-linux")}}{{.Name}}{{end}}{{end}}' 2>/dev/null)
    [[ $? -ne 0 ]] && return
    docker_remote_context=$(echo $docker_remote_context | tr -d '\n')
  fi
  # Nothing remote selected -> render no section at all.
  [[ -z $docker_remote_context ]] && return
  spaceship::section \
    "$SPACESHIP_DOCKER_COLOR" \
    "$SPACESHIP_DOCKER_CONTEXT_PREFIX${docker_remote_context}$SPACESHIP_DOCKER_CONTEXT_SUFFIX"
}
# This sample requires starting up the Subscriber Service after the Publisher starts up the hub and registers the topic
$ ballerina run publisher.bal
2018-04-12 18:32:59,058 INFO [] - Starting up the Ballerina Hub Service
ballerina: started HTTPS/WSS endpoint localhost:9292
ballerina: Default Ballerina WebSub Hub started up at https://localhost:9292/websub/hub
$ ballerina run subscriber.bal
ballerina: initiating service(s) in 'subscriber.bal'
2018-04-12 18:33:01,116 INFO [ballerina.websub] - Initializing WebSub signature validation filter
ballerina: started HTTP/WS endpoint 0.0.0.0:8181
Output from the publisher:
2018-04-12 18:32:59,847 INFO [] - Topic registration successful!
2018-04-12 18:33:02,679 INFO [websub.hub] - Intent verification successful for mode: [subscribe], for callback URL: [http://0.0.0.0:8181/websub]
2018-04-12 18:33:19,860 INFO [] - Publishing update to internal Hub
2018-04-12 18:33:20,028 INFO [] - Update notification successful!
Output from the subscriber:
2018-04-12 18:33:02,506 INFO [ballerina.websub] - Subscription Request successful at Hub[https://localhost:9292/websub/hub], for Topic[http://www.websubpubtopic.com], with Callback [http://0.0.0.0:8181/websub]
2018-04-12 18:33:02,601 INFO [ballerina.websub] - Intent Verification agreed - Mode [subscribe], Topic [http://www.websubpubtopic.com], Lease Seconds [86400000]
2018-04-12 18:33:02,602 INFO [] - Intent verified for subscription request
2018-04-12 18:33:20,080 INFO [] - WebSub Notification Received: {"action":"publish","mode":"internal-hub"}
|
import json
from pathlib import Path
from aiohttp.web import Application, run_app
from click import argument, command, format_filename, option
from .utils import log
from . import handlers, jobs
@command()
@argument("database")
@argument("host", default="0.0.0.0")
# Fix: the default was the string "8080", so click inferred STRING and the
# 'port: int' annotation was wrong; an int default (+ explicit type) makes
# click coerce "8080" from the command line into an int.
@argument("port", default=8080, type=int)
@option("-u", "--update-now", is_flag=True, help="Update the database on start")
@option(
    "-i",
    "--update-interval",
    type=float,
    default=10,
    help="Database update interval in minutes",
)
def main(database: str, host: str, port: int, update_now: bool, update_interval: float):
    """Start the aiohttp API server backed by the given JSON database file."""
    app = Application()
    app["database_path"] = database
    app["update_interval"] = update_interval
    if update_now:
        # Fetch a fresh database during startup.
        app.on_startup.append(jobs.update_database)
    else:
        # Serve the existing file and refresh it periodically.
        if not Path(database).exists():
            log.error(format_filename(database) + " does not exist")
            return
        with open(database) as f:
            app["database"] = json.load(f)
        app.on_startup.append(jobs.start_database_updater)
    # Read-only query endpoints.
    app.router.add_get("/courses", handlers.courses)
    app.router.add_get("/classes", handlers.classes)
    app.router.add_get("/semesters", handlers.semesters)
    app.router.add_get("/campi", handlers.campi)
    run_app(app, host=host, port=port)


if __name__ == "__main__":
    main()
|
import { expect } from 'chai'
import { compose } from './compose'
describe('Compose', () => {
  it('Should compose functions', () => {
    const double = (a: number) => a * 2
    const addTen = (a: number) => a + 10

    // compose applies right-to-left: compose(f, g)(x) === f(g(x)).
    expect(compose(double, addTen)(10)).to.equal(40)
    expect(compose(addTen, double)(10)).to.equal(30)
    expect(compose(double, double, addTen)(10)).to.equal(80)
    expect(compose(addTen, addTen, double)(10)).to.equal(40)
  })
})
|
/*
* Copyright (c) 2011, <NAME>
*
* All rights reserved.
*
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the authors nor other contributors may be used to endorse
* or promote products derived from this software without specific prior written
* permission.
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef OSC_H
#define OSC_H
#include "UNIV.h"
#include <stdio.h>
#include <iostream>
#include <stdexcept>
#include <map>
#include "SchemeProcess.h"
#include "EXTThread.h"
#ifdef EXT_BOOST
#include <boost/asio.hpp>
#else
#include <sys/socket.h>
#include <sys/types.h>
#include <netinet/in.h>
#endif
// Endian-swap helpers (C linkage) used when (de)serializing OSC numeric
// payloads between host and network byte order.
extern "C"
{
    uint64_t swap64f(double d);
    double unswap64f(uint64_t a);
    uint32_t swap32f(float f);
    float unswap32f(uint32_t a);
    uint64_t swap64i(uint64_t d);
    uint64_t unswap64i(uint64_t a);
    uint32_t swap32i(uint32_t f);
    uint32_t unswap32i(uint32_t a);
}

//#define _OSC_DEBUG_

namespace extemp {

    // OSC (Open Sound Control) server bound to a Scheme interpreter
    // instance.  One OSC object is registered per scheme* in SCHEME_MAP and
    // retrieved via I().  Transport is boost::asio UDP when EXT_BOOST is
    // defined, BSD sockets otherwise.
    class OSC {
    public:
        OSC();

        // Accessor for the OSC server registered for Scheme instance _sc;
        // throws std::runtime_error when none was registered.
        static OSC* I(scheme* _sc) {
            if(SCHEME_MAP.count(_sc)<1) {
                throw std::runtime_error("Error: NO such OSC Server");
            }
            return SCHEME_MAP[_sc];
            //if(OSC::singleton == NULL) OSC::singleton = new OSC(); return OSC::singleton;
        }

        // Registers OSC bindings with a Scheme process.
        static void schemeInit(SchemeProcess* scm);
        //void getMessage();

        // Raw-buffer (de)serializers for OSC-typed values.  Each returns an
        // int -- presumably the number of bytes read/written; TODO confirm
        // against the implementations in OSC.cpp.
        static int setOSCTimestamp(char* data, double d);
        static int getOSCTimestamp(const char* data, double* d);
        static int setOSCString(char* data, std::string* str);
        static int getOSCString(const char* data, std::string* str);
        static int setOSCfloat(char* data, float* f);
        static int getOSCfloat(const char* data, float* f);
        static int setOSCdouble(char* data, double* f);
        static int getOSCdouble(const char* data, double* f);
        static int setOSCInt(char* data, int* i);
        static int getOSCInt(const char* data, int* i);
        static int setOSCLong(char* data, int64_t* l);
        static int getOSCLong(const char* data, int64_t* l);
        // static int setOSCData(char* data, NSData* data);
        // static int getOSCData(const char* data, NSData** data);

        // Converts Scheme call arguments into an OSC argument buffer plus a
        // type-tag string.
        static void processArgs(pointer arg, char** tmp, char** ptr, int* lgth, std::string& typetags, scheme* _sc);
        int clearMessageBuffer();
        static void getOSCStringSection(std::string* input, std::string* output, int section);

        // Scheme-callable entry points.
        //static pointer sendOSC(scheme* _sc, pointer args);
        static pointer registerScheme(scheme* _sc, pointer args);
        static pointer set_real_type(scheme* _sc, pointer args);
        static pointer set_integer_type(scheme* _sc, pointer args);
        static pointer send_from_server_socket(scheme* _sc, pointer args);
        static pointer set_msg_include_netaddr(scheme* _sc, pointer args);

#ifdef EXT_BOOST
        // boost::asio UDP transport accessors.
        boost::asio::ip::udp::endpoint* getAddress() { return osc_address; }
        boost::asio::ip::udp::endpoint* getClientAddress() { return osc_client_address; }
        int* getClientAddressSize() { return &osc_client_address_size; }
        void setClientAddressSize(int addr_size) { osc_client_address_size = addr_size; }
        int getConnectionType() { return conn_type; }
        void setConnectionType(int type) { conn_type = type; }
        char* getMessageData() { return message_data; }
        int getMessageLength() { return message_length; }
        boost::asio::ip::udp::socket* getSendFD() { return send_socket; }
        void setSendFD(boost::asio::ip::udp::socket* fd) { send_socket = fd; }
        void setSocket(boost::asio::ip::udp::socket* soc) { socket = soc; }
        boost::asio::ip::udp::socket* getSocketFD() { return socket; }
        boost::asio::io_service* getIOService() { return io_service; }
#else
        // BSD socket transport accessors.
        struct sockaddr_in* getAddress() { return &osc_address; }
        struct sockaddr_in* getClientAddress() { return &osc_client_address; }
        int* getClientAddressSize() { return &osc_client_address_size; }
        void setClientAddressSize(int addr_size) { osc_client_address_size = addr_size; }
        int getConnectionType() { return conn_type; }
        void setConnectionType(int type) { conn_type = type; }
        char* getMessageData() { return message_data; }
        int getMessageLength() { return message_length; }
        int getSendFD() { return send_socket_fd; }
        void setSendFD(int fd) { send_socket_fd = fd; }
        int* getSocketFD() { return &socket_fd; }
        void setSocketFD(int fd) { socket_fd = fd; }
#endif

        EXTThread& getThread() { return threadOSC; }
        bool getStarted() { return started; }
        void setStarted(bool val) { started = val; }

        // Optional compiled (native) callback hooks; see nativeOSC/nativeUDP.
        int (*getNativeOSC()) (char*,char*,char*,int) { return nativeOSC; }
        void setNativeOSC(int(*val)(char*,char*,char*,int)) { nativeOSC = val; }
        int (*getNativeUDP()) (char*,int) { return nativeUDP; }
        void setNativeUDP(int(*val)(char*,int)) { nativeUDP = val; }

        void sendOSC(TaskI* task);

        scheme* sc;
        char fname[256];
        // Registry of OSC servers keyed by Scheme instance.
        static std::map<scheme*, OSC*> SCHEME_MAP;
        // Type tags used when encoding Scheme reals/integers into OSC.
        char scheme_real_type;
        char scheme_integer_type;
        bool send_from_serverfd;
        bool msg_include_netaddr;

    private:
        static OSC* singleton;
        EXTThread threadOSC;
#ifdef EXT_BOOST
        boost::asio::ip::udp::socket* socket;
        boost::asio::ip::udp::socket* send_socket;
        boost::asio::ip::udp::endpoint* osc_address;
        boost::asio::ip::udp::endpoint* osc_client_address;
        boost::asio::io_service* io_service;
#else
        int socket_fd;
        int send_socket_fd;
        struct sockaddr_in osc_address;
        struct sockaddr_in osc_client_address;
#endif
        int conn_type; // UDP (1) or TCP (2)
        int osc_client_address_size;
        char message_data[70000];
        int message_length;
        bool started;
        int(*nativeOSC)(char*,char*,char*,int); /* if not null then use this compiled function for callbacks */
        int(*nativeUDP)(char*,int);
    };

} //End Namespace
#endif
|
package com.test_shangguigu.t09_autowired.config;
import com.test_shangguigu.t09_autowired.beans.Person;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
/**
 * Autowiring notes (translated):
 * Spring uses dependency injection (DI) to populate the dependencies between
 * the components held in the IOC container.
 *
 * 1) @Autowired -- automatic injection (Spring framework annotation)
 *    1) by default it resolves by type: applicationContext.getBean(BookDao.class);
 *    2) if several beans share that type, the property name is used as the
 *       bean id: applicationContext.getBean("bookDao");
 *    3) @Qualifier("bookDao") explicitly names the bean id to inject instead
 *       of relying on the property name;
 *    4) autowiring fails when no matching bean exists; use
 *       @Autowired(required = false) to make the dependency optional;
 *    5) @Primary marks the preferred bean for autowiring; @Qualifier can
 *       still be used to select a different one.
 *
 * 2) Spring also supports @Resource (JSR-250) and @Inject (JSR-330)
 *    (Java-standard annotations):
 *    1) @Resource autowires like @Autowired but matches by component name by
 *       default; it supports neither @Primary nor required = false;
 *    2) @Inject (requires the javax.inject jar) behaves like @Autowired but
 *       has no required = false equivalent.
 *
 * 3) @Autowired can be placed on constructors, parameters, methods and fields:
 *    1) [on a method] @Bean + method parameters: the arguments come from the
 *       container, exactly as if @Autowired were present;
 *    2) [on a constructor] when a component has a single parameterized
 *       constructor, its @Autowired may be omitted and the parameters are
 *       still resolved from the container;
 *    3) [on a parameter]
 *
 * 4) For a custom component to use low-level Spring machinery
 *    (ApplicationContext, BeanFactory, ...), implement the matching
 *    xxxAware interface; the declared callback is invoked at creation time
 *    to hand the component in.  Each xxxAware is serviced by a corresponding
 *    xxxProcessor, e.g. ApplicationContextAware ==> ApplicationContextAwareProcessor.
 */
@Configuration
@ComponentScan({"com.test_shangguigu.t09_autowired.controller"
        ,"com.test_shangguigu.t09_autowired.service"
        ,"com.test_shangguigu.t09_autowired.dao"
        ,"com.test_shangguigu.t09_autowired.beans"
})
public class AppConfig09 {
}
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed so output is reproducible:
#   -d 9          select diehard test number 9 -- TODO confirm which test this is
#   -g 22         select generator id 22 -- presumably a file/stdin generator; verify with `dieharder -g -1`
#   -S 956079594  fixed RNG seed
dieharder -d 9 -g 22 -S 956079594
|
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package s2
import (
"testing"
"github.com/golang/geo/s1"
)
// TestContainsVertexQueryUndetermined: the same edge added both outgoing (+1)
// and incoming (-1) cancels out, so containment is undetermined (result 0).
func TestContainsVertexQueryUndetermined(t *testing.T) {
	q := NewContainsVertexQuery(parsePoint("1:2"))
	q.AddEdge(parsePoint("3:4"), 1)
	q.AddEdge(parsePoint("3:4"), -1)
	if got := q.ContainsVertex(); got != 0 {
		t.Errorf("ContainsVertex() = %v, want 0 for vertex with undetermined containment", got)
	}
}
// TestContainsVertexQueryContainedWithDuplicates: duplicate edges to 1:-5
// cancel, leaving unmatched edges whose arrangement implies containment (+1).
func TestContainsVertexQueryContainedWithDuplicates(t *testing.T) {
	// The Ortho reference direction points approximately due west.
	// Containment is determined by the unmatched edge immediately clockwise.
	q := NewContainsVertexQuery(parsePoint("0:0"))
	q.AddEdge(parsePoint("3:-3"), -1)
	q.AddEdge(parsePoint("1:-5"), 1)
	q.AddEdge(parsePoint("2:-4"), 1)
	q.AddEdge(parsePoint("1:-5"), -1)
	if got := q.ContainsVertex(); got != 1 {
		t.Errorf("ContainsVertex() = %v, want 1 for vertex that is contained", got)
	}
}
// TestContainsVertexQueryNotContainedWithDuplicates: mirror of the contained
// case; after cancelling the duplicate 1:-5 edges the vertex is outside (-1).
func TestContainsVertexQueryNotContainedWithDuplicates(t *testing.T) {
	// The Ortho reference direction points approximately due west.
	// Containment is determined by the unmatched edge immediately clockwise.
	q := NewContainsVertexQuery(parsePoint("1:1"))
	q.AddEdge(parsePoint("1:-5"), 1)
	q.AddEdge(parsePoint("2:-4"), -1)
	q.AddEdge(parsePoint("3:-3"), 1)
	q.AddEdge(parsePoint("1:-5"), -1)
	if got := q.ContainsVertex(); got != -1 {
		t.Errorf("ContainsVertex() = %v, want -1 for vertex that is not contained", got)
	}
}
// TestContainsVertexQueryMatchesLoopContainment cross-checks the query
// against Loop.ContainsPoint for every vertex of a regular loop.
// NOTE(review): i runs 1..NumVertices and Vertex(i+1) can exceed the vertex
// count -- presumably Loop.Vertex wraps around; confirm against the s2 API.
func TestContainsVertexQueryMatchesLoopContainment(t *testing.T) {
	// Check that the containment function defined is compatible with Loop
	loop := RegularLoop(parsePoint("89:-179"), s1.Angle(10)*s1.Degree, 1000)
	for i := 1; i <= loop.NumVertices(); i++ {
		q := NewContainsVertexQuery(loop.Vertex(i))
		q.AddEdge(loop.Vertex(i-1), -1)
		q.AddEdge(loop.Vertex(i+1), 1)
		if got, want := q.ContainsVertex() > 0, loop.ContainsPoint(loop.Vertex(i)); got != want {
			t.Errorf("ContainsVertex() = %v, loop.ContainsPoint(%v) = %v, should be the same", got, loop.Vertex(i), want)
		}
	}
}
|
def get_record_details(endpoints: dict, endpoint_name: str) -> str:
    """Render a human-readable summary of a record endpoint.

    Args:
        endpoints: mapping of endpoint name -> config dict with keys
            'pid_type', 'record_class', 'route' and 'template'.
        endpoint_name: name of the endpoint to describe.

    Returns:
        A multi-line summary string, or ``"Endpoint not found"`` when the
        name is not present in ``endpoints``.
    """
    if endpoint_name not in endpoints:
        return "Endpoint not found"
    details = endpoints[endpoint_name]
    return (
        f"Record Type: {endpoint_name}\n"
        f"PID Type: {details['pid_type']}\n"
        f"Record Class: {details['record_class']}\n"
        f"Route: {details['route']}\n"
        f"Template: {details['template']}"
    )
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Authors: <NAME>, <NAME>
*
*/
package org.rumbledb.runtime.primary;
import org.rumbledb.api.Item;
import org.rumbledb.exceptions.DuplicateObjectKeyException;
import org.rumbledb.exceptions.ExceptionMetadata;
import org.rumbledb.exceptions.IteratorFlowException;
import org.rumbledb.exceptions.UnexpectedTypeException;
import org.rumbledb.items.ItemFactory;
import org.rumbledb.items.ObjectItem;
import org.rumbledb.runtime.LocalRuntimeIterator;
import org.rumbledb.runtime.RuntimeIterator;
import sparksoniq.jsoniq.ExecutionMode;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
/**
 * Runtime iterator materializing object constructor expressions.
 * Two modes are supported:
 * <ul>
 *   <li>key/value construction: each key and value is an arbitrary
 *       expression evaluated at runtime and paired positionally;</li>
 *   <li>merged construction: every child expression must yield object
 *       items, whose key/value pairs are merged into one object.</li>
 * </ul>
 */
public class ObjectConstructorRuntimeIterator extends LocalRuntimeIterator {
    private static final long serialVersionUID = 1L;
    // Parallel lists: keys.get(i) produces the key for values.get(i).
    // Both are unused (keys is set via super, values via field) in merged mode.
    private List<RuntimeIterator> keys;
    private List<RuntimeIterator> values;
    // True when this iterator merges child object expressions instead of
    // pairing keys with values.
    private boolean isMergedObject = false;

    /**
     * Constructor for key/value object syntax.
     *
     * @param keys runtime iterators producing the object keys
     * @param values runtime iterators producing the corresponding values
     * @param executionMode execution mode of this iterator
     * @param iteratorMetadata source-location metadata for error reporting
     */
    public ObjectConstructorRuntimeIterator(
            List<RuntimeIterator> keys,
            List<RuntimeIterator> values,
            ExecutionMode executionMode,
            ExceptionMetadata iteratorMetadata
    ) {
        super(keys, executionMode, iteratorMetadata);
        this.children.addAll(values);
        this.keys = keys;
        this.values = values;
    }

    /**
     * Constructor for merged-object syntax.
     *
     * @param childExpressions expressions whose object results are merged
     * @param executionMode execution mode of this iterator
     * @param iteratorMetadata source-location metadata for error reporting
     */
    public ObjectConstructorRuntimeIterator(
            List<RuntimeIterator> childExpressions,
            ExecutionMode executionMode,
            ExceptionMetadata iteratorMetadata
    ) {
        super(null, executionMode, iteratorMetadata);
        this.children.addAll(childExpressions);
        this.isMergedObject = true;
    }

    /**
     * Produces the single constructed object item.
     *
     * @return the constructed object item
     * @throws DuplicateObjectKeyException if the same key occurs twice
     * @throws UnexpectedTypeException if a key is not a string
     * @throws IteratorFlowException if a key expression yields zero or more
     *         than one item, or next() is called after exhaustion
     */
    @Override
    public Item next() {
        if (this.hasNext) {
            // LinkedHashMap preserves the declared key order in the result.
            LinkedHashMap<String, Item> content = new LinkedHashMap<>();
            if (this.isMergedObject) {
                // Merged mode: concatenate the key/value pairs of every
                // object produced by every child expression.
                for (RuntimeIterator iterator : this.children) {
                    iterator.open(this.currentDynamicContextForLocalExecution);
                    while (iterator.hasNext()) {
                        ObjectItem item = (ObjectItem) iterator.next();
                        for (String key : item.getKeys()) {
                            if (content.containsKey(key)) {
                                throw new DuplicateObjectKeyException(
                                        "Duplicate key: " + key,
                                        getMetadata()
                                );
                            }
                            content.put(key, item.getItemByKey(key));
                        }
                    }
                    iterator.close();
                }
                this.hasNext = false;
                return ItemFactory.getInstance()
                    .createObjectItem(content);
            } else {
                Item key = null;
                Item value = null;
                for (int i = 0; i < this.keys.size(); ++i) {
                    RuntimeIterator keyIterator = this.keys.get(i);
                    RuntimeIterator valueIterator = this.values.get(i);
                    // Materialize the value sequence first.
                    List<Item> currentResults = new ArrayList<>();
                    valueIterator.open(this.currentDynamicContextForLocalExecution);
                    while (valueIterator.hasNext()) {
                        currentResults.add(valueIterator.next());
                    }
                    valueIterator.close();
                    // If the value expression yields several items, wrap them
                    // in an array; an empty sequence becomes a null item.
                    if (currentResults.size() > 1) {
                        value = ItemFactory.getInstance().createArrayItem(currentResults);
                    } else if (currentResults.size() == 1) {
                        value = currentResults.get(0);
                    } else {
                        value = ItemFactory.getInstance().createNullItem();
                    }
                    // The key expression must yield exactly one string item.
                    keyIterator.open(this.currentDynamicContextForLocalExecution);
                    if (!keyIterator.hasNext()) {
                        throw new IteratorFlowException("A key cannot be the empty sequence", getMetadata());
                    }
                    key = keyIterator.next();
                    if (!key.isString()) {
                        throw new UnexpectedTypeException(
                                "Key provided for object creation must be of type String",
                                getMetadata()
                        );
                    }
                    if (content.containsKey(key.getStringValue())) {
                        throw new DuplicateObjectKeyException(
                                "Duplicate key: " + key.getStringValue(),
                                getMetadata()
                        );
                    }
                    content.put(key.getStringValue(), value);
                    if (keyIterator.hasNext()) {
                        throw new IteratorFlowException(
                                "A key cannot be a sequence of more than one item",
                                getMetadata()
                        );
                    }
                    keyIterator.close();
                }
                this.hasNext = false;
                return ItemFactory.getInstance()
                    .createObjectItem(content);
            }
        }
        throw new IteratorFlowException("Invalid next() call on object!", getMetadata());
    }
}
|
<reponame>nicklinyi/AxiSEM-3D
//
// NetCDF_Writer.cpp
// AxiSEM3D
//
// Created by <NAME> on 3/24/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
// NetCDF writer
#include "NetCDF_Writer.hpp"
////////////////// file system //////////////////
// open
// Open a NetCDF file for writing.  overwrite=true creates/truncates the
// file; overwrite=false opens an existing file in read-write mode.
// Any previously open file is closed first.
void NetCDF_Writer::open(const std::string &fname, bool overwrite) {
    close();
    if (overwrite) {
        if (nc_create(fname.c_str(), NC_NETCDF4, &mFileID) != NC_NOERR) {
            throw std::runtime_error("NetCDF_Writer::open || "
                "Error creating NetCDF file: || " + fname);
        }
        // nc_create leaves the file in define mode; switch it off so data
        // can be written.  (An existing file opened below is not in define
        // mode, hence no defModeOff() on that path.)
        defModeOff();
    } else {
        if (nc_open(fname.c_str(), NC_WRITE | NC_NETCDF4, &mFileID)
            != NC_NOERR) {
            throw std::runtime_error("NetCDF_Writer::open || "
                "Error opening NetCDF file: || " + fname);
        }
    }
    mPWD = mFileID;
    mFileName = fname;
}
// open parallel
// Open an existing NetCDF file for parallel (MPI-IO) writing.
// Throws when the build was configured without Parallel NetCDF support.
void NetCDF_Writer::openParallel(const std::string &fname) {
#ifdef _USE_PARALLEL_NETCDF
    close();
    if (nc_open_par(fname.c_str(), NC_MPIIO | NC_WRITE | NC_NETCDF4,
                    MPI_COMM_WORLD, MPI_INFO_NULL, &mFileID) != NC_NOERR) {
        throw std::runtime_error("NetCDF_Writer::openParallel || "
            "Error opening NetCDF file: || " + fname);
    }
    mPWD = mFileID;
    mFileName = fname;
#else
    throw std::runtime_error("NetCDF_Writer::openParallel || "
        "CMakeLists.txt has disabled Parallel NetCDF.");
#endif
}
// close
// Close the current file if one is open and reset file id, working group
// and file name.  Safe to call when no file is open.
void NetCDF_Writer::close() {
    if (isOpen()) {
        netcdf::error(nc_close(mFileID), "nc_close", mFileName);
        mPWD = mFileID = -1;
        mFileName = "";
    }
}
////////////////// group //////////////////
// create group
// Create a child group under the current group (mPWD).
// NOTE(review): the new group id is error-checked but discarded; callers
// must use goToGroup() afterwards to enter the group.
void NetCDF_Writer::createGroup(const std::string &gname) const {
    int grpid = -1;
    netcdf::error(nc_def_grp(mPWD, gname.c_str(), &grpid),
                  "nc_def_grp", mFileName);
}
// go to group
// Descend into an existing child group of the current group; on success
// mPWD becomes the child group's id.
void NetCDF_Writer::goToGroup(const std::string &gname) {
    int grpid = -1;
    netcdf::error(nc_inq_grp_ncid(mPWD, gname.c_str(), &grpid),
                  "nc_inq_grp_ncid", mFileName);
    mPWD = grpid;
}
////////////////// specialization //////////////////
// add string attribute
// Specialization for string attributes: an empty vname attaches the
// attribute to the file itself (NC_GLOBAL), otherwise to the named variable
// in the current group.
template <>
void NetCDF_Writer::addAttribute<std::string>(const std::string &attname,
                                              const std::string &attvalue,
                                              const std::string &vname) const {
    if (vname == "") {
        // file attribute
        netcdf::error(nc_put_att_text(mFileID, NC_GLOBAL, attname.c_str(),
                                      attvalue.length(), attvalue.c_str()),
                      "nc_put_att_text", mFileName);
    } else {
        // variable attribute
        int varid = netcdf::varID(mPWD, vname, mFileName);
        netcdf::error(nc_put_att_text(mPWD, varid, attname.c_str(),
                                      attvalue.length(), attvalue.c_str()),
                      "nc_put_att_text", mFileName);
    }
}
|
/*!
* ISC License
*
* Copyright (c) 2018, Imqueue Sandbox
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
import {
IMQService,
IRedisClient,
expose,
profile,
} from '@imqueue/rpc';
import { CarObject } from './types';
import { CarsDB } from './CarsDB';
import { carPush, carSort, toPartial } from './helpers';
/**
* Class Car - implements in-memory cars database with API access to cars data
*/
export class Car extends IMQService {
    /**
     * Cars in-memory database
     * @type {CarsDB}
     */
    private db: CarsDB;

    /**
     * Overrides and adds service-specific async stuff to service
     * initialization: bootstraps the in-memory cars database on top of the
     * queue's redis connection.
     */
    @profile()
    public async start() {
        const ret = await super.start();
        const redis: IRedisClient = (this.imq as any).writer;
        this.db = new CarsDB(this.logger, redis);
        await this.db.bootstrap();
        return ret;
    }

    /**
     * Returns a list of car manufacturers (car brands)
     *
     * @return {string[]} - list of known brands
     */
    @profile()
    @expose()
    public brands(): string[] {
        return this.db.brands().sort();
    }

    /**
     * Returns car object by its identifier or if multiple identifiers given
     * as array of identifiers - returns a list of car objects.
     *
     * @param {string | string[]} id - car identifier
     * @param {string[]} [selectedFields] - fields to return
     * @return {Partial<CarObject> | Partial<CarObject|null>[] | null} - found object or null otherwise
     */
    @profile()
    @expose()
    public fetch(
        id: string | string[],
        selectedFields?: string[]
    ): Partial<CarObject> | Partial<CarObject|null>[] | null {
        if (!(id instanceof Array)) {
            return toPartial(
                this.db.car(id),
                selectedFields || []
            );
        }
        return id.map(carId => toPartial(
            this.db.car(carId),
            selectedFields || [],
        ));
    }

    // codebeat:disable[ARITY]
    /**
     * Returns list of known cars for a given brand
     *
     * @param {string} brand - car manufacturer (brand) name
     * @param {string[]} [selectedFields] - fields to return
     * @param {string} [sort] - sort field, by default is 'model'
     * @param {'asc' | 'desc'} [dir] - sort direction, by default is 'asc' - ascending
     * @return {Partial<CarObject>[]} - list of found car objects
     */
    @profile()
    @expose()
    public list(
        brand: string,
        selectedFields?: string[],
        sort: string = 'model',
        dir: 'asc' | 'desc' = 'asc',
    ): Partial<CarObject>[] {
        // BUG FIX: selectedFields was accepted but never forwarded to
        // toPartial(), so field selection was silently ignored; pass it
        // through the same way fetch() does.
        return this.db.cars(brand)
            .sort(carSort(sort, dir))
            .reduce(
                (cars: Partial<CarObject>[], car) =>
                    carPush(cars, toPartial(car, selectedFields || [])),
                [],
            );
    }
    // codebeat:enable[ARITY]
}
|
# Importing required libraries
from sklearn import tree
import pandas as pd

# Creating dataset.
# BUG FIX: the original frame had no 'Heart Attack' column, so both
# dataset.drop('Heart Attack', ...) and dataset['Heart Attack'] below raised
# a KeyError.  Add the target column explicitly.
# NOTE(review): the label values here are illustrative placeholders -- confirm
# the intended ground-truth labels for this example.
dataset = pd.DataFrame({'Gender': [0, 1, 0, 0, 1, 1],
                        'Age': [45, 23, 67, 34, 50, 68],
                        'Risk Factors': [1, 0, 1, 0, 0, 1],
                        'Heart Attack': [1, 0, 1, 0, 0, 1]})

# Separating dependent (y) and independent (X) variables
X = dataset.drop('Heart Attack', axis=1)
y = dataset['Heart Attack']

# Training the decision tree model
clf = tree.DecisionTreeClassifier()
clf = clf.fit(X, y)
// https://github.com/TaiBIF/camera-trap-api/wiki/Status-Code-and-Error-Code
// TODO: define error message
// Map of known HTTP status codes (as strings) to user-facing messages.
// A Map (rather than a plain object) avoids accidental prototype-chain hits.
const STATUS_MESSAGES = new Map([
  ['400', '操作失敗,資料內容有誤。'],
  ['401', '操作失敗,請先登入。'],
  ['403', '操作失敗,您的帳號無權限操作此步驟'],
]);

// Returns the message for a status code, falling back to a generic one.
const getErrorMessage = status =>
  STATUS_MESSAGES.get(status) || '操作失敗,請再試一次';

export default getErrorMessage;
|
<reponame>kkrull/javaspec<filename>prototypes/javaspec-jupiter/src/test/java/info/javaspec/jupiter/syntax/fixture/Minimax.java
package info.javaspec.jupiter.syntax.fixture;
import java.util.Collection;
/**
 * Plain recursive minimax scorer for a two-player, turn-based game.
 * Scores are always from the maximizer's point of view: +1 when the
 * maximizer has won, -1 when the minimizer has won, 0 for a finished game
 * with no winner.
 */
final class Minimax {
    private final String maximizerPlayer;
    private final String minimizerPlayer;

    public Minimax(String maximizerPlayer, String minimizerPlayer) {
        this.maximizerPlayer = maximizerPlayer;
        this.minimizerPlayer = minimizerPlayer;
    }

    /**
     * Scores {@code game} with {@code player} to move, searching the full
     * game tree.  Returns 9999 as a sentinel when {@code player} is neither
     * configured player (mirrors the original behavior).
     */
    public int score(GameState game, String player) {
        // Terminal positions are scored directly.
        if (this.maximizerPlayer.equals(game.findWinner())) {
            return +1;
        }
        if (this.minimizerPlayer.equals(game.findWinner())) {
            return -1;
        }
        if (game.isOver()) {
            return 0;
        }
        // Non-terminal: recurse over every available move, alternating turns.
        if (this.maximizerPlayer.equals(player)) {
            int best = -100; // below any reachable score
            for (String nextMove : game.availableMoves()) {
                best = Math.max(best, score(game.move(nextMove), this.minimizerPlayer));
            }
            return best;
        }
        if (this.minimizerPlayer.equals(player)) {
            int best = +100; // above any reachable score
            for (String nextMove : game.availableMoves()) {
                best = Math.min(best, score(game.move(nextMove), this.maximizerPlayer));
            }
            return best;
        }
        return 9999;
    }

    /** Minimal game abstraction the scorer needs. */
    interface GameState {
        Collection<String> availableMoves();
        String findWinner();
        boolean isOver();
        GameState move(String move);
    }
}
|
<reponame>tarachandverma/ngx-openidc
#include "shm_apr.h"
#include "shm_data.h"
#include "common_utils.h"
/* Initialize an array_header whose element storage lives on the shared heap.
 * nelts is clamped to at least 1; clear != 0 zero-fills the storage. */
static void shapr_make_array_core(array_header *res, shared_heap* sheap,int nelts, int elt_size, int clear){
    /*
     * Assure sanity if someone asks for
     * array of zero elts.
     */
    if (nelts < 1) {
        nelts = 1;
    }
    if (clear) {
        res->elts = shdata_shpcalloc(sheap, nelts * elt_size);
    }else {
        res->elts = shdata_shpalloc(sheap, nelts * elt_size);
    }
    res->pool = NULL;   /* shared-heap arrays carry no APR pool */
    res->elt_size = elt_size;
    res->nelts = 0;     /* No active elements yet... */
    res->nalloc = nelts;        /* ...but this many allocated */
}
/* Allocate and initialize a zero-filled array on the shared heap. */
array_header* shapr_array_make(shared_heap* sheap, int nelts, int elt_size){
    array_header *res;
    res = (array_header*) shdata_shpalloc(sheap,sizeof(array_header));
    shapr_make_array_core(res, sheap, nelts, elt_size, 1);
    return res;
}
/* Append one element slot and return its (zeroed-on-grow) address.  When the
 * array is full, capacity doubles into a fresh shared-heap buffer and the
 * old contents are copied.
 * NOTE(review): the previous buffer is not released -- presumably the shared
 * heap has no free path; confirm this is intentional. */
void * shapr_array_push(shared_heap* sheap, array_header *arr){
    int new_size;
    char *new_data;
    if (arr->nelts == arr->nalloc) {
        new_size = (arr->nalloc <= 0) ? 1 : arr->nalloc * 2;
        new_data = shdata_shpalloc(sheap, arr->elt_size * new_size);
        memcpy(new_data, arr->elts, arr->nalloc * arr->elt_size);
        /* zero only the newly added tail of the buffer */
        memset(new_data + arr->nalloc * arr->elt_size, 0, arr->elt_size * (new_size - arr->nalloc));
        arr->elts = new_data;
        arr->nalloc = new_size;
    }
    ++arr->nelts;
    return arr->elts + (arr->elt_size * (arr->nelts - 1));
}
/* Parse a delimiter-separated list of integers from src into a shared-heap
 * array of long.  Tokens that do not parse completely as numbers are skipped.
 * Returns an empty array for src == NULL.
 * NOTE(review): the first token is cut with apr_strtok and the rest with
 * strtok_r; both keep state in p1, but mixing the two looks accidental --
 * confirm they are interchangeable on all target platforms. */
array_header* shapr_parseLongArrayFromCsv(shared_heap* sheap, int arraySz, const char* delim, char* src){
    char *srccpy=NULL, *prodStr=NULL, *p1=NULL;
    long *prodId=NULL, prodlook=0;
    char* end=NULL;
    array_header* arr=(array_header*)shapr_array_make(sheap,arraySz,sizeof(long));
    if(src==NULL){return arr;}
    srccpy=shdata_32BitString_copy(sheap,src);
    if(arr==NULL){
        return NULL;
    }
    prodStr=apr_strtok(srccpy,delim,&p1);
    while(prodStr!=NULL){
        // prodId= (long *) shapr_array_push(sheap,arr);
        // *prodId = (long) atol(prodStr);
        prodlook = strtol(prodStr,&end,10);
        if(*end=='\0'){
            /* the whole token parsed as a number: append it */
            prodId= (long *) shapr_array_push(sheap,arr);
            *prodId=prodlook;
        }
        prodStr =strtok_r(NULL,delim,&p1);
    }
    return arr;
}
/* Deep-copy an array of char* into a newly allocated shared-heap array.
 * Returns NULL when the source array is NULL or empty. */
array_header* shapr_copyStringArrayToSheap(shared_heap* sheap, array_header* sarray){
    int i;
    char**place;
    array_header* dstArr;
    if(sarray==NULL||sarray->nelts<1) return NULL;
    dstArr=shapr_array_make(sheap,sarray->nelts,sizeof(char*));
    for(i=0;i<sarray->nelts;i++){
        place=(char**)shapr_array_push(sheap,dstArr);
        /* each string is copied into shared memory, not aliased */
        *place=shdata_32BitString_copy(sheap,(char*)cu_getElement(sarray,i));
    }
    return dstArr;
}
/* Split a delimiter-separated string into a shared-heap array of char*.
 * The tokens point into a single shared-heap copy of src.
 * Returns an empty array for src == NULL.
 * NOTE(review): same apr_strtok / strtok_r mix as shapr_parseLongArrayFromCsv
 * -- confirm the two tokenizers are interchangeable here. */
array_header* shapr_parseStringArrayFromCsv(shared_heap* sheap, int arraySz, const char* delim, char* src){
    char *srccpy=NULL, *prodStr=NULL, *p1=NULL;
    char **val=NULL;
    array_header* arr=(array_header*)shapr_array_make(sheap,arraySz,sizeof(char*));
    if(src==NULL){return arr;}
    srccpy=shdata_32BitString_copy(sheap,src);
    if(arr==NULL){
        return NULL;
    }
    prodStr=apr_strtok(srccpy,delim,&p1);
    while(prodStr!=NULL){
        val= (char**) shapr_array_push(sheap,arr);
        *val = prodStr;
        prodStr =strtok_r(NULL,delim,&p1);
    }
    return arr;
}
/* Shared-heap hash table types, modeled on APR's apr_hash. */
typedef struct shapr_hash_entry_t shapr_hash_entry_t;
/* One collision-chain node: cached hash, key pointer/length and value. */
struct shapr_hash_entry_t {
    shapr_hash_entry_t *next;
    unsigned int hash;
    const void *key;
    apr_ssize_t klen;
    const void *val;
};
/*
 * Data structure for iterating through a hash table.
 *
 * We keep a pointer to the next hash entry here to allow the current
 * hash entry to be freed or otherwise mangled between calls to
 * apr_hash_next().
 */
struct shapr_hash_index_t {
    shapr_hash_t *ht;
    shapr_hash_entry_t *this, *next;
    unsigned int index;
};
/*
 * The size of the array is always a power of two. We use the maximum
 * index rather than the size so that we can use bitwise-AND for
 * modular arithmetic.
 * The count of hash entries may be greater depending on the chosen
 * collision rate.
 */
struct shapr_hash_t {
    shapr_hash_entry_t **array;
    shapr_hash_index_t iterator;        /* For apr_hash_first(NULL, ...) */
    unsigned int count, max;
    shapr_hash_entry_t *free;   /* List of recycled entries */
};
#define INITIAL_MAX 127 /* tunable == 2^n - 1 */
/* Allocate a zeroed bucket array of max+1 slots on the shared heap. */
static shapr_hash_entry_t **shapr_alloc_array(shared_heap* sheap, shapr_hash_t *ht, unsigned int max){
    return (shapr_hash_entry_t **)shdata_shpcalloc(sheap, sizeof(*ht->array) * (max + 1));
}
/* Create an empty hash table on the shared heap with INITIAL_MAX+1 buckets. */
shapr_hash_t* shapr_hash_make(shared_heap* sheap){
    shapr_hash_t *ht;
    ht = (shapr_hash_t*)shdata_shpalloc(sheap, sizeof(shapr_hash_t));
    ht->free = NULL;
    ht->count = 0;
    ht->max = INITIAL_MAX;
    ht->array = shapr_alloc_array(sheap,ht, ht->max);
    return ht;
}
/* Report the key, key length and value of the entry the iterator currently
 * points at.  Any output pointer may be NULL to skip that field. */
void shapr_hash_this(shapr_hash_index_t *hi,
                     const void **key,
                     apr_ssize_t *klen,
                     void **val){
    if (key) *key = hi->this->key;
    if (klen) *klen = hi->this->klen;
    if (val) *val = (void *)hi->this->val;
}
/*
 * Core lookup helper: returns an updatable pointer to the chain slot for
 * (key, klen) so callers can remove or replace entries in place.
 *
 * If val is non-NULL and the key is absent, a new entry is created
 * (recycling from ht->free when possible) and the key is deep-copied into
 * the shared heap; if val is NULL this is a pure lookup and sheap may be
 * NULL.
 */
static shapr_hash_entry_t **shapr_find_entry(shared_heap* sheap,shapr_hash_t *ht,const void *key,apr_ssize_t klen,const void *val){
    shapr_hash_entry_t **hep, *he;
    const unsigned char *p;
    unsigned int hash;
    apr_ssize_t i;
    /*
     * "times 33" string hash (hash = hash * 33 + c), as popularized by Perl
     * and Berkeley DB: very cheap to compute (the multiply reduces to a
     * shift plus an add) and distributes text keys well.  This is the same
     * function APR's apr_hash uses.
     */
    hash = 0;
    if (klen == APR_HASH_KEY_STRING) {
        /* NUL-terminated key: hash it and derive klen as a side effect */
        for (p = key; *p; p++) {
            hash = hash * 33 + *p;
        }
        klen = p - (const unsigned char *)key;
    }else {
        for (p = key, i = klen; i; i--, p++) {
            hash = hash * 33 + *p;
        }
    }
    /* scan linked list */
    for (hep = &ht->array[hash & ht->max], he = *hep;
         he; hep = &he->next, he = *hep) {
        if (he->hash == hash
            && he->klen == klen
            && memcmp(he->key, key, klen) == 0)
            break;
    }
    if (he || !val)
        return hep;
    /* add a new entry for non-NULL values */
    if ((he = ht->free) != NULL)
        ht->free = he->next;
    else
        he = (shapr_hash_entry_t*)shdata_shpalloc(sheap, sizeof(*he));
    he->next = NULL;
    he->hash = hash;
    /* deep-copy the key into shared memory (this used to be he->key = key).
     * NOTE(review): the copy helper is string-based -- confirm binary keys
     * containing NUL bytes are not used with this table. */
    he->key = shdata_32BitString_copy(sheap,(char*)key);
    he->klen = klen;
    he->val = val;
    *hep = he;
    ht->count++;
    return hep;
}
/* Advance an iterator to the next entry, walking chains then buckets.
 * Returns NULL when the table is exhausted. */
shapr_hash_index_t * shapr_hash_next(shapr_hash_index_t *hi){
    hi->this = hi->next;
    while (!hi->this) {
        if (hi->index > hi->ht->max)
            return NULL;
        hi->this = hi->ht->array[hi->index++];
    }
    /* prefetch the successor so the current entry may be unlinked safely */
    hi->next = hi->this->next;
    return hi;
}
/* Begin an iteration.  With pool == NULL the table's embedded iterator is
 * used, so only one pool-less iteration per table can be active at a time. */
shapr_hash_index_t * shapr_hash_first(apr_pool_t* pool, shapr_hash_t *ht){
    shapr_hash_index_t *hi;
    if (pool)
        hi = (shapr_hash_index_t*)apr_palloc(pool, sizeof(*hi));
    else
        hi = &ht->iterator;
    hi->ht = ht;
    hi->index = 0;
    hi->this = NULL;
    hi->next = NULL;
    return shapr_hash_next(hi);
}
/*
* Expanding a hash table
*/
/* Grow the bucket array of ht to 2*max+1 slots and rehash every entry
 * into it.  Because max is of the form 2^n - 1, the new size stays an
 * all-ones value and remains usable as a bit mask (hash & new_max).
 * Iterating while relinking is safe: shapr_hash_next caches each
 * entry's next pointer one step ahead of the entry it exposes.
 * The new array comes from the shared heap; the old one is not freed. */
static void shapr_expand_array(shared_heap* sheap,shapr_hash_t *ht){
    shapr_hash_index_t *hi;
    shapr_hash_entry_t **new_array;
    unsigned int new_max;
    new_max = ht->max * 2 + 1;
    new_array = shapr_alloc_array(sheap,ht, new_max);
    for (hi = shapr_hash_first(NULL, ht); hi; hi = shapr_hash_next(hi)) {
        /* recompute the bucket under the wider mask */
        unsigned int i = hi->this->hash & new_max;
        /* push the entry onto the head of its new bucket chain */
        hi->this->next = new_array[i];
        new_array[i] = hi->this;
    }
    ht->array = new_array;
    ht->max = new_max;
}
/* Store, replace or delete the value under key (klen bytes).
 *
 * A NULL key is ignored.  shapr_find_entry does the heavy lifting: when
 * val is non-NULL it inserts a fresh entry if the key was absent, so on
 * return *hep is the (possibly brand-new) entry; *hep is NULL only when
 * the key is absent AND val is NULL.  A NULL val therefore deletes the
 * entry, recycling it via the table's free list.  After a store, the
 * table is grown once the entry count exceeds the bucket count to keep
 * the collision chains short. */
void shapr_hash_set(shared_heap* sheap,shapr_hash_t *ht,const void *key,apr_ssize_t klen,const void *val){
    shapr_hash_entry_t **hep;
    if(key==NULL) return;
    hep = shapr_find_entry(sheap,ht, key, klen, val);
    if (*hep) {
        if (!val) {
            /* delete entry: unlink it and push it onto the free list */
            shapr_hash_entry_t *old = *hep;
            *hep = (*hep)->next;
            old->next = ht->free;
            ht->free = old;
            --ht->count;
        }else {
            /* replace entry (also finalizes a fresh insert by find_entry) */
            (*hep)->val = val;
            /* check that the collision rate isn't too high */
            if (ht->count > ht->max) {
                shapr_expand_array(sheap,ht);
            }
        }
    }
    /* else key not present and val==NULL */
}
/*
 * Look up key (klen bytes, or APR_HASH_KEY_STRING for a NUL-terminated
 * string) and return the stored value, or NULL when the key is absent.
 * Passing NULL for both sheap and val makes shapr_find_entry a pure
 * lookup: nothing is inserted.
 */
void* shapr_hash_get(shapr_hash_t *ht,const void *key,apr_ssize_t klen){
    shapr_hash_entry_t *entry = *shapr_find_entry(NULL, ht, key, klen, NULL);
    return entry ? (void *)entry->val : NULL;
}
/* Return the number of key/value pairs currently stored in ht. */
unsigned int shapr_hash_count(shapr_hash_t *ht){
    const unsigned int n = ht->count;
    return n;
}
|
#!/bin/bash
# Regenerate the TypeScript fetch client from the OpenAPI spec, then
# remove the generated sample test, format, re-add headers, and copy
# build artifacts.
set -e

# Absolute path of this script's directory; FE_ROOT is its parent
# (the frontend root).
current_dir="$( cd "$( dirname "$0" )" && pwd )"
FE_ROOT="$(dirname "$current_dir")"

# Quote every expansion so paths containing spaces don't word-split.
cd "$FE_ROOT/"
java -jar "$FE_ROOT/swagger-codegen-cli.jar" generate \
    -i "$FE_ROOT/src/api/openapi.yaml" \
    -l typescript-fetch \
    -o "$FE_ROOT/src/api/generated/" \
    --additional-properties modelPropertyNaming=original
# The codegen emits a placeholder spec file that is not a real test.
rm "$FE_ROOT/src/api/generated/api_test.spec.ts"
yarn prettier --end-of-line lf
python "$FE_ROOT/scripts/add_header.py" "$FE_ROOT/src/api/generated/"
yarn build:copy
|
#!/bin/bash
#
# profiles = xccdf_org.ssgproject.content_profile_C2S

# Ensure /etc/cron.daily exists, then enforce root group ownership on it.
target="/etc/cron.daily"
touch "$target"
chgrp root "$target"
#!/bin/bash
##############################################################################
# Copyright (c) 2016-22, Lawrence Livermore National Security, LLC and Umpire
# project contributors. See the COPYRIGHT file for details.
#
# SPDX-License-Identifier: (MIT)
##############################################################################

SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
. "${SCRIPTPATH}/build_and_test.sh"

#
# The remainder of this script assumes that build_and_test.sh places us in the
# build directory (this assumption is not new, I am just documenting it now).
#
echo "Benchmarking..."

COMMIT="$( cd "$(dirname "$0")" ; git rev-parse --short HEAD )"
DATE="$(date +%Y-%m-%d)"
# COMPILER and SYS_TYPE are expected to be exported by build_and_test.sh /
# the CI environment.
BENCHMARK_OUTPUT_NAME="${COMMIT}_${COMPILER}_${SYS_TYPE}_${DATE}"

if [[ $HOSTNAME == *manta* ]]; then
    bsub -x -n 1 -G guests -Ip ./benchmark/copy_benchmarks --benchmark_out="copy_${BENCHMARK_OUTPUT_NAME}.json" --benchmark_out_format=json
    # Fix: the allocator output previously lacked the .json suffix on this
    # branch, so the archiving cp below could never find the file.
    bsub -x -n 1 -G guests -Ip ./benchmark/allocator_benchmarks --benchmark_out="allocator_${BENCHMARK_OUTPUT_NAME}.json" --benchmark_out_format=json
else
    srun -ppdebug -t 5 -N 1 ./benchmark/copy_benchmarks --benchmark_out="copy_${BENCHMARK_OUTPUT_NAME}.json" --benchmark_out_format=json
    srun -ppdebug -t 5 -N 1 ./benchmark/allocator_benchmarks --benchmark_out="allocator_${BENCHMARK_OUTPUT_NAME}.json" --benchmark_out_format=json
fi

# Archive the results where the benchmark dashboards pick them up.
cp "copy_${BENCHMARK_OUTPUT_NAME}.json" /usr/workspace/wsrzc/umpire/benchmark_results
cp "allocator_${BENCHMARK_OUTPUT_NAME}.json" /usr/workspace/wsrzc/umpire/benchmark_results
|
<reponame>jnschbrt/data-prepper
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.model.trace;
/**
 * Read-only view of the aggregate fields recorded for an entire trace:
 * its end time, total duration, and overall status code.
 *
 * @since 1.2
 */
public interface TraceGroupFields {

    /**
     * Returns the overall status code recorded for the trace.
     *
     * @return the status code
     * @since 1.2
     */
    Integer getStatusCode();

    /**
     * Returns the total duration of the trace, in nanoseconds.
     *
     * @return the duration in nanoseconds
     * @since 1.2
     */
    Long getDurationInNanos();

    /**
     * Returns the time at which the trace ended, formatted as ISO 8601.
     *
     * @return the end time
     * @since 1.2
     */
    String getEndTime();
}
|
// POST /storeName — persist the submitted name via storeName().
// Responds 200 {success:true} on success, 500 {error:<message>} on failure.
app.post('/storeName', async (req, res) => {
    const { name } = req.body;
    try {
        await storeName(name);
        res.status(200).json({ success: true });
    } catch (err) {
        res.status(500).json({ error: err.message });
    }
});
import User from "./User";
import Guild from "./Guild";
import Channel from "./Channel";
/**
 * Variant of a webhook.
 * NOTE(review): the names/values mirror a chat-platform webhook "type"
 * field (1 = incoming, 2 = channel follower, 3 = application) — confirm
 * against the upstream API this declaration models.
 */
declare enum WebhookType {
    Incoming = 1,
    Channel_Follower = 2,
    Application = 3
}
/**
 * Ambient declaration of a webhook object.  Optional members are only
 * present for some webhook types — presumably depending on WebhookType;
 * verify against the API this declaration mirrors.
 */
declare class Webhook {
    /** Unique id of this webhook. */
    id: string;
    /** Which variant this webhook is (see WebhookType). */
    type: WebhookType;
    /** Id of the guild this webhook belongs to, if any. */
    guild_id?: string;
    /** Id of the channel this webhook posts to, if any. */
    channel_id?: string;
    /** User associated with the webhook (e.g. its creator), if present. */
    user?: User;
    name: string;
    avatar: string;
    /** Secure token — NOTE(review): likely absent for some types; confirm. */
    token?: string;
    application_id: string;
    /** Source guild/channel for follower-style webhooks, if present. */
    source_guild?: Guild;
    source_channel?: Channel;
    url?: string;
    constructor();
}
export default Webhook;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.