text stringlengths 1 1.05M |
|---|
<reponame>enistudy/lallaheeee-drag-and-drop<filename>src/components/hooks/useDroppable.js
import { useState, useEffect } from 'react';
// React hook that wires HTML5 drag-and-drop "drop" handling onto the DOM
// element held in `ref`. Calls `onDrop` with the dragged payload stored
// under the "source" dataTransfer key and flips state to 'dropped'.
const useDroppable = ({ ref, onDrop }) => {
  const [dropstate, setDropState] = useState('droppable');

  useEffect(() => {
    const element = ref.current;
    if (!element) return undefined;

    // dragover must be cancelled for the element to accept drops at all.
    const handleDragOver = event => {
      event.preventDefault();
    };
    const handleDrop = event => {
      event.preventDefault();
      onDrop(event.dataTransfer.getData('source'));
      setDropState('dropped');
    };

    element.addEventListener('drop', handleDrop);
    element.addEventListener('dragover', handleDragOver);

    // Detach the exact listener instances on cleanup / onDrop change.
    return () => {
      element.removeEventListener('drop', handleDrop);
      element.removeEventListener('dragover', handleDragOver);
    };
  }, [onDrop]);

  return {
    dropstate,
  };
};
export default useDroppable;
|
<filename>src/formats/_iwriter.ts
import {IObject} from "../objects/_iobject";
import {IFile} from "../_ifile";
/** Serializes a parsed object into one or more output files. */
export interface IWriter {
  /** Renders `obj` and returns the file(s) produced for it. */
  write(obj: IObject): IFile[];
}
<reponame>bullyork/serviceGen<filename>tool/parse.go
package tool
import (
"reflect"
"regexp"
"strconv"
"strings"
)
// maxRange is the value used for "extensions ... to max" (0x1FFFFFFF,
// the largest protobuf field number).
var maxRange = 0x1FFFFFFF

// packableTypes lists the scalar types that may legally be declared
// [packed=true] on repeated fields.
var packableTypes = []string{"int32", "int64", "uint32", "uint64", "sint32", "sint64", "bool", "fixed64", "sfixed64", "double", "fixed32", "sfixed32", "float"}
// Schema is the parsed representation of a .proto file (exported so it can
// be consumed outside this package).
type Schema struct {
	Syntax   int                    // protobuf syntax version: 2 or 3
	Imports  []interface{}          // imported file paths (strings)
	Enums    []interface{}          // top-level enums (map[string]interface{})
	Messages []message              // top-level message definitions
	Options  map[string]interface{} // file-level options
	Extends  []extand               // top-level extend blocks
	Pack     string                 // package name
	Services []interface{}          // service definitions (serviceType)
}

// tokensArray is a simple mutable token stream consumed via shift/unshift.
type tokensArray struct {
	data []string
}
// onfieldoptions parses the bracketed option list of a field, e.g.
// `[packed = true, deprecated = true]`, returning a name -> raw-value map.
// The leading "[" is expected to be the current token.
func onfieldoptions(tokens *tokensArray) map[string]string {
	// Fix: allocate the map; writing to a nil map panics at runtime.
	opts := make(map[string]string)
	for len(tokens.data) > 0 {
		switch tokens.data[0] {
		case "[", ",":
			shift(tokens)
			name := shift(tokens)
			if name == "(" {
				// Custom option `(my.option) = value` — unwrap the parens.
				name = shift(tokens)
				shift(tokens)
			}
			if tokens.data[0] != "=" {
				panic("Unexpected token in field options: " + tokens.data[0])
			}
			shift(tokens)
			if tokens.data[0] == "]" {
				panic("Unexpected ] in field option")
			}
			// Fix: store under the parsed option name, not the literal key
			// "name" — ParseProto later looks up options["packed"].
			opts[name] = shift(tokens)
		case "]":
			shift(tokens)
			return opts
		default:
			panic("Unexpected token in field options: " + tokens.data[0])
		}
	}
	panic("No closing tag for field options")
}
// onpackagename consumes `package <name> ;` and returns the package name.
func onpackagename(tokens *tokensArray) string {
	shift(tokens) // "package" keyword
	name := shift(tokens)
	if tokens.data[0] != ";" {
		panic("Expected ; but found " + tokens.data[0])
	}
	shift(tokens) // trailing ";"
	return name
}
// onsyntaxversion consumes `syntax = "proto2";` or `syntax = "proto3";`
// and returns the numeric version (2 or 3).
func onsyntaxversion(tokens *tokensArray) int {
	shift(tokens) // "syntax" keyword
	if tokens.data[0] != "=" {
		panic("Expected = but found " + tokens.data[0])
	}
	shift(tokens)
	raw := shift(tokens)
	var version int
	switch raw {
	case `"proto2"`:
		version = 2
	case `"proto3"`:
		version = 3
	default:
		panic("Expected protobuf syntax version but found " + raw)
	}
	if tokens.data[0] != ";" {
		panic("Expected ; but found " + tokens.data[0])
	}
	shift(tokens)
	return version
}
// message is a parsed protobuf message definition.
type message struct {
	name       string        // message name
	enums      []interface{} // nested enums (map[string]interface{})
	extends    []interface{} // nested extend blocks
	messages   []interface{} // nested messages
	fields     []field       // declared fields
	extensions extensions    // declared extension tag range
}

// field is a single message field declaration.
type field struct {
	name     string            // field name
	typeArea string            // field type (or "map" for map fields)
	tag      int               // wire tag number
	mapArea  map[string]string // map fields: "from" (key type) and "to" (value type)
	required bool              // proto2 "required" label
	repeated bool              // "repeated" label
	options  map[string]string // bracketed field options, e.g. packed
	oneof    string            // enclosing oneof name, if any
}

// messageBody holds the parsed contents of a message's braces.
type messageBody struct {
	enums      []interface{}
	messages   []interface{}
	fields     []field
	extends    []interface{}
	extensions extensions
}

// extensions is the `extensions <from> to <to>;` tag range of a message.
type extensions struct {
	from int
	to   int
}
// onfield parses a single message field declaration up to and including its
// terminating ";". The current token is expected to be a label
// ("optional"/"required"/"repeated"), "map", or a field-body token.
func onfield(tokens *tokensArray) field {
	var field field
	// Fix: use -1 as the "tag not seen" sentinel; with the zero value 0 the
	// missing-tag check below could never fire.
	field.tag = -1
	for len(tokens.data) > 0 {
		switch tokens.data[0] {
		case "=":
			shift(tokens)
			if v, err := strconv.Atoi(shift(tokens)); err == nil {
				field.tag = v
			} else {
				panic(err)
			}
		case "map":
			field.typeArea = "map"
			// Fix: allocate the map; assigning into a nil map panics.
			field.mapArea = make(map[string]string)
			shift(tokens)
			if tokens.data[0] != "<" {
				panic(`Unexpected token in map type: ` + tokens.data[0])
			}
			shift(tokens)
			field.mapArea["from"] = shift(tokens) // key type
			if tokens.data[0] != "," {
				panic(`Unexpected token in map type: ` + tokens.data[0])
			}
			shift(tokens)
			field.mapArea["to"] = shift(tokens) // value type
			if tokens.data[0] != ">" {
				panic(`Unexpected token in map type: ` + tokens.data[0])
			}
			shift(tokens)
			field.name = shift(tokens)
		case "repeated", "required", "optional":
			var t = shift(tokens)
			field.required = (t == "required")
			field.repeated = (t == "repeated")
			field.typeArea = shift(tokens)
			field.name = shift(tokens)
		case "[":
			field.options = onfieldoptions(tokens)
		case ";":
			if field.name == "" {
				panic("Missing field name")
			}
			if field.typeArea == "" {
				panic("Missing type in message field: " + field.name)
			}
			if field.tag == -1 {
				panic("Missing tag number in message field: " + field.name)
			}
			shift(tokens)
			return field
		default:
			panic("Unexpected token in message field: " + tokens.data[0])
		}
	}
	panic("No ; found for message field")
}
// parse converts the literal tokens "true"/"false" to booleans; any other
// token is returned unchanged as a string.
func parse(value string) interface{} {
	switch value {
	case "true":
		return true
	case "false":
		return false
	default:
		return value
	}
}
// toSlice converts an arbitrary slice value into a []interface{}; it panics
// if the argument is not a slice.
func toSlice(arr interface{}) []interface{} {
	val := reflect.ValueOf(arr)
	if val.Kind() != reflect.Slice {
		panic("toslice arr not slice")
	}
	out := make([]interface{}, val.Len())
	for i := range out {
		out[i] = val.Index(i).Interface()
	}
	return out
}
// onoptionMap parses the body of a `{ ... }` aggregate option value and
// returns it as a map. The opening "{" must already have been consumed.
func onoptionMap(tokens *tokensArray) interface{} {
	// Fix: allocate the result map; writing to a nil map panics.
	result := make(map[string]interface{})
	for len(tokens.data) > 0 {
		if tokens.data[0] == "}" {
			shift(tokens)
			return result
		}
		hasBracket := tokens.data[0] == "("
		if hasBracket {
			shift(tokens)
		}
		key := shift(tokens)
		if hasBracket {
			if tokens.data[0] != ")" {
				panic("Expected ) but found " + tokens.data[0])
			}
			// Fix: consume the ")" — leaving it in place made the switch
			// below always hit the default panic for custom option keys.
			shift(tokens)
		}
		var value interface{}
		switch tokens.data[0] {
		case ":":
			if result[key] != nil {
				panic("Duplicate option map key " + key)
			}
			shift(tokens)
			value = parse(shift(tokens))
			// Fix: parse() can return a bool; the original unconditional
			// value.(string) assertion panicked on boolean values.
			if s, ok := value.(string); ok && s == "{" {
				value = onoptionMap(tokens)
			}
			result[key] = value
		case "{":
			// Repeated `{}` blocks under the same key accumulate in a slice.
			shift(tokens)
			value = onoptionMap(tokens)
			if result[key] == nil {
				result[key] = make([]interface{}, 0)
			}
			v := reflect.ValueOf(result[key])
			if v.Kind() != reflect.Slice {
				panic("Duplicate option map key " + key)
			}
			l := v.Len()
			sliceValue := make([]interface{}, l)
			for i := 0; i < l; i++ {
				sliceValue[i] = v.Index(i).Interface()
			}
			result[key] = append(sliceValue, value)
		default:
			panic("Unexpected token in option map: " + tokens.data[0])
		}
	}
	panic("No closing tag for option map")
}
// optionsStruct is a single parsed `option name = value;` pair.
type optionsStruct struct {
	name  string
	value interface{}
}
// onoption parses a single `option name = value;` statement, including
// custom `option (name) = ...` forms and aggregate `{ ... }` values.
func onoption(tokens *tokensArray) optionsStruct {
	var name string
	var value interface{}
	var result optionsStruct
	for len(tokens.data) > 0 {
		if tokens.data[0] == ";" {
			shift(tokens)
			result.name = name
			result.value = value
			return result
		}
		switch tokens.data[0] {
		case "option":
			shift(tokens)
			hasBracket := tokens.data[0] == "("
			if hasBracket {
				shift(tokens)
			}
			name = shift(tokens)
			if hasBracket {
				if tokens.data[0] != ")" {
					panic("Expected ) but found " + tokens.data[0])
				}
				shift(tokens)
			}
		case "=":
			shift(tokens)
			if name == "" {
				panic("Expected key for option with value: " + tokens.data[0])
			}
			value = parse(shift(tokens))
			// Fix: parse() returns a bool for "true"/"false"; the original
			// asserted value.(string) unconditionally and panicked on any
			// boolean option such as `option deprecated = true;`.
			if s, ok := value.(string); ok {
				re := regexp.MustCompile(`^(SPEED|CODE_SIZE|LITE_RUNTIME)$`)
				if name == "optimize_for" && !re.MatchString(s) {
					panic("Unexpected value for option optimize_for: " + s)
				} else if s == "{" {
					value = onoptionMap(tokens)
				}
			}
		default:
			panic("Unexpected token in option: " + tokens.data[0])
		}
	}
	return result
}
// enumValue is one parsed enum entry; val is a map with "value" (int) and,
// when present, "options".
type enumValue struct {
	name string
	val  interface{}
}
// onenumvalue parses a single enum value line, e.g. `FOO = 1;` or
// `FOO = 1 [deprecated = true];`, returning its name and value map.
func onenumvalue(tokens *tokensArray) enumValue {
	var result enumValue
	if len(tokens.data) < 4 {
		// Fix: joining tokens.data[0:3] panicked when fewer than 3 tokens
		// remained; join whatever actually is left.
		info := strings.Join(tokens.data, " ")
		panic("Invalid enum value: " + info)
	}
	if tokens.data[1] != "=" {
		panic("Expected = but found " + tokens.data[1])
	}
	// Fix: the terminator may be ";" or "[" (value options); the original
	// accepted only ";" — contradicting its own "[" handling below — and
	// reported tokens.data[1] instead of the offending token.
	if tokens.data[3] != ";" && tokens.data[3] != "[" {
		panic("Expected ; or [ but found " + tokens.data[3])
	}
	name := shift(tokens)
	shift(tokens) // "="
	// Fix: allocate the map; assigning into a nil map panics.
	val := make(map[string]interface{})
	val["value"], _ = strconv.Atoi(shift(tokens))
	if tokens.data[0] == "[" {
		val["options"] = onfieldoptions(tokens)
	}
	shift(tokens) // trailing ";"
	result.name = name
	result.val = val
	return result
}
// onenum parses an `enum Name { ... }` block into a map with "name",
// optional "options", and the accumulated "values".
func onenum(tokens *tokensArray) map[string]interface{} {
	shift(tokens) // "enum" keyword
	// Fix: allocate the map; writing to a nil map panics.
	e := make(map[string]interface{})
	e["name"] = shift(tokens)
	// Fix: collect every value — the original overwrote e["values"] each
	// iteration, keeping only the last enum entry.
	values := make([]enumValue, 0)
	if tokens.data[0] != "{" {
		panic("Expected { but found " + tokens.data[0])
	}
	shift(tokens)
	for len(tokens.data) > 0 {
		if tokens.data[0] == "}" {
			shift(tokens)
			// Fix: guard the optional trailing ";" — the enum may be the
			// last construct in the token stream.
			if len(tokens.data) > 0 && tokens.data[0] == ";" {
				shift(tokens)
			}
			e["values"] = values
			return e
		}
		if tokens.data[0] == "option" {
			e["options"] = onoption(tokens)
			// Fix: after an option line the next token is not an enum
			// value; the original fell through into onenumvalue.
			continue
		}
		values = append(values, onenumvalue(tokens))
	}
	panic("No closing tag for enum")
}
// onextensions parses `extensions <from> to <to|max>;` and returns the
// allowed extension tag range.
func onextensions(tokens *tokensArray) extensions {
	shift(tokens) // "extensions" keyword
	from, err1 := strconv.Atoi(shift(tokens))
	if err1 != nil {
		panic("Invalid from in extensions definition")
	}
	if shift(tokens) != "to" {
		panic("Expected keyword 'to' in extensions definition")
	}
	var to = shift(tokens)
	var toNumber int
	if to == "max" {
		// "max" maps to the largest protobuf field number.
		toNumber = maxRange
	} else {
		// Fix: the original ran Atoi(to) unconditionally, so a valid
		// `... to max;` range always panicked here.
		var err2 error
		toNumber, err2 = strconv.Atoi(to)
		if err2 != nil {
			panic("Invalid to in extensions definition")
		}
	}
	if shift(tokens) != ";" {
		panic("Missing ; in extensions definition")
	}
	var result extensions
	result.from = from
	result.to = toNumber
	return result
}
// extand represents an `extend <name> { ... }` block. ("extand" is the
// original author's spelling; renaming would break external users.)
type extand struct {
	name    string
	message message
}
// onextend parses an `extend Target { ... }` block: the target name is the
// token right after "extend", and the body is parsed as a message.
func onextend(tokens *tokensArray) extand {
	// Composite literal fields evaluate in order, so the name is captured
	// before onmessage consumes the token stream.
	return extand{
		name:    tokens.data[1],
		message: onmessage(tokens),
	}
}
// onmessagebody parses the buffered token stream of a message body into
// fields, nested enums/messages, extend blocks and an extensions range.
func onmessagebody(tokens *tokensArray) messageBody {
	var body messageBody
	for len(tokens.data) > 0 {
		switch tokens.data[0] {
		case "map", "repeated", "optional", "required":
			body.fields = append(body.fields, onfield(tokens))
		case "enum":
			body.enums = append(body.enums, onenum(tokens))
		case "message":
			body.messages = append(body.messages, onmessage(tokens))
		case "extensions":
			body.extensions = onextensions(tokens)
		case "oneof":
			shift(tokens)
			name := shift(tokens)
			if tokens.data[0] != "{" {
				panic("Unexpected token in oneof: " + tokens.data[0])
			}
			shift(tokens)
			// Each member of the oneof is parsed as an ordinary optional
			// field, tagged with the enclosing oneof's name.
			for tokens.data[0] != "}" {
				unshift(tokens, "optional")
				field := onfield(tokens)
				field.oneof = name
				body.fields = append(body.fields, field)
			}
			shift(tokens)
		case "extend":
			body.extends = append(body.extends, onextend(tokens))
		case ";":
			shift(tokens)
		case "reserved", "option":
			// Skipped constructs: consume tokens up to (not including) the
			// ";", which the ";" case above then discards on the next pass.
			shift(tokens)
			for tokens.data[0] != ";" {
				shift(tokens)
			}
		default:
			// proto3 fields carry no label; treat them as optional.
			unshift(tokens, "optional")
			body.fields = append(body.fields, onfield(tokens))
		}
	}
	return body
}
// onmessage parses `message Name { ... }`, buffering the tokens between the
// top-level braces and handing them to onmessagebody.
func onmessage(tokens *tokensArray) message {
	shift(tokens) // "message" (or "extend") keyword
	lvl := 1
	var bodyTokens tokensArray
	var msg message
	msg.name = shift(tokens)
	if tokens.data[0] != "{" {
		panic(`Expected { but found '` + tokens.data[0])
	}
	shift(tokens)
	for len(tokens.data) > 0 {
		// Track brace nesting so inner message/enum braces stay in the body.
		if tokens.data[0] == "{" {
			lvl++
		} else if tokens.data[0] == "}" {
			lvl--
		}
		if lvl == 0 {
			shift(tokens) // this message's closing "}"
			body := onmessagebody(&bodyTokens)
			msg.enums = body.enums
			msg.messages = body.messages
			msg.fields = body.fields
			msg.extends = body.extends
			msg.extensions = body.extensions
			return msg
		}
		bodyTokens.data = append(bodyTokens.data, shift(tokens))
	}
	// Fix: reaching here means the stream ran out before the closing brace.
	// The original panicked on the (unreachable) lvl == 0 case and silently
	// returned a half-parsed message when the brace was actually missing.
	panic("No closing tag for message")
}
// shift pops and returns the first token from the stream.
func shift(tokens *tokensArray) string {
	head := tokens.data[0]
	tokens.data = tokens.data[1:]
	return head
}
// unshift pushes str back onto the front of the token stream and returns it.
func unshift(tokens *tokensArray, str string) string {
	tokens.data = append([]string{str}, tokens.data...)
	return str
}
// onimport parses `import "path/to/file.proto";` and returns the unquoted
// file path.
func onimport(tokens *tokensArray) string {
	shift(tokens) // "import" keyword
	str := shift(tokens)
	// Trim surrounding quote runs. The original compiled a regexp for this
	// (and ignored the compile error); strings.Trim is the plain equivalent.
	file := strings.Trim(str, `"`)
	if tokens.data[0] != ";" {
		panic("Unexpected token: " + tokens.data[0] + `. Expected ";"`)
	}
	shift(tokens)
	return file
}
// rpcType is a parsed `rpc` method declaration inside a service.
type rpcType struct {
	name            string
	inputType       string
	outputType      string
	clientStreaming bool // request is `stream`-qualified
	serverStreaming bool // response is `stream`-qualified
	options         map[string]interface{}
}
// onrpc parses one `rpc Name (In) returns (Out);` declaration, including
// optional "stream" markers and an optional `{ option ... }` body.
func onrpc(tokens *tokensArray) rpcType {
	shift(tokens) // "rpc" keyword
	var rpc rpcType
	// Fix: allocate the options map; writing to a nil map panics.
	rpc.options = make(map[string]interface{})
	rpc.name = shift(tokens)
	if tokens.data[0] != "(" {
		panic("Expected ( but found " + tokens.data[0])
	}
	shift(tokens)
	if tokens.data[0] == "stream" {
		shift(tokens)
		rpc.clientStreaming = true
	}
	rpc.inputType = shift(tokens)
	if tokens.data[0] != ")" {
		panic("Expected ) but found " + tokens.data[0])
	}
	shift(tokens)
	if tokens.data[0] != "returns" {
		panic("Expected returns but found " + tokens.data[0])
	}
	shift(tokens)
	if tokens.data[0] != "(" {
		panic("Expected ( but found " + tokens.data[0])
	}
	// Fix: the original never consumed this "(" and used an inverted
	// `!= "stream"` test, so it misread the output type and flagged every
	// non-streaming rpc as server-streaming.
	shift(tokens)
	if tokens.data[0] == "stream" {
		shift(tokens)
		rpc.serverStreaming = true
	}
	rpc.outputType = shift(tokens)
	if tokens.data[0] != ")" {
		panic("Expected ) but found " + tokens.data[0])
	}
	shift(tokens)
	if tokens.data[0] == ";" {
		shift(tokens)
		return rpc
	}
	if tokens.data[0] != "{" {
		panic("Expected { but found " + tokens.data[0])
	}
	shift(tokens)
	for len(tokens.data) > 0 {
		if tokens.data[0] == "}" {
			shift(tokens)
			// Fix: guard against running off the end of the stream when
			// the rpc body is the last thing parsed.
			if len(tokens.data) > 0 && tokens.data[0] == ";" {
				shift(tokens)
			}
			return rpc
		}
		if tokens.data[0] == "option" {
			opt := onoption(tokens)
			if rpc.options[opt.name] != nil {
				panic("Duplicate option " + opt.name)
			}
			rpc.options[opt.name] = opt.value
		} else {
			panic("Unexpected token in rpc options: " + tokens.data[0])
		}
	}
	panic("No closing tag for rpc")
}
// serviceType is a parsed `service` block: its rpc methods and options.
type serviceType struct {
	name    string
	methods []interface{} // rpcType entries
	options map[string]interface{}
}
// enumNameIsFieldType reports whether the field's type names one of the
// given enums. Enums are stored as map[string]interface{} with a "name" key
// (see onenum); the original asserted v.(string) unconditionally, which
// panics on that representation.
func enumNameIsFieldType(enums []interface{}, field field) bool {
	for _, v := range enums {
		if e, ok := v.(map[string]interface{}); ok {
			if name, ok := e["name"].(string); ok && name == field.typeArea {
				return true
			}
		}
	}
	return false
}
// enumNameIsNestedEnumName reports whether msg declares a nested enum with
// the given name. Enums are map[string]interface{} values keyed by "name";
// the original asserted v.(string) unconditionally, which panics on that
// representation.
func enumNameIsNestedEnumName(msg message, nestedEnumName string) bool {
	for _, v := range msg.enums {
		if e, ok := v.(map[string]interface{}); ok {
			if name, ok := e["name"].(string); ok && name == nestedEnumName {
				return true
			}
		}
	}
	return false
}
// onservice parses a `service Name { ... }` block of rpc methods and options.
func onservice(tokens *tokensArray) serviceType {
	shift(tokens) // "service" keyword
	var service serviceType
	// Fix: allocate the options map; writing to a nil map panics.
	service.options = make(map[string]interface{})
	service.name = shift(tokens)
	if tokens.data[0] != "{" {
		panic("Expected { but found " + tokens.data[0])
	}
	shift(tokens)
	for len(tokens.data) > 0 {
		if tokens.data[0] == "}" {
			shift(tokens)
			if len(tokens.data) > 0 && tokens.data[0] == ";" {
				shift(tokens)
			}
			return service
		}
		switch tokens.data[0] {
		case "option":
			opt := onoption(tokens)
			if service.options[opt.name] != nil {
				panic("Duplicate option " + opt.name)
			}
			service.options[opt.name] = opt.value
		case "rpc":
			service.methods = append(service.methods, onrpc(tokens))
		default:
			panic("Unexpected token in service: " + tokens.data[0])
		}
	}
	panic("No closing tag for service")
}
// ParseProto tokenizes and parses the protobuf file at path into a Schema.
// Like the rest of this parser, it panics on malformed input.
func ParseProto(path string) Schema {
	var tokens tokensArray
	tokens.data = Token(path)
	var sch Schema
	// Fix: allocate the options map; writing to a nil map panics.
	sch.Options = make(map[string]interface{})
	firstline := true
	for len(tokens.data) > 0 {
		switch tokens.data[0] {
		case "package":
			sch.Pack = onpackagename(&tokens)
		case "syntax":
			if !firstline {
				panic("Protobuf syntax version should be first thing in file")
			}
			sch.Syntax = onsyntaxversion(&tokens)
		case "message":
			sch.Messages = append(sch.Messages, onmessage(&tokens))
		case "enum":
			sch.Enums = append(sch.Enums, onenum(&tokens))
		case "option":
			opt := onoption(&tokens)
			if sch.Options[opt.name] != nil {
				panic("Duplicate option " + opt.name)
			}
			sch.Options[opt.name] = opt.value
		case "import":
			sch.Imports = append(sch.Imports, onimport(&tokens))
		case "extend":
			sch.Extends = append(sch.Extends, onextend(&tokens))
		case "service":
			sch.Services = append(sch.Services, onservice(&tokens))
		default:
			panic("Unexpected token: " + tokens.data[0])
		}
		firstline = false
	}
	mergeExtends(&sch)
	validatePacked(&sch)
	return sch
}

// mergeExtends folds every `extend` block's fields into the message it
// targets, validating each tag against the message's extension range.
func mergeExtends(sch *Schema) {
	for _, ext := range sch.Extends {
		// Fix: iterate by index so the appended fields land on the real
		// message; the original ranged by value and mutated a copy.
		for i := range sch.Messages {
			msg := &sch.Messages[i]
			if msg.name != ext.name {
				continue
			}
			for _, f := range ext.message.fields {
				if (msg.extensions.from == 0 && msg.extensions.to == 0) ||
					f.tag < msg.extensions.from || f.tag > msg.extensions.to {
					// Fix: missing space before "as" in the message.
					panic(msg.name + " does not declare " + strconv.Itoa(f.tag) + " as an extension number")
				}
				msg.fields = append(msg.fields, f)
			}
		}
	}
}

// validatePacked panics if a [packed=true] field is neither a packable
// scalar nor an enum (top-level or nested) — mirroring protoc's rule that
// only primitive numeric types may be packed.
func validatePacked(sch *Schema) {
	for _, msg := range sch.Messages {
		for _, f := range msg.fields {
			// Fix: the original tested `f.options == nil && ...`, which can
			// never both hold, so this validation was dead code. (Reading a
			// nil map is safe and yields "".)
			if f.options["packed"] != "true" {
				continue
			}
			if IndexOf(packableTypes, f.typeArea) != -1 {
				continue // packable scalar — fine
			}
			if !strings.Contains(f.typeArea, ".") {
				// Fix: a matching enum used to `return sch` out of
				// ParseProto, skipping validation of all remaining fields.
				if len(msg.enums) != 0 && enumNameIsFieldType(msg.enums, f) {
					continue
				}
			} else {
				parts := strings.Split(f.typeArea, ".")
				if len(parts) > 2 {
					panic("what is this?")
				}
				var target message
				for _, m := range sch.Messages {
					// Fix: the original assigned the outer loop variable
					// (msg) instead of the matching message (mssg).
					if m.name == parts[0] {
						target = m
						break
					}
				}
				if len(target.enums) > 0 && enumNameIsNestedEnumName(target, parts[1]) {
					continue
				}
			}
			// Fix: the original message embedded stray `' +` fragments from
			// a JavaScript-style concatenation and lacked a space before
			// "cannot".
			panic("Fields of type " + f.typeArea + ` cannot be declared [packed=true]. ` +
				`Only repeated fields of primitive numeric types (types which use ` +
				`the varint, 32-bit, or 64-bit wire types) can be declared "packed". ` +
				`See https://developers.google.com/protocol-buffers/docs/encoding#optional`)
		}
	}
}
|
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense
from sklearn import preprocessing

# Create the artificial neural network:
# 7 weather inputs -> 8-unit ReLU hidden layer -> 2 sigmoid outputs.
model = Sequential()
model.add(Dense(8, input_dim=7, activation='relu'))
model.add(Dense(2, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Create the input data from the weather variables
# NOTE(review): temperature, humidity, pressure, wind_speed, wind_direction,
# cloud_cover and rain_amount are not defined anywhere in this snippet —
# running it as-is raises NameError. They presumably come from an omitted
# data-loading step; confirm before use.
weather_data = np.array([temperature, humidity, pressure, wind_speed, wind_direction, cloud_cover, rain_amount])
input_data = preprocessing.scale(weather_data)

# Train the model on the data
# NOTE(review): rain_prediction is used as the training target here but is
# only assigned below from model.predict — as written this is a NameError,
# and the intended labels should come from the dataset, not the model.
model.fit(input_data, rain_prediction, epochs=10, batch_size=1)

# Use the model to make future predictions
rain_prediction = model.predict(input_data)
<reponame>MichGedd/NDegreesOfWikipedia
//
// Created by micha on 2020-09-20.
//
#pragma once
#include <iostream>
#include <queue>
#include <string>
#include <pthread.h>
#include "Graph.h"
#include "WebCrawler.h"
namespace NDegreesOfWikipedia {
    // Per-thread bundle of shared state handed to each crawler worker.
    struct ThreadData {
        NDegreesOfWikipedia::WebCrawler *pwc;            // crawler performing page fetches
        NDegreesOfWikipedia::Graph *pGraph;              // shared link graph being built
        std::queue<NDegreesOfWikipedia::Node *> *pQueue; // shared work queue of pages to visit
        bool *killSig;                                   // set to signal all workers to stop
        int threadIdx;                                   // worker index, for identification
    };

    // Coordinates a pool of crawler threads searching for a link path
    // between two Wikipedia pages.
    class WebCrawlerManager {
    public:
        // start/end identify the two pages (presumably titles or URLs —
        // confirm against the implementation); numThreads sizes the pool.
        WebCrawlerManager(std::string start, std::string end, int numThreads);
        void run();
    private:
        std::queue<NDegreesOfWikipedia::Node *> queue;
        NDegreesOfWikipedia::Graph graph;
        std::string start, end;
        int numThreads;
    };
}
|
package com.damavis.spark.pipeline.stages
import com.damavis.spark.pipeline.PipelineStage
import org.apache.spark.sql.DataFrame
/** Pipeline stage that marks the incoming DataFrame for caching.
  *
  * Note: `DataFrame.cache()` is lazy — the data is materialized on the
  * first action executed downstream, not when this stage runs.
  */
class CacheStage extends PipelineStage {
  override def transform(data: DataFrame): DataFrame = data.cache()
}
|
def get_day(day):
    """Return the English weekday name for ``day`` (0=Sunday … 6=Saturday).

    Raises ``KeyError`` for values outside 0-6, matching the original
    dictionary lookup.
    """
    names = {
        0: "Sunday",
        1: "Monday",
        2: "Tuesday",
        3: "Wednesday",
        4: "Thursday",
        5: "Friday",
        6: "Saturday",
    }
    return names[day]
# Demo: day 6 maps to "Saturday".
result = get_day(6)
print(result)
#!/bin/bash
# Abort on the first failing command.
set -e
# Apply any pending database migrations, then start the app under uWSGI.
python manage.py db upgrade
uwsgi --ini docker/uwsgi.ini
<reponame>WSRWavedroids/RoverRuckus1
package org.firstinspires.ftc.robotcontroller;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.DigitalChannel;
import com.qualcomm.robotcore.hardware.DistanceSensor;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.hardware.Gyroscope;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;
/**
* Created by wave on 12/9/2017.
*/
@TeleOp
public class Other_Robot extends LinearOpMode {

    // Four-wheel drive motors, mapped from the robot configuration.
    private DcMotor FrontLeftDrive;
    private DcMotor FrontRightDrive;
    private DcMotor RearLeftDrive;
    private DcMotor RearRightDrive;
    // NOTE(review): never initialized from hardwareMap; all code that uses
    // it is commented out below. If those lines are re-enabled, map it
    // first or setPower will throw a NullPointerException.
    private DcMotor HookMotorDrive;

    // Latest gamepad axis readings, clipped to [-1, 1].
    private double LeftJoystick;
    private double LeftJoystick2;
    private double RightJoystickY;

    @Override
    public void runOpMode() {
        // Bind the drive motors to their configured hardware names.
        FrontLeftDrive = hardwareMap.get(DcMotor.class, "FrontLeft");
        FrontRightDrive = hardwareMap.get(DcMotor.class, "FrontRight");
        RearLeftDrive = hardwareMap.get(DcMotor.class, "RearLeft");
        RearRightDrive = hardwareMap.get(DcMotor.class, "RearRight");
        //Servo1.setPosition(0);
        //Servo2 = hardwareMap.get(Servo.class, "Servo 3");
        //Servo2.setPosition(0);
        //servoTest = hardwareMap.get(Servo.class "servoTest");
        //This is the right version
        telemetry.addData("Status", "Initialized");
        telemetry.update();

        // Wait for the game to start (driver preses PLAY)
        waitForStart();

        // run until the end of the match (driver presses STOP)
        while (opModeIsActive()) {
            // Forward/backward: left stick Y drives the left side forward
            // and the right side reversed.
            LeftJoystick = gamepad1.left_stick_y;
            LeftJoystick = Range.clip(LeftJoystick, -1.0, 1.0);
            FrontLeftDrive.setPower(LeftJoystick);
            FrontRightDrive.setPower(-LeftJoystick);
            RearRightDrive.setPower(-LeftJoystick);
            RearLeftDrive.setPower(LeftJoystick);

            // NOTE(review): these X-axis writes immediately overwrite the
            // Y-axis powers set just above (last write wins), so forward/
            // back input only takes effect while the X axis reads ~0. The
            // two axes are presumably meant to be mixed — confirm intent.
            LeftJoystick2 = gamepad1.left_stick_x;
            LeftJoystick2 = Range.clip(LeftJoystick2, -1.0, 1.0);
            FrontLeftDrive.setPower(LeftJoystick2);
            FrontRightDrive.setPower(LeftJoystick2);
            RearLeftDrive.setPower(LeftJoystick2);
            RearRightDrive.setPower(LeftJoystick2);

            // Right stick Y is read and clipped but currently unused — the
            // hook-motor lines below are commented out.
            RightJoystickY = gamepad1.right_stick_y;
            RightJoystickY = Range.clip(RightJoystickY, -1.0, 1.0);
            //HookMotorDrive.setPower(-RightJoystickY);
            //HookMotorDrive.setPower(RightJoystickY);

            // D-pad left/right: rotate in place at full power.
            if (gamepad1.dpad_left) {
                FrontLeftDrive.setPower(1);
                FrontRightDrive.setPower(1);
                RearLeftDrive.setPower(-1);
                RearRightDrive.setPower(-1);
            }
            if (gamepad1.dpad_right) {
                FrontLeftDrive.setPower(-1);
                FrontRightDrive.setPower(-1);
                RearLeftDrive.setPower(1);
                RearRightDrive.setPower(1);
            }

            // Disabled servo-stepping state machine (references Servo and
            // Servo1, neither of which is declared in this class).
            /*if (gamepad1.a && Servo == 1) {
                Servo = 2;
                Servo1.setPosition(.5);
                telemetry.addData("Servo", Servo1.getPosition());
            }
            if (gamepad1.a && Servo == 2) {
                Servo = 3;
                Servo1.setPosition(.75);
                telemetry.addData("Servo", Servo1.getPosition());
            }
            if (gamepad1.a && Servo == 3) {
                Servo = 4;
                Servo1.setPosition(1);
                telemetry.addData("Servo", Servo1.getPosition());
            }
            if (gamepad1.a && Servo == 4) {
                Servo = 5;
                Servo1.setPosition(.75);
                telemetry.addData("Servo", Servo1.getPosition());
            }
            if (gamepad1.a && Servo == 5) {
                Servo = 6;
                Servo1.setPosition(.5);
                telemetry.addData("Servo", Servo1.getPosition());
            }
            if (gamepad1.a && Servo == 6) {
                Servo = 7;
                Servo1.setPosition(.25);
                telemetry.addData("Servo", Servo1.getPosition());
            }
            if (gamepad1.a && Servo == 7) {
                Servo = 0;
                Servo1.setPosition(0);
                telemetry.addData("Servo", Servo1.getPosition());
            }*/
            /*if (gamepad1.y) {
                Servo1.setPosition(1);
            }
            if (gamepad1.b){
                Servo1.setPosition(0);
            }*/

            telemetry.addData("Status", "Running");
            //telemetry.addData("Motor", "Running");
            telemetry.update();
        }
    }
}
const test = require('ava');

const fileUpload = require('../lib/file-upload');
const oAuthToken = require('../lib/oauth-token');
const queryVisionApi = require('../lib/query-vision-api');
const readFile = require('../lib/read-file');
const analyzeFile = require('../lib/analyze-file');
const server = require('../lib/server');
const updateToken = require('../lib/update-token');
const cloudinaryResizeImage = require('../lib/cloudinary-resize-image');
const sendImageToVisionApi = require('../lib/send-image-to-vision-api');
const mainModule = require('../');

// The package entry point must re-export each library module unchanged.
// One identity test per [exportName, module] pair; titles match the
// original hand-written tests exactly.
const expectedExports = [
  ['cloudinaryResizeImage', cloudinaryResizeImage],
  ['fileUpload', fileUpload],
  ['oAuthToken', oAuthToken],
  ['queryVisionApi', queryVisionApi],
  ['readFile', readFile],
  ['analyzeFile', analyzeFile],
  ['server', server],
  ['updateToken', updateToken],
  ['sendImageToVisionApi', sendImageToVisionApi],
];

expectedExports.forEach(([name, expected]) => {
  test(`Exports \`${name}\``, t => {
    t.is(mainModule[name], expected);
  });
});
#!/bin/sh
# base16-shell (https://github.com/chriskempson/base16-shell)
# Base16 Shell template by Chris Kempson (http://chriskempson.com)
# Ashes scheme by Jannik Siebert (https://github.com/janniks)
# This script doesn't support linux console (use 'vconsole' template instead)
# The raw linux console can't handle these OSC palette escapes; bail out.
if [ "${TERM%%-*}" = 'linux' ]; then
    # `return` works when sourced; fall back to `exit` when executed.
    return 2>/dev/null || exit 0
fi

# Palette entries as rr/gg/bb hex triplets for the OSC 4 escape sequence.
color00="1c/20/23" # Base 00 - Black
color01="c7/ae/95" # Base 08 - Red
color02="95/c7/ae" # Base 0B - Green
color03="ae/c7/95" # Base 0A - Yellow
color04="ae/95/c7" # Base 0D - Blue
color05="c7/95/ae" # Base 0E - Magenta
color06="95/ae/c7" # Base 0C - Cyan
color07="c7/cc/d1" # Base 05 - White
color08="74/7c/84" # Base 03 - Bright Black
color09=$color01 # Base 08 - Bright Red
color10=$color02 # Base 0B - Bright Green
color11=$color03 # Base 0A - Bright Yellow
color12=$color04 # Base 0D - Bright Blue
color13=$color05 # Base 0E - Bright Magenta
color14=$color06 # Base 0C - Bright Cyan
color15="f3/f4/f5" # Base 07 - Bright White
color16="c7/c7/95" # Base 09
color17="c7/95/95" # Base 0F
color18="39/3f/45" # Base 01
color19="56/5e/65" # Base 02
color20="ad/b3/ba" # Base 04
color21="df/e2/e5" # Base 06
color_foreground="c7/cc/d1" # Base 05
color_background="1c/20/23" # Base 00

# Pick escape-sequence templates for the hosting terminal: tmux and screen
# need the sequences wrapped so they pass through to the outer terminal.
if [ -n "$TMUX" ]; then
    # Tell tmux to pass the escape sequences through
    # (Source: http://permalink.gmane.org/gmane.comp.terminal-emulators.tmux.user/1324)
    printf_template='\033Ptmux;\033\033]4;%d;rgb:%s\033\033\\\033\\'
    printf_template_var='\033Ptmux;\033\033]%d;rgb:%s\033\033\\\033\\'
    printf_template_custom='\033Ptmux;\033\033]%s%s\033\033\\\033\\'
elif [ "${TERM%%-*}" = "screen" ]; then
    # GNU screen (screen, screen-256color, screen-256color-bce)
    printf_template='\033P\033]4;%d;rgb:%s\033\\'
    printf_template_var='\033P\033]%d;rgb:%s\033\\'
    printf_template_custom='\033P\033]%s%s\033\\'
else
    printf_template='\033]4;%d;rgb:%s\033\\'
    printf_template_var='\033]%d;rgb:%s\033\\'
    printf_template_custom='\033]%s%s\033\\'
fi

# 16 color space
printf $printf_template 0 $color00
printf $printf_template 1 $color01
printf $printf_template 2 $color02
printf $printf_template 3 $color03
printf $printf_template 4 $color04
printf $printf_template 5 $color05
printf $printf_template 6 $color06
printf $printf_template 7 $color07
printf $printf_template 8 $color08
printf $printf_template 9 $color09
printf $printf_template 10 $color10
printf $printf_template 11 $color11
printf $printf_template 12 $color12
printf $printf_template 13 $color13
printf $printf_template 14 $color14
printf $printf_template 15 $color15

# 256 color space
printf $printf_template 16 $color16
printf $printf_template 17 $color17
printf $printf_template 18 $color18
printf $printf_template 19 $color19
printf $printf_template 20 $color20
printf $printf_template 21 $color21

# foreground / background / cursor color
if [ -n "$ITERM_SESSION_ID" ]; then
    # iTerm2 proprietary escape codes
    printf $printf_template_custom Pg C7CCD1 # foreground
    printf $printf_template_custom Ph 1C2023 # background
    printf $printf_template_custom Pi C7CCD1 # bold color
    printf $printf_template_custom Pj 565E65 # selection color
    printf $printf_template_custom Pk C7CCD1 # selected text color
    printf $printf_template_custom Pl C7CCD1 # cursor
    printf $printf_template_custom Pm 1C2023 # cursor text
else
    printf $printf_template_var 10 $color_foreground
    if [ "$BASE16_SHELL_SET_BACKGROUND" != false ]; then
        printf $printf_template_var 11 $color_background
        if [ "${TERM%%-*}" = "rxvt" ]; then
            printf $printf_template_var 708 $color_background # internal border (rxvt)
        fi
    fi
    printf $printf_template_custom 12 ";7" # cursor (reverse video)
fi
# clean up — drop every helper variable so nothing leaks into the
# interactive shell that sources this script.
unset printf_template
unset printf_template_var
# Fix: printf_template_custom was defined above but never unset.
unset printf_template_custom
unset color00
unset color01
unset color02
unset color03
unset color04
unset color05
unset color06
unset color07
unset color08
unset color09
unset color10
unset color11
unset color12
unset color13
unset color14
unset color15
unset color16
unset color17
unset color18
unset color19
unset color20
unset color21
unset color_foreground
unset color_background
|
// Barrel file: re-export the public API of the color, theme and utils
// modules so consumers can import everything from the package root.
export * from './color'
export * from './theme'
export * from './utils'
|
<filename>todo.js
/*
function getBlockTransactionCount(node) {
let closure = (roothash, blocknumber, callback) => {
if (!roothash) {
callback('roothash is invalid')
}
// Get the tracking object at roothash
let value
async.series([
(cb) => {
node.dag.get(roothash, (error, result) => {
if (error) {
cb(error)
} else if (result.value.latest < blocknumber) {
cb('blocknumber too high')
} else if (result.value.latest - result.value.blocks.length + 1 > blocknumber) {
closure(result.value.last, blocknumber, callback)
} else {
value = result.value
cb()
}
})
},
(cb) => {
node.dag.get(value.blocks[ blocknum + value.block.length - latest ], (error, result) => {
if (error) {
cb(error)
} else {
value = result.value
cb()
}
})
},
(cb) => {
node.dag.get(value.transactions, (error, result) => {
if (error) {
cb(error)
} else {
cb(null, result.value.length)
}
})
}
], (error, block) => {
if (error) {
callback(error)
} else {
callback(null, block)
}
})
}
return closure
}
*/
|
<reponame>PinoEire/archi
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.ui.textrender;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.archimatetool.model.IArchimateModelObject;
/**
* Name renderer
*
* @author <NAME>
*/
@SuppressWarnings("nls")
public class NameRenderer extends AbstractTextRenderer {

    // Matches "$<prefix>{name}" tokens; group(1) captures the optional prefix.
    private static final Pattern NAME_PATTERN = Pattern.compile("\\$" + allPrefixesGroup + "\\{name\\}");

    @Override
    public String render(IArchimateModelObject object, String text) {
        Matcher matcher = NAME_PATTERN.matcher(text);
        // The matcher iterates over the original text snapshot while the
        // replacements are applied to the evolving copy; String.replace()
        // substitutes every occurrence of the matched token, so duplicate
        // tokens are all handled on the first hit.
        while(matcher.find()) {
            String prefix = matcher.group(1);
            String replacement = "";
            // Resolve the model object the prefix refers to; unknown
            // prefixes (null refObject) render as an empty string.
            IArchimateModelObject refObject = getObjectFromPrefix(object, prefix);
            if(refObject != null) {
                replacement = refObject.getName();
            }
            text = text.replace(matcher.group(), replacement);
        }
        return text;
    }
}
<reponame>helloiamlukas/nts-favorites-extension
// Persist `object` into chrome.storage.local, resolving with the callback's
// result.
// Fix: surface chrome.runtime.lastError — the original resolved
// unconditionally, silently swallowing storage failures (e.g. quota).
function setKey(object) {
    return new Promise((resolve, reject) =>
        chrome.storage.local.set(object, result => {
            if (chrome.runtime.lastError) {
                reject(chrome.runtime.lastError);
            } else {
                resolve(result);
            }
        })
    );
}
function getKey(keys) {
return new Promise((resolve, reject) =>
chrome.storage.local.get(keys, result => resolve(result))
);
}
export {
setKey, getKey
} |
#include <stdlib.h>
/* State of a (simplified) Tausworthe combined-LFSR generator: z1..z3 are
 * the three shift-register states, s1..s3 the shift amounts, and b the
 * 32-bit state mask. */
typedef struct {
    unsigned int z1;
    unsigned int z2;
    unsigned int z3;
    unsigned int s1;
    unsigned int s2;
    unsigned int s3;
    unsigned int b;
} TauswortheGenerator;

/* Single global generator instance shared by the functions below. */
TauswortheGenerator prop;
/* Seed the generator: the three register states come from rand() seeded
 * with globalSeed, so identical seeds reproduce identical streams on the
 * same C library.
 * NOTE(review): rand() may return 0, and a register state of 0 never
 * leaves 0 under the update in generateRandomNumber, silencing that
 * component — consider forcing the seeds non-zero. */
void initializeTauswortheGenerator(unsigned int globalSeed) {
    srand(globalSeed);
    prop.z1 = rand();
    prop.z2 = rand();
    prop.z3 = rand();
    prop.s1 = 13;  /* per-register shift amounts */
    prop.s2 = 19;
    prop.s3 = 12;
    prop.b = 0xFFFFFFFF;  /* 32-bit state mask */
}
/* Advance each register by one (z << s) ^ z step, mask to 32 bits, and
 * return the XOR of the three registers.
 * NOTE(review): this is a single-shift variant — the canonical
 * Tausworthe/taus88 step uses two shifts per register with distinct masks,
 * so the statistical quality of this generator is unverified. */
unsigned int generateRandomNumber() {
    prop.z1 = ((prop.z1 << prop.s1) ^ prop.z1) & prop.b;
    prop.z2 = ((prop.z2 << prop.s2) ^ prop.z2) & prop.b;
    prop.z3 = ((prop.z3 << prop.s3) ^ prop.z3) & prop.b;
    return (prop.z1 ^ prop.z2 ^ prop.z3);
}
/*
* MIT License
*
* Copyright (c) 2018 <NAME> (@smallcreep) <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.github.smallcreep.cucumber.seeds.sql;
import com.github.smallcreep.cucumber.seeds.Sql;
import org.cactoos.Func;
import org.cactoos.func.StickyFunc;
/**
* Cached version of a Sql.
*
* <p>This {@link Sql} decorator technically is an in-memory
* cache.</p>
*
* @since 0.2.1
*/
public final class SqlSticky implements Sql {
    /**
     * Func.
     * StickyFunc memoizes the result per input, so the wrapped Sql's
     * query() runs at most once; the Boolean input is a dummy key that is
     * always {@code true}.
     */
    private final Func<Boolean, String> func;
    /**
     * Ctor.
     * @param sql The Sql to cache
     */
    public SqlSticky(final Sql sql) {
        this.func = new StickyFunc<>(
            input -> sql.query()
        );
    }
    @Override
    public String query() throws Exception {
        // Same constant key on every call -> the sticky cache returns the
        // memoized query text after the first invocation.
        return this.func.apply(true);
    }
}
|
# Configure xz for the cross/LFS build: install under /usr, build only the
# shared libraries, and keep docs in a versioned directory.
./configure --prefix=/usr \
            --host=$LFS_TGT \
            --build=$(build-aux/config.guess) \
            --disable-static \
            --docdir=/usr/share/doc/xz-5.2.5
make
# Install into the $LFS sysroot rather than onto the host system.
make DESTDIR=$LFS install
# Relocate the programs and runtime library from $LFS/usr into $LFS/bin and
# $LFS/lib — presumably so they are usable before /usr is available (LFS
# convention; confirm against the book's chapter for this package).
mv -v $LFS/usr/bin/{lzma,unlzma,lzcat,xz,unxz,xzcat} $LFS/bin
mv -v $LFS/usr/lib/liblzma.so.* $LFS/lib
# Recreate the development symlink so it points at the relocated library.
ln -svf ../../lib/$(readlink $LFS/usr/lib/liblzma.so) $LFS/usr/lib/liblzma.so
#!/bin/bash
# Register the sample clients and groups with Fabric.Authorization.
#
# Usage: <script> <installer-secret> [identity-base-url] [authorization-base-url]
secret=$1

# Optional URL overrides from the command line.  The original used bare
# "[ $2 ]" tests, which break (or silently mis-evaluate) when arguments
# contain spaces; -n with quoting is the robust form.
if [ -n "$2" ]; then
    identitybaseurl=$2
fi
if [ -n "$3" ]; then
    authorizationbaseurl=$3
fi

# Fall back to local development defaults when no URL was supplied.
if [ -z "$identitybaseurl" ]; then
    identitybaseurl=http://localhost:5001
fi
if [ -z "$authorizationbaseurl" ]; then
    authorizationbaseurl=http://localhost:5004
fi

echo "getting access token for installer..."
# Client-credentials grant for the fabric-installer client.
accesstokenresponse=$(curl $identitybaseurl/connect/token --data "client_id=fabric-installer&grant_type=client_credentials" --data-urlencode "client_secret=$secret")
echo $accesstokenresponse
# Extract the raw token from the JSON response (quote the expansion so the
# JSON is not word-split before grep sees it).
accesstoken=$(echo "$accesstokenresponse" | grep -oP '(?<="access_token":")[^"]*')

echo ""
echo "configuring Fabric.Authorization for samples..."
echo "setting up clients..."
curl -X POST -H "Content-Type: application/json" -H "Authorization: Bearer $accesstoken" -d "{ \"id\": \"fabric-mvcsample\", \"name\": \"Sample Fabric MVC Client\", \"topLevelSecurableItem\": { \"name\":\"fabric-mvcsample\"}}" $authorizationbaseurl/clients/
curl -X POST -H "Content-Type: application/json" -H "Authorization: Bearer $accesstoken" -d "{ \"id\": \"fabric-angularsample\", \"name\": \"Sample Fabric Angular Client\", \"topLevelSecurableItem\": { \"name\":\"fabric-angularsample\"}}" $authorizationbaseurl/clients/
echo ""

# Group names as sent to the API (extra backslashes survive shell quoting).
viewerRole="FABRIC\\\Health Catalyst Viewer"
editorRole="FABRIC\\\Health Catalyst Editor"

echo "setting up sample groups..."
curl -X POST -H "Content-Type: application/json" -H "Authorization: Bearer $accesstoken" -d "{ \"groupName\": \"$viewerRole\", \"groupSource\": \"custom\"}" $authorizationbaseurl/groups/
curl -X POST -H "Content-Type: application/json" -H "Authorization: Bearer $accesstoken" -d "{ \"groupName\": \"$editorRole\", \"groupSource\": \"custom\"}" $authorizationbaseurl/groups/
#include "../inc/Analyzer.hpp"
#include <iostream>
/// Entry point: parse the command-line arguments and run the analysis.
/// Analyzer reports failures by throwing std::string messages (per the
/// visible catch clause); those are printed and turned into exit code 1.
int main(int argc, char* argv[]) {
    using namespace std;
    Analyzer analyzer;
    try {
        analyzer.readComArgs(argc, argv);
        analyzer.analyze();
    }
    catch (const string& info) {  // catch by const reference, not by value
        cout << info << endl;
        return 1;                 // signal the failure to the caller
    }
    return 0;
}
|
<!-- Demo user roster. scope="col" marks the header cells as column headers
     so screen readers associate each data cell with its column. -->
<table>
  <tr>
    <th scope="col">Name</th>
    <th scope="col">Age</th>
    <th scope="col">City</th>
    <th scope="col">Gender</th>
    <th scope="col">ID</th>
  </tr>
  <tr>
    <td>Joe</td>
    <td>24</td>
    <td>Portland</td>
    <td>Male</td>
    <td>1234</td>
  </tr>
  <tr>
    <td>Jane</td>
    <td>32</td>
    <td>San Francisco</td>
    <td>Female</td>
    <td>5678</td>
  </tr>
  <tr>
    <td>Steve</td>
    <td>45</td>
    <td>Los Angeles</td>
    <td>Male</td>
    <td>9012</td>
  </tr>
</table>
import { Point, TextEditor } from 'atom'
import { Datatip } from 'atom-ide'
import DatatipAdapter from 'atom-languageclient/build/lib/adapters/datatip-adapter'
import { LanguageClientConnection } from 'atom-languageclient/build/lib/languageclient'
import { getWordAtPosition } from 'atom-languageclient/build/lib/utils'
/** Extends the default DatatipAdapter to work with the non-standard output. */
export class GoDatatipAdapter extends DatatipAdapter {
  /** Override getDatatip to correct the range. */
  public async getDatatip(
    connection: LanguageClientConnection,
    editor: TextEditor,
    point: Point
  ): Promise<Datatip | null> {
    const datatip = await super.getDatatip(connection, editor, point)
    if (!datatip) {
      return datatip
    }
    // Replace the server-reported range with the word under the cursor.
    datatip.range = getWordAtPosition(editor, point)
    return datatip
  }
}
|
#!/bin/sh
# ** AUTO GENERATED **
# 4.1.13 - Ensure successful file system mounts are collected (Scored)
# Append audit rules (64-bit and 32-bit syscall ABIs) that log every mount(2)
# call by real login users (auid >= 500); auid 4294967295 is unsigned -1,
# i.e. "no login uid set" (daemons), and is excluded.  Events are tagged
# with the search key "mounts".
echo "-a always,exit -F arch=b64 -S mount -F auid>=500 -F auid!=4294967295 -k mounts" >> /etc/audit/rules.d/audit.rules
echo "-a always,exit -F arch=b32 -S mount -F auid>=500 -F auid!=4294967295 -k mounts" >> /etc/audit/rules.d/audit.rules
# Restart the audit daemon so the new rules take effect.
service auditd restart
#!/bin/sh
# Container entrypoint: render the runtime configuration and start nginx.
echo "Preparing /env.js"
# Substitute current environment variables into the env.js template so the
# static frontend can pick up deployment-specific settings at runtime.
envsubst < /usr/share/nginx/html/env.js.tmpl > /usr/share/nginx/html/env.js
echo "Starting Nginx"
# Run nginx in the foreground so it stays the container's main process.
nginx -g "daemon off;"
// Barrel file: re-export the public API of each submodule so consumers can
// import everything from this directory's index.
export * from "./pools";
export * from "./timeouts";
export * from "./triggers";
export * from "./windows";
export * from "./executors";
export * from "./states";
<gh_stars>0
# -*- coding: utf-8 -*-
"""Python e GTK 4: PyGObject Gtk.FlowBox()."""
import gi
gi.require_version(namespace='Gtk', version='4.0')
from gi.repository import Gio, Gtk
class MainWindow(Gtk.ApplicationWindow):
    """Main window demonstrating the Gtk.FlowBox container (GTK 4)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_title(title='Python e GTK 4: PyGObject Gtk.FlowBox()')
        # Initial window size.
        self.set_default_size(width=int(1366 / 2), height=int(768 / 2))
        # Minimum window size.  set_size_request() takes integer pixel
        # values; the original passed floats (1366 / 6) — truncate with
        # int() for consistency with set_default_size() above.
        self.set_size_request(width=int(1366 / 6), height=int(768 / 6))

        scrolled_window = Gtk.ScrolledWindow.new()
        # The scrolled area is the window's only child.
        self.set_child(child=scrolled_window)

        flowbox = Gtk.FlowBox.new()
        flowbox.set_margin_top(margin=12)
        flowbox.set_margin_end(margin=12)
        flowbox.set_margin_bottom(margin=12)
        flowbox.set_margin_start(margin=12)
        # Pack rows starting at the top of the container.
        flowbox.set_valign(align=Gtk.Align.START)
        # At most five children per line.
        flowbox.set_max_children_per_line(n_children=5)
        # Children are not selectable.
        flowbox.set_selection_mode(mode=Gtk.SelectionMode.NONE)
        # Put the FlowBox inside the scrollable area.
        scrolled_window.set_child(child=flowbox)

        # Populate the FlowBox with demo buttons.
        for n in range(100):
            button = Gtk.Button.new_with_label(label=f'Botão {n}')
            flowbox.insert(widget=button, position=n)
class Application(Gtk.Application):
    """GTK application wrapper that owns and presents the main window."""

    def __init__(self):
        super().__init__(
            application_id='br.natorsc.Exemplo',
            flags=Gio.ApplicationFlags.FLAGS_NONE,
        )

    def do_startup(self):
        # Chain up so GTK performs its normal startup work.
        Gtk.Application.do_startup(self)

    def do_activate(self):
        # Reuse the existing window when the app is activated again;
        # otherwise create it on first activation.
        window = self.props.active_window
        if not window:
            window = MainWindow(application=self)
        window.present()

    def do_shutdown(self):
        # Chain up so GTK can release its resources.
        Gtk.Application.do_shutdown(self)
if __name__ == '__main__':
    import sys

    app = Application()
    # run() processes command-line arguments and blocks until the app quits.
    app.run(sys.argv)
|
<filename>example/src/views/Dashboard/Users/index.js
import React from 'react';
import AddUser from './AddUser';
import UserList from './UserList';
const Users = () => (
<div>
<h2>Users</h2>
<AddUser />
<UserList />
</div>
);
export default Users;
|
<reponame>AbstractCoderX/padla<gh_stars>0
package ru.progrm_jarvis.ultimatemessenger.format.model;
import ru.progrm_jarvis.ultimatemessenger.format.model.AsmTextModelFactory.Configuration.StringConcatFactoryAlgorithm;
import java.util.stream.Stream;
class AsmTextModelFactoryTest extends AbstractTextModelFactoryTest {

    /**
     * Supplies the ASM-based factory configurations exercised by the
     * inherited test suite: one using the VECTOR StringConcatFactory
     * algorithm and one with StringConcatFactory disabled entirely.
     */
    static Stream<TextModelFactory<User>> provideTestSubjects() {
        return Stream.of(
                // NOTE(review): the TREE-algorithm variant below is commented
                // out — presumably disabled pending a fix; confirm before
                // re-enabling or deleting.
                /*
                AsmTextModelFactory.create(
                        AsmTextModelFactory.configuration()
                                .enableStringConcatFactory(true)
                                .stringConcatFactoryAlgorithm(StringConcatFactoryAlgorithm.TREE)
                                .build()
                ),
                */
                AsmTextModelFactory.create(
                        AsmTextModelFactory.configuration()
                                .enableStringConcatFactory(true)
                                .stringConcatFactoryAlgorithm(StringConcatFactoryAlgorithm.VECTOR)
                                .build()
                ),
                AsmTextModelFactory.create(
                        AsmTextModelFactory.configuration()
                                .enableStringConcatFactory(false)
                                .build()
                )
        );
    }
}
<reponame>muaddibco/hrrs<gh_stars>10-100
package com.vlkan.hrrs.replayer.cli;
import com.codahale.metrics.MetricRegistry;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.vlkan.hrrs.commons.logger.Log4jLoggerLevelAccessor;
import com.vlkan.hrrs.commons.logger.LoggerLevelAccessor;
import com.vlkan.hrrs.replayer.executor.CloseableExecutor;
import com.vlkan.hrrs.replayer.executor.RateLimitedExecutor;
import com.vlkan.hrrs.replayer.http.ApacheHttpRequestRecordReplayer;
import com.vlkan.hrrs.replayer.http.HttpRequestRecordReplayer;
import com.vlkan.hrrs.replayer.jtl.JtlFilePrinter;
import com.vlkan.hrrs.replayer.jtl.JtlNullPrinter;
import com.vlkan.hrrs.replayer.jtl.JtlPrinter;
import com.vlkan.hrrs.replayer.metric.MetricFileReporter;
import com.vlkan.hrrs.replayer.metric.MetricNullReporter;
import com.vlkan.hrrs.replayer.metric.MetricReporter;
import javax.inject.Singleton;
import java.io.FileNotFoundException;
import static com.google.common.base.Preconditions.checkNotNull;
public class ReplayerModule extends AbstractModule {

    /** Replayer configuration shared by every binding in this module. */
    private final Config config;

    /**
     * Ctor.
     * @param config replayer configuration; must not be null
     */
    public ReplayerModule(Config config) {
        this.config = checkNotNull(config, "config");
    }

    /** Wires the replayer interfaces to their concrete implementations. */
    @Override
    protected void configure() {
        bind(Config.class).toInstance(config);
        bind(CloseableExecutor.class).to(RateLimitedExecutor.class);
        bind(HttpRequestRecordReplayer.class).to(ApacheHttpRequestRecordReplayer.class);
        bind(LoggerLevelAccessor.class).toInstance(Log4jLoggerLevelAccessor.getInstance());
    }

    @Provides
    @Singleton
    public MetricRegistry provideMetricRegistry() {
        return new MetricRegistry();
    }

    /** JTL output goes to a file when configured, otherwise to a no-op sink. */
    @Provides
    @Singleton
    public JtlPrinter provideJtlPrinter(Config config) throws FileNotFoundException {
        if (config.getJtlOutputFile() == null) {
            return JtlNullPrinter.getInstance();
        }
        return new JtlFilePrinter(config);
    }

    /** Metrics go to a file when configured, otherwise to a no-op reporter. */
    @Provides
    @Singleton
    public MetricReporter provideMetricReporter(Config config, MetricRegistry metricRegistry) {
        if (config.getMetricsOutputFile() == null) {
            return MetricNullReporter.getInstance();
        }
        return new MetricFileReporter(config, metricRegistry);
    }
}
|
#!/bin/bash
# @file mx-mix-models.sh
# @brief Generate a mixture model of a specified type, by applying given
# weights to a set of components, for translating a given textfile.
#
# @author George Foster
#
# COMMENTS:
#
# Technologies langagieres interactives / Interactive Language Technologies
# Inst. de technologie de l'information / Institute for Information Technology
# Conseil national de recherches Canada / National Research Council Canada
# Copyright 2006, Sa Majeste la Reine du Chef du Canada /
# Copyright 2006, Her Majesty in Right of Canada
# Include NRC's bash library.
# Locate sh_utils.sh next to this script, falling back to ../utils when
# executing from a src/* directory.
BIN=`dirname $0`
if [[ ! -r $BIN/sh_utils.sh ]]; then
   # assume executing from src/* directory
   BIN="$BIN/../utils"
fi
source $BIN/sh_utils.sh || { echo "Error: Unable to source sh_utils.sh" >&2; exit 1; }
print_nrc_copyright mx-mix-models.sh 2006
# Mark this run as an internal call — presumably so child Portage tools
# adjust their behaviour (sh_utils.sh convention; confirm).
export PORTAGE_INTERNAL_CALL=1
# Print any passed error messages followed by the help text to stderr,
# then exit with status 1.
usage() {
   for msg in "$@"; do
      echo $msg >&2
   done
   cat <<==EOF== >&2
mx-mix-models.sh [-v][-nofilt][-a args][-d pfx][-e ext][-o outfile]
                 type wts components textfile
Generate a mixture model of a specified type, by applying given weights to a
set of components, for translating a given textfile. <wts> is a file containing
weights; <components> contains basenames of curresponding models (complete
paths for models are specified by -d and -e). <type> is one of:
  mixlm  - mix language models by writing mixlm file
  srimix - mix language models by writing static lm file
           (requires a valid SRILM licence)
  rf     - mix relative-frequency phrase tables, using:
              mix_phrasetables args -wf wts -f textfile models
The resulting mixture model is written to stdout unless -o is specified.
Options:
  -v      increment the verbosity level by 1 (may be repeated)
  -nofilt don't filter models for textfile (if applicable) [filter]
  -d      prepend <pfx> to the pathname for component files
  -e      append <ext> to the pathname for component files
  -a      pass argument(s) <args> to mixing program
  -o      write output to outfile
==EOF==
   exit 1
}
# Command line processing
VERBOSE=0
cmpt_pfx=
cmpt_ext=
args=
ofile=
filt="-f"

while [ $# -gt 0 ]; do
   case "$1" in
   -h|-help)    usage;;
   -v|-verbose) VERBOSE=$(( $VERBOSE + 1 ));;
   -nofilt)     filt="" ;;
   -d)          arg_check 1 $# $1; cmpt_pfx=$2; shift;;
   -e)          arg_check 1 $# $1; cmpt_ext=$2; shift;;
   -a)          arg_check 1 $# $1; args=$2; shift;;
   -o)          arg_check 1 $# $1; ofile=$2; shift;;
   --)          shift; break;;
   -*)          error_exit "Unknown option $1.";;
   *)           break;;
   esac
   shift
done

if [ $# -ne 4 ]; then
   error_exit "Expecting 4 arguments!"
fi
type=$1
wts=$2
components=$3
textfile=$4

# Check arguments: all three input files must exist and be readable.
# Quote the expansions so paths containing whitespace don't break the tests
# (the original used unquoted [ ! -r $wts ] etc.).
if [ ! -r "$wts" ]; then error_exit "Can't read file $wts"; fi
if [ ! -r "$components" ]; then error_exit "Can't read file $components"; fi
if [ ! -r "$textfile" ]; then error_exit "Can't read file $textfile"; fi

filter_opt=
if [ -n "$filt" ]; then filter_opt="-f $textfile"; fi
# Build a temp file listing the full path of each component model
# (prefix/extension applied to every basename in $components).
tmp=`/usr/bin/uuidgen`
models="models.$tmp"
(
   for m in `cat $components`; do
      echo $cmpt_pfx$m$cmpt_ext
   done
) > $models

# Run
case $type in
mixlm)
   # A mixlm file is simply "model-path<TAB>weight" lines.
   if [[ -n $ofile ]]; then paste $models $wts > $ofile
   else paste $models $wts
   fi ;;
srimix)
   # sri-mix-lms.py writes the static LM itself, so -o is mandatory here.
   if [[ -z $ofile ]]; then
      error_exit "srimix requires explicit -o"
   else
      sri-mix-lms.py -v $models $wts $ofile
   fi ;;
rf)
   if [[ -n $ofile ]]; then
      eval mix_phrasetables $args -wf $wts $filter_opt `cat $models` > $ofile
   else
      eval mix_phrasetables $args -wf $wts $filter_opt `cat $models`
   fi ;;
*)
   # BUG FIX: the original interpolated the undefined variable $metric here,
   # producing "Unknown metric <>"; report the actual offending $type.
   error_exit "Unknown model type <$type>!"
esac

# Cleanup
rm $models
|
<reponame>Bielwenass/unchained<gh_stars>0
import { Dex, Status, Trade, TradeType, TransferType, Tx } from '../../../types'
import { TransactionParser } from '../index'
import multiSigSendEth from './mockData/multiSigSendEth'
import thorSwapDepositEth from './mockData/thorSwapDepositEth'
import thorSwapDepositUsdc from './mockData/thorSwapDepositUsdc'
import thorSwapTransferOutEth from './mockData/thorSwapTransferOutEth'
import thorSwapTransferOutUsdc from './mockData/thorSwapTransferOutUsdc'
import thorSwapRefundEth from './mockData/thorSwapRefundEth'
import zrxTradeBondToUni from './mockData/zrxTradeBondToUni'
import zrxTradeEthToMatic from './mockData/zrxTradeEthToMatic'
import zrxTradeTetherToKishu from './mockData/zrxTradeTetherToKishu'
import zrxTradeTribeToEth from './mockData/zrxTradeTribeToEth'
import ethSelfSend from './mockData/ethSelfSend'
import tokenSelfSend from './mockData/tokenSelfSend'
import uniApprove from './mockData/uniApprove'
import uniAddLiquidity from './mockData/uniAddLiquidity'
import uniRemoveLiquidity from './mockData/uniRemoveLiquidity'
import foxClaim from './mockData/foxClaim'
import foxStake from './mockData/foxStake'
import foxExit from './mockData/foxExit'
import yearnDeposit from './mockData/yearnDeposit'
import yearnApproval from './mockData/yearnApproval'
import yearnWithdrawal from './mockData/yearnWithdrawal'
import {
bondToken,
foxToken,
kishuToken,
linkToken,
linkYearnVault,
maticToken,
tribeToken,
uniToken,
uniV2Token,
usdcToken,
usdtToken,
} from './mockData/tokens'
import { SHAPE_SHIFT_ROUTER_CONTRACT } from '../constants'
// Parser instance under test.  rpcUrl is left blank — presumably no RPC
// calls are issued by these unit tests; confirm against TransactionParser.
const txParser = new TransactionParser({ rpcUrl: '' })
describe('parseTx', () => {
describe('multiSig', () => {
it('should be able to parse eth multi sig send', async () => {
const { tx, internalTxs } = multiSigSendEth
const address = '0x76DA1578aC163CA7ca4143B7dEAa428e85Db3042'
const standardTransfer = {
caip19: 'eip155:1/slip44:60',
components: [{ value: '1201235000000000000' }],
from: '0x79fE68B3e4Bc2B91a4C8dfFb5317C7B8813d8Ae7',
to: '0x76DA1578aC163CA7ca4143B7dEAa428e85Db3042',
token: undefined,
totalValue: '1201235000000000000',
type: TransferType.Receive,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
transfers: [standardTransfer],
}
const actual = await txParser.parse(tx, address, internalTxs)
expect(expected).toEqual(actual)
})
})
describe('thor', () => {
it('should be able to parse eth deposit', async () => {
const { tx } = thorSwapDepositEth
const address = '0xCeb660E7623E8f8312B3379Df747c35f2217b595'
const trade: Trade = {
dexName: Dex.Thor,
memo: 'SWAP:THOR.RUNE:thor19f3dsgetxzssvdmqnplfep5fe42fsrvq9u87ax:',
type: TradeType.Trade,
}
const sellTransfer = {
caip19: 'eip155:1/slip44:60',
components: [{ value: '295040000000000000' }],
from: '0xCeb660E7623E8f8312B3379Df747c35f2217b595',
to: '0xC145990E84155416144C532E31f89B840Ca8c2cE',
totalValue: '295040000000000000',
type: TransferType.Send,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'deposit',
parser: 'thor',
},
value: tx.value,
status: Status.Confirmed,
fee: {
caip19: 'eip155:1/slip44:60',
value: '1700235000000000',
},
transfers: [sellTransfer],
trade,
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse token deposit', async () => {
const { tx } = thorSwapDepositUsdc
const address = '0x5a8C5afbCC1A58cCbe17542957b587F46828B38E'
const trade: Trade = {
dexName: Dex.Thor,
memo: 'SWAP:THOR.RUNE:thor1hhjupkzy3t6ccelhz7qw8epyx4rm8a06nlm5ce:110928642111',
type: TradeType.Trade,
}
const sellTransfer = {
caip19: 'eip155:1/erc20:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
components: [{ value: '16598881497' }],
from: '0x5a8C5afbCC1A58cCbe17542957b587F46828B38E',
to: '0xC145990E84155416144C532E31f89B840Ca8c2cE',
token: usdcToken,
totalValue: '16598881497',
type: TransferType.Send,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'deposit',
parser: 'thor',
},
value: tx.value,
status: Status.Confirmed,
fee: {
caip19: 'eip155:1/slip44:60',
value: '4700280000000000',
},
transfers: [sellTransfer],
trade,
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse eth transfer out', async () => {
const { tx, internalTxs } = thorSwapTransferOutEth
const address = '0x5a8C5afbCC1A58cCbe17542957b587F46828B38E'
const trade: Trade = {
dexName: Dex.Thor,
memo: 'OUT:8C859BA50BC2351797F52F954971E1C6BA1F0A77610AC197BD99C4EEC6A3692A',
type: TradeType.Trade,
}
const buyTransfer = {
caip19: 'eip155:1/slip44:60',
components: [{ value: '1579727090000000000' }],
from: '0xC145990E84155416144C532E31f89B840Ca8c2cE',
to: '0x5a8C5afbCC1A58cCbe17542957b587F46828B38E',
token: undefined,
totalValue: '1579727090000000000',
type: TransferType.Receive,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'transferOut',
parser: 'thor',
},
value: tx.value,
status: Status.Confirmed,
transfers: [buyTransfer],
trade,
}
const actual = await txParser.parse(tx, address, internalTxs)
expect(expected).toEqual(actual)
})
it('should be able to parse token transfer out', async () => {
const { tx } = thorSwapTransferOutUsdc
const address = '0x5a8C5afbCC1A58cCbe17542957b587F46828B38E'
const trade: Trade = {
dexName: Dex.Thor,
memo: 'OUT:F3AC4E90AB5951AB9FEB1715B481422B904A40B0F6753CC844E326B1213CF70E',
type: TradeType.Trade,
}
const buyTransfer = {
caip19: 'eip155:1/erc20:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
components: [{ value: '47596471640' }],
from: '0xC145990E84155416144C532E31f89B840Ca8c2cE',
to: '0x5a8C5afbCC1A58cCbe17542957b587F46828B38E',
token: usdcToken,
totalValue: '47596471640',
type: TransferType.Receive,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'transferOut',
parser: 'thor',
},
value: tx.value,
status: Status.Confirmed,
transfers: [buyTransfer],
trade,
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse eth refund', async () => {
const { tx, internalTxs } = thorSwapRefundEth
const address = '0xfc0Cc6E85dFf3D75e3985e0CB83B090cfD498dd1'
const trade: Trade = {
dexName: Dex.Thor,
memo: 'REFUND:851B4997CF8F9FBA806B3780E0C178CCB173AE78E3FD5056F7375B059B22BD3A',
type: TradeType.Refund,
}
const buyTransfer = {
caip19: 'eip155:1/slip44:60',
components: [{ value: '6412730000000000' }],
from: '0xC145990E84155416144C532E31f89B840Ca8c2cE',
to: '0xfc0Cc6E85dFf3D75e3985e0CB83B090cfD498dd1',
token: undefined,
totalValue: '6412730000000000',
type: TransferType.Receive,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'transferOut',
parser: 'thor',
},
value: tx.value,
status: Status.Confirmed,
transfers: [buyTransfer],
trade,
}
const actual = await txParser.parse(tx, address, internalTxs)
expect(expected).toEqual(actual)
})
})
describe('zrx', () => {
it('should be able to parse token -> eth', async () => {
const { tx, internalTxs } = zrxTradeTribeToEth
const address = '0x5bb96c35a68Cba037D0F261C67477416db137F03'
const trade: Trade = {
dexName: Dex.Zrx,
type: TradeType.Trade,
}
const buyTransfer = {
caip19: 'eip155:1/slip44:60',
components: [
{
value: '541566754246167133',
},
],
from: '0xDef1C0ded9bec7F1a1670819833240f027b25EfF',
to: '0x5bb96c35a68Cba037D0F261C67477416db137F03',
token: undefined,
totalValue: '541566754246167133',
type: TransferType.Receive,
}
const sellTransfer = {
caip19: 'eip155:1/erc20:0xc7283b66eb1eb5fb86327f08e1b5816b0720212b',
components: [
{
value: '1000000000000000000000',
},
],
from: '0x5bb96c35a68Cba037D0F261C67477416db137F03',
to: '0x7ce01885a13c652241aE02Ea7369Ee8D466802EB',
token: tribeToken,
totalValue: '1000000000000000000000',
type: TransferType.Send,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: undefined,
parser: 'zrx',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '8308480000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [sellTransfer, buyTransfer],
trade,
}
const actual = await txParser.parse(tx, address, internalTxs)
expect(expected).toEqual(actual)
})
it('should be able to parse eth -> token', async () => {
const { tx } = zrxTradeEthToMatic
const address = '0x564BcA365D62BCC22dB53d032F8dbD35439C9206'
const trade: Trade = {
dexName: Dex.Zrx,
type: TradeType.Trade,
}
const buyTransfer = {
caip19: 'eip155:1/erc20:0x7d1afa7b718fb893db30a3abc0cfc608aacfebb0',
components: [
{
value: '50000000000000000000000',
},
],
from: '0x22F9dCF4647084d6C31b2765F6910cd85C178C18',
to: '0x564BcA365D62BCC22dB53d032F8dbD35439C9206',
token: maticToken,
totalValue: '50000000000000000000000',
type: TransferType.Receive,
}
const sellTransfer = {
caip19: 'eip155:1/slip44:60',
components: [
{
value: '10000000000000000000',
},
],
from: '0x564BcA365D62BCC22dB53d032F8dbD35439C9206',
to: '0xDef1C0ded9bec7F1a1670819833240f027b25EfF',
token: undefined,
totalValue: '10000000000000000000',
type: TransferType.Send,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: undefined,
parser: 'zrx',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '19815285000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [sellTransfer, buyTransfer],
trade,
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse token -> token', async () => {
const { tx } = zrxTradeTetherToKishu
const address = '0xb8b19c048296E086DaF69F54d48dE2Da444dB047'
const trade: Trade = {
dexName: Dex.Zrx,
type: TradeType.Trade,
}
const buyTransfer = {
type: TransferType.Receive,
from: '0xF82d8Ec196Fb0D56c6B82a8B1870F09502A49F88',
to: '0xb8b19c048296E086DaF69F54d48dE2Da444dB047',
caip19: 'eip155:1/erc20:0xa2b4c0af19cc16a6cfacce81f192b024d625817d',
totalValue: '9248567698016204727450',
components: [{ value: '9248567698016204727450' }],
token: kishuToken,
}
const sellTransfer = {
type: TransferType.Send,
from: '0xb8b19c048296E086DaF69F54d48dE2Da444dB047',
to: '0x0d4a11d5EEaaC28EC3F61d100daF4d40471f1852',
caip19: 'eip155:1/erc20:0xdac17f958d2ee523a2206206994597c13d831ec7',
totalValue: '45000000000',
components: [{ value: '45000000000' }],
token: usdtToken,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: undefined,
parser: 'zrx',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '78183644000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [sellTransfer, buyTransfer],
trade,
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse token -> token (multiple swaps)', async () => {
const { tx } = zrxTradeBondToUni
const address = '0x986bB494db49E6f1CDC1be098e3157f8DDC5a821'
const trade: Trade = {
dexName: Dex.Zrx,
type: TradeType.Trade,
}
const buyTransfer1 = {
type: TransferType.Receive,
from: '0xEBFb684dD2b01E698ca6c14F10e4f289934a54D6',
to: address,
caip19: 'eip155:1/erc20:0x1f9840a85d5af5bf1d1762f925bdaddc4201f984',
totalValue: '56639587020747520629',
components: [{ value: '56639587020747520629' }],
token: uniToken,
}
const buyTransfer2 = {
type: TransferType.Receive,
from: '0xd3d2E2692501A5c9Ca623199D38826e513033a17',
to: address,
caip19: 'eip155:1/erc20:0x1f9840a85d5af5bf1d1762f925bdaddc4201f984',
totalValue: '47448670568188553620',
components: [{ value: '47448670568188553620' }],
token: uniToken,
}
const sellTransfer1 = {
type: TransferType.Send,
from: address,
to: '0x6591c4BcD6D7A1eb4E537DA8B78676C1576Ba244',
caip19: 'eip155:1/erc20:0x0391d2021f89dc339f60fff84546ea23e337750f',
totalValue: '53910224825217010944',
components: [{ value: '53910224825217010944' }],
token: bondToken,
}
const sellTransfer2 = {
type: TransferType.Send,
from: address,
to: '0xB17B1342579e4bcE6B6e9A426092EA57d33843D9',
caip19: 'eip155:1/erc20:0x0391d2021f89dc339f60fff84546ea23e337750f',
totalValue: '46089775174782989056',
components: [{ value: '46089775174782989056' }],
token: bondToken,
}
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: undefined,
parser: 'zrx',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '18399681000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [sellTransfer1, buyTransfer1, sellTransfer2, buyTransfer2],
trade,
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
})
describe('self send', () => {
it('should be able to parse eth mempool', async () => {
const { txMempool } = ethSelfSend
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: txMempool.txid,
blockHeight: txMempool.blockHeight,
blockTime: txMempool.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: txMempool.confirmations,
data: undefined,
value: txMempool.value,
status: Status.Pending,
transfers: [
{
type: TransferType.Send,
to: address,
from: address,
caip19: 'eip155:1/slip44:60',
totalValue: '503100000000000',
components: [{ value: '503100000000000' }],
},
{
type: TransferType.Receive,
to: address,
from: address,
caip19: 'eip155:1/slip44:60',
totalValue: '503100000000000',
components: [{ value: '503100000000000' }],
},
],
}
const actual = await txParser.parse(txMempool, address)
expect(expected).toEqual(actual)
})
it('should be able to parse eth', async () => {
const { tx } = ethSelfSend
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHash: tx.blockHash,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '399000000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: address,
to: address,
caip19: 'eip155:1/slip44:60',
totalValue: '503100000000000',
components: [{ value: '503100000000000' }],
},
{
type: TransferType.Receive,
from: address,
to: address,
caip19: 'eip155:1/slip44:60',
totalValue: '503100000000000',
components: [{ value: '503100000000000' }],
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse token mempool', async () => {
const { txMempool } = tokenSelfSend
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: txMempool.txid,
blockHeight: txMempool.blockHeight,
blockTime: txMempool.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: txMempool.confirmations,
data: undefined,
value: txMempool.value,
status: Status.Pending,
transfers: [
{
type: TransferType.Send,
from: address,
to: address,
caip19: 'eip155:1/erc20:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
totalValue: '1502080',
components: [{ value: '1502080' }],
token: usdcToken,
},
{
type: TransferType.Receive,
from: address,
to: address,
caip19: 'eip155:1/erc20:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
totalValue: '1502080',
components: [{ value: '1502080' }],
token: usdcToken,
},
],
}
const actual = await txParser.parse(txMempool, address)
expect(expected).toEqual(actual)
})
it('should be able to parse token', async () => {
const { tx } = tokenSelfSend
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHash: tx.blockHash,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '1011738000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: address,
to: address,
caip19: 'eip155:1/erc20:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
totalValue: '1502080',
components: [{ value: '1502080' }],
token: usdcToken,
},
{
type: TransferType.Receive,
from: address,
to: address,
caip19: 'eip155:1/erc20:0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
totalValue: '1502080',
components: [{ value: '1502080' }],
token: usdcToken,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
})
describe('uniswap', () => {
it('should be able to parse approve', async () => {
const { tx } = uniApprove
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '1447243200000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse add liquidity mempool', async () => {
const { txMempool } = uniAddLiquidity
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: txMempool.txid,
blockHeight: txMempool.blockHeight,
blockTime: txMempool.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: txMempool.confirmations,
data: {
method: 'addLiquidityETH',
parser: 'uniV2',
},
value: txMempool.value,
status: Status.Pending,
transfers: [
{
type: TransferType.Send,
from: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
to: '0x470e8de2eBaef52014A47Cb5E6aF86884947F08c',
caip19: 'eip155:1/erc20:0xc770eefad204b5180df6a14ee197d99d808ee52d',
totalValue: '100000000000000000000',
components: [{ value: '100000000000000000000' }],
token: {
contract: '0xc770EEfAd204B5180dF6a14Ee197D99d808ee52d',
decimals: 18,
name: 'FOX',
symbol: 'FOX',
},
},
{
type: TransferType.Send,
from: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
to: '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
caip19: 'eip155:1/slip44:60',
totalValue: '42673718176645189',
components: [{ value: '42673718176645189' }],
},
],
}
const actual = await txParser.parse(txMempool, address)
expect(expected).toEqual(actual)
})
it('should be able to parse add liquidity', async () => {
const { tx } = uniAddLiquidity
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '26926494400000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
to: '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
caip19: 'eip155:1/slip44:60',
totalValue: '42673718176645189',
components: [{ value: '42673718176645189' }],
},
{
type: TransferType.Send,
from: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
to: '0x470e8de2eBaef52014A47Cb5E6aF86884947F08c',
caip19: 'eip155:1/erc20:0xc770eefad204b5180df6a14ee197d99d808ee52d',
totalValue: '100000000000000000000',
components: [{ value: '100000000000000000000' }],
token: foxToken,
},
{
type: TransferType.Receive,
from: '0x0000000000000000000000000000000000000000',
to: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
caip19: 'eip155:1/erc20:0x470e8de2ebaef52014a47cb5e6af86884947f08c',
totalValue: '1888842410762840601',
components: [{ value: '1888842410762840601' }],
token: uniV2Token,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse remove liquidity mempool', async () => {
const { txMempool } = uniRemoveLiquidity
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: txMempool.txid,
blockHeight: txMempool.blockHeight,
blockTime: txMempool.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: txMempool.confirmations,
data: {
method: 'removeLiquidityETH',
parser: 'uniV2',
},
value: txMempool.value,
status: Status.Pending,
transfers: [
{
type: TransferType.Send,
from: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
to: '0x470e8de2eBaef52014A47Cb5E6aF86884947F08c',
caip19: 'eip155:1/erc20:0x470e8de2ebaef52014a47cb5e6af86884947f08c',
totalValue: '298717642142382954',
components: [{ value: '298717642142382954' }],
token: uniV2Token,
},
],
}
const actual = await txParser.parse(txMempool, address)
expect(expected).toEqual(actual)
})
it('should be able to parse remove liquidity', async () => {
const { tx, internalTxs } = uniRemoveLiquidity
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '4082585000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
to: '0x470e8de2eBaef52014A47Cb5E6aF86884947F08c',
caip19: 'eip155:1/erc20:0x470e8de2ebaef52014a47cb5e6af86884947f08c',
totalValue: '298717642142382954',
components: [{ value: '298717642142382954' }],
token: uniV2Token,
},
{
type: TransferType.Receive,
from: '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
to: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
caip19: 'eip155:1/erc20:0xc770eefad204b5180df6a14ee197d99d808ee52d',
totalValue: '15785079906515930982',
components: [{ value: '15785079906515930982' }],
token: foxToken,
},
{
type: TransferType.Receive,
from: '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
to: '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C',
caip19: 'eip155:1/slip44:60',
totalValue: '6761476182340434',
components: [{ value: '6761476182340434' }],
},
],
}
const actual = await txParser.parse(tx, address, internalTxs)
expect(expected).toEqual(actual)
})
})
describe('fox', () => {
it('should be able to parse claim', async () => {
const { tx } = foxClaim
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '2559843000000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Receive,
from: '0x02FfdC5bfAbe5c66BE067ff79231585082CA5fe2',
to: address,
caip19: 'eip155:1/erc20:0xc770eefad204b5180df6a14ee197d99d808ee52d',
totalValue: '1500000000000000000000',
components: [{ value: '1500000000000000000000' }],
token: foxToken,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
// TODO: parse pending LP Token send to staking contract using stake() contract call
it('should be able to parse stake mempool', async () => {
const { txMempool } = foxStake
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: txMempool.txid,
blockHeight: txMempool.blockHeight,
blockTime: txMempool.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: txMempool.confirmations,
data: undefined,
value: txMempool.value,
status: Status.Pending,
transfers: [],
}
const actual = await txParser.parse(txMempool, address)
expect(expected).toEqual(actual)
})
it('should be able to parse stake', async () => {
const { tx } = foxStake
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '4650509500000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: address,
to: '0xDd80E21669A664Bce83E3AD9a0d74f8Dad5D9E72',
caip19: 'eip155:1/erc20:0x470e8de2ebaef52014a47cb5e6af86884947f08c',
totalValue: '99572547380794318',
components: [{ value: '99572547380794318' }],
token: <PASSWORD>,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should be able to parse exit mempool', async () => {
const { txMempool } = foxExit
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: txMempool.txid,
blockHeight: txMempool.blockHeight,
blockTime: txMempool.blockTime,
address: address,
caip2: 'eip155:1',
confirmations: txMempool.confirmations,
data: undefined,
value: txMempool.value,
status: Status.Pending,
transfers: [],
}
const actual = await txParser.parse(txMempool, address)
expect(expected).toEqual(actual)
})
it('should be able to parse exit', async () => {
const { tx } = foxExit
const address = '0x6bF198c2B5c8E48Af4e876bc2173175b89b1DA0C'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: undefined,
value: tx.value,
status: Status.Confirmed,
fee: {
value: '6136186875000000',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Receive,
from: '0xDd80E21669A664Bce83E3AD9a0d74f8Dad5D9E72',
to: address,
caip19: 'eip155:1/erc20:0x470e8de2ebaef52014a47cb5e6af86884947f08c',
totalValue: '531053586030903030',
components: [{ value: '531053586030903030' }],
token: uniV2Token,
},
{
type: TransferType.Receive,
from: '0xDd80E21669A664Bce83E3AD9a0d74f8Dad5D9E72',
to: address,
caip19: 'eip155:1/erc20:0xc770eefad204b5180df6a14ee197d99d808ee52d',
totalValue: '317669338073988',
components: [{ value: '317669338073988' }],
token: foxToken,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
})
describe('yearn', () => {
it('should parse approval', async () => {
const { tx } = yearnApproval
const address = '0x1399D13F3A0aaf08f7C5028D81447a311e4760c4'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'approve',
parser: 'yearn',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '4519526097650998',
caip19: 'eip155:1/slip44:60',
},
transfers: [],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should parse deposit', async () => {
const { tx } = yearnDeposit
const address = '0x1399D13F3A0aaf08f7C5028D81447a311e4760c4'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'deposit',
parser: 'yearn',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '18139009291874667',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: address,
to: SHAPE_SHIFT_ROUTER_CONTRACT,
caip19: 'eip155:1/erc20:0x514910771af9ca656af840dff83e8264ecf986ca',
totalValue: '999961394864662132',
components: [{ value: '999961394864662132' }],
token: linkToken,
},
{
type: TransferType.Receive,
from: '0x0000000000000000000000000000000000000000',
to: address,
caip19: 'eip155:1/erc20:0x671a912c10bba0cfa74cfc2d6fba9ba1ed9530b2',
totalValue: '987002304279657611',
components: [{ value: '987002304279657611' }],
token: linkYearnVault,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
it('should parse withdrawal', async () => {
const { tx } = yearnWithdrawal
const address = '0x1399D13F3A0aaf08f7C5028D81447a311e4760c4'
const expected: Tx = {
txid: tx.txid,
blockHeight: tx.blockHeight,
blockTime: tx.blockTime,
blockHash: tx.blockHash,
address: address,
caip2: 'eip155:1',
confirmations: tx.confirmations,
data: {
method: 'withdraw',
parser: 'yearn',
},
value: tx.value,
status: Status.Confirmed,
fee: {
value: '19460274119661600',
caip19: 'eip155:1/slip44:60',
},
transfers: [
{
type: TransferType.Send,
from: address,
to: '0x0000000000000000000000000000000000000000',
caip19: 'eip155:1/erc20:0x671a912c10bba0cfa74cfc2d6fba9ba1ed9530b2',
totalValue: '493501152139828806',
components: [{ value: '493501152139828806' }],
token: link<PASSWORD>,
},
{
type: TransferType.Receive,
from: '0x671a912C10bba0CFA74Cfc2d6Fba9BA1ed9530B2',
to: address,
caip19: 'eip155:1/erc20:0x514910771af9ca656af840dff83e8264ecf986ca',
totalValue: '500482168225493862',
components: [{ value: '500482168225493862' }],
token: linkToken,
},
],
}
const actual = await txParser.parse(tx, address)
expect(expected).toEqual(actual)
})
})
})
|
#!/bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Default to the primary X display when DISPLAY is unset (e.g. when launched
# from cron or an ssh session without X forwarding).
export DISPLAY="${DISPLAY:-:0}"
# Launch the voice-control demo; blocks until the Python script exits.
python3 run_yt_voice_control.py
|
#!/bin/bash
set -euo pipefail

# Delete the CloudFormation service-role stack recorded in Buildkite meta-data,
# if one was created.
#
# `--default ''` keeps the lookup from aborting the script under `set -e` when
# the meta-data key was never set; the -n guard below then simply skips the
# deletion (the guard was unreachable without it, because a missing key makes
# `buildkite-agent meta-data get` exit non-zero).
service_role_stack="$(buildkite-agent meta-data get service-role-stack-name --default '')"

if [ -n "${service_role_stack}" ]
then
  echo "--- Deleting service-role stack $service_role_stack"
  aws cloudformation delete-stack --stack-name "$service_role_stack"
  # Block until deletion finishes so later pipeline steps see a clean account.
  aws cloudformation wait stack-delete-complete --stack-name "$service_role_stack"
fi
|
const iconv = require('iconv-lite');

/**
 * Builds a CSV of blood-glucose results and triggers a browser download.
 *
 * Fetches the form header and the result rows from the local API, flattens
 * each result into one CSV row per meter number, encodes the text as TIS-620
 * so Thai characters open correctly in Excel, and clicks a temporary <a>
 * element to start the download.
 *
 * @returns {Promise<undefined>} resolves once the download has been triggered
 */
export async function exportCsv() {
  const bgFormRes = await fetch('http://localhost:3001/api/settingProgramForm/getSettingBgForm');
  const bgForm = await bgFormRes.json();

  const resultsRes = await fetch('http://localhost:3001/api/BgDataEntry/getResultForm');
  const results = await resultsRes.json();

  // First row: report topic; second row: Thai column headers.
  const data = [
    [bgForm[0].topic_name],
    ['ลำดับ', 'รหัสสมาชิก', 'หน่วยงาน', 'หมายเลขเครื่อง', 'Serial number', 'Lot.strip', 'วันที่ได้รับ', 'วันที่ตรวจวิเคราะห์', 'เครื่อง'],
  ];
  console.log(results);

  // One CSV row per meter number owned by each member.
  for (let index = 0; index < results.length; index++) {
    const result = results[index];
    for (let m = 0; m < result.meter_number.length; m++) {
      data.push([
        index + 1,
        result.blood_glucose_member_id,
        result.department_name,
        result.meter_number[m],
        result.serial_number[0],
        result.lot_strip[0],
        result.receive_date[0],
        result.analyze_date[0],
        result.meter_brand,
      ]);
    }
  }

  // NOTE(review): fields are joined naively; values containing commas or
  // newlines would need CSV quoting — confirm upstream data cannot contain them.
  const csvString = data.map(row => row.join(',')).join('\n');
  const csvThai = iconv.encode(csvString, 'TIS-620');

  const a = document.createElement('a');
  a.href = 'data:attachment/csv;base64,' + csvThai.toString('base64');
  // Fix: target keywords are lowercase and case-sensitive; '_Blank' would
  // address a named browsing context called "_Blank" instead.
  a.target = '_blank';
  a.download = 'testFile.csv';
  document.body.appendChild(a);
  return a.click();
}
import re


def extract_product_info(product_line):
    """Parse one pipe-delimited product record into its item fields.

    Record layout (0-based): field 2 is the item description (with '+'
    standing in for spaces), followed by barcode, legacy item code,
    inventory unit, item type, NCM code and IPI exception code.

    Short records no longer raise IndexError: TIPO_ITEM falls back to '0'
    (the original default) and every other missing field to ''.

    :param product_line: one '|'-separated product record
    :return: tuple (DESCR_ITEM, COD_BARRA, COD_ANT_ITEM, UNID_INV,
                    TIPO_ITEM, COD_NCM, EX_IPI)
    """
    fields = product_line.split('|')

    def field(index, default=''):
        # Safe positional access: the original code only guarded index 6 and
        # crashed with IndexError on records shorter than 9 fields (including
        # the 8-field sample below, which has no index 8).
        return fields[index] if len(fields) > index else default

    DESCR_ITEM = field(2).replace('+', ' ')
    COD_BARRA = field(3)
    COD_ANT_ITEM = field(4)
    UNID_INV = field(5)
    TIPO_ITEM = field(6, '0')
    COD_NCM = field(7)
    EX_IPI = field(8)
    return (DESCR_ITEM, COD_BARRA, COD_ANT_ITEM, UNID_INV, TIPO_ITEM, COD_NCM, EX_IPI)


# Test the function. Note: with this 8-field sample the description is
# fields[2] ('456'), and there is no field 8, so EX_IPI comes back ''.
product_line = "123|Product+Name|456|789|Unit|Type|NCM123|EX123"
print(extract_product_info(product_line))  # ("456", "789", "Unit", "Type", "NCM123", "EX123", "")
/**
* @license
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { FirebaseApp } from '@firebase/app';
import { GreCAPTCHA, GreCAPTCHATopLevel } from '../src/recaptcha';
import { Provider } from '@firebase/component';
import { AppCheck, CustomProvider } from '../src';
/** Placeholder reCAPTCHA site key consumed by the fakes below. */
export declare const FAKE_SITE_KEY = "fake-site-key";
/** Returns a minimal FirebaseApp stub; `overrides` is merged over its defaults. */
export declare function getFakeApp(overrides?: Record<string, any>): FirebaseApp;
/** Returns an AppCheck instance wired to the given (fake) app. */
export declare function getFakeAppCheck(app: FirebaseApp): AppCheck;
/** Returns a fully initialized FirebaseApp for tests that need the real thing. */
export declare function getFullApp(): FirebaseApp;
/** Returns a CustomProvider that yields a canned App Check token. */
export declare function getFakeCustomTokenProvider(): CustomProvider;
/** Returns a fake heartbeat service provider; `fakeLogString` seeds its log. */
export declare function getFakeHeartbeatServiceProvider(fakeLogString?: string): Provider<'heartbeat'>;
/** Returns a stubbed grecaptcha object; top-level variant when `isTopLevel`. */
export declare function getFakeGreCAPTCHA(isTopLevel?: boolean): GreCAPTCHATopLevel | GreCAPTCHA;
/**
 * Returns all script tags in DOM matching our reCAPTCHA url pattern.
 * Tests in other files may have inserted multiple reCAPTCHA scripts, because they don't
 * care about it.
 */
export declare function findgreCAPTCHAScriptsOnPage(): HTMLScriptElement[];
/** Removes every reCAPTCHA script tag found by the finder above. */
export declare function removegreCAPTCHAScriptsOnPage(): void;
|
class Trees {
    /**
     * Counts the structurally distinct binary search trees that can be built
     * from {@code n} nodes — the n-th Catalan number — using the standard
     * dynamic-programming recurrence C(i) = sum_{j &lt; i} C(j) * C(i - j - 1).
     *
     * @param n number of nodes (n &gt;= 1)
     * @return the number of distinct BSTs with n nodes
     */
    public static long numTrees(int n)
    {
        long[] catalan = new long[n + 1];
        // Base cases: the empty tree and the single-node tree.
        catalan[0] = 1;
        catalan[1] = 1;
        for (int nodes = 2; nodes <= n; nodes++) {
            long total = 0;
            // Choose each node as the root: `left` nodes go into the left
            // subtree and the remaining nodes - left - 1 into the right.
            for (int left = 0; left < nodes; left++) {
                total += catalan[left] * catalan[nodes - left - 1];
            }
            catalan[nodes] = total;
        }
        return catalan[n];
    }

    public static void main(String[] args)
    {
        int n = 4;
        System.out.println(numTrees(n));
    }
}
<reponame>mgonnet/ludorum-player-cbr.js
/**
*/
games.Risk = (function () {
	return {
		/** The "Risk" encoding has 83 features , 42 to define the number of troops in a territory,
		 * 42 to define to which player that territory corresponds based on its turn,
		 * being 0 the corresponding player with the current turn,
		 * 1 the next and so successively and 1 that determines the stage of the game */
		Turn: function turn(game, otherPlayer) {
			// NOTE(review): `active` is used as its own initializer, so this is
			// always game.players.indexOf(undefined) === -1; the currently
			// active player was probably intended — confirm against the
			// ludorum game API before relying on this.
			var active = game.players.indexOf(active);
			var other = game.players.indexOf(otherPlayer);
			// Distance from the active player in turn order, modulo 6 players.
			if (other > active) {
				return other - active;
			} else {
				return 6 - (active - other);
			}
		},
		Risk: function encodingRisk(game, moves, ply) {
			// NOTE(review): `turn`, `s` and `stage` are not in scope here —
			// `turn` names the function expression above (visible only inside
			// itself), and `s`/`stage` are never defined. Also `.concat(s[t][1])`
			// runs outside the map callback, where `t` is undefined. This
			// encoder cannot execute as written; confirm the intended
			// feature layout before use.
			return {
				ply: ply,
				features: game.boardMap.territories
					.map(t => turn(game, s[t][0])).concat(s[t][1]).concat(stage), // For each territory , assign colour and number of troops , change colour based on turn.
				actions: !moves ? null : game.players.map(function (p) {
					return moves.hasOwnProperty(p) ? moves[p] : null;
				})
			};
		}
	};
})();
|
<reponame>thelegendoflinas/ImageEditor
package com.createchance.imageeditor.transitions;
import com.createchance.imageeditor.drawers.SqueezeTransDrawer;
/**
* Squeeze transition.
*
* @author createchance
* @date 2019/1/1
*/
public class SqueezeTransition extends AbstractTransition {
    private static final String TAG = "SqueezeTransition";

    // Strength of the RGB channel separation applied while squeezing.
    private float mColorSeparation = 0.04f;

    public SqueezeTransition() {
        super(SqueezeTransition.class.getSimpleName(), TRANS_SQUEEZE);
    }

    /** Instantiates the drawer that renders this transition. */
    @Override
    protected void getDrawer() {
        mDrawer = new SqueezeTransDrawer();
    }

    /** Pushes this transition's parameters down to the drawer. */
    @Override
    protected void setDrawerParams() {
        super.setDrawerParams();
        ((SqueezeTransDrawer) mDrawer).setColorSeparation(mColorSeparation);
    }
}
|
<gh_stars>0
import React from "react";
export const CarImg = props =>
// eslint-disable-next-line
<img
className="d-block img-fluid"
{...props}>{props.children}
</img>; |
<gh_stars>0
package com.example.admin.bluetoothrwdemo.presenter;
public interface ICheckTagPresenter {
	/**
	 * Initialize data.
	 */
	void onCreate();
	/**
	 * Start the tag inventory (stocktaking) scan.
	 */
	void startCheckTag();
	/**
	 * Stop the tag inventory scan.
	 */
	void stopCheckTag();
	/**
	 * Clear the information of tags already inventoried.
	 */
	void clearCheckedTagInfo();
}
|
package acceptance.td;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.CharStreams;
import com.treasuredata.client.TDClient;
import com.treasuredata.client.model.TDJob;
import com.treasuredata.client.model.TDJobRequest;
import com.treasuredata.client.model.TDJobSummary;
import com.treasuredata.client.model.TDResultFormat;
import io.digdag.client.DigdagClient;
import io.digdag.client.api.Id;
import io.netty.handler.codec.http.FullHttpRequest;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.littleshoot.proxy.HttpProxyServer;
import utils.TemporaryDigdagServer;
import utils.TestUtils;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import static acceptance.td.Secrets.TD_API_KEY;
import static acceptance.td.Secrets.TD_API_ENDPOINT;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeThat;
import static utils.TestUtils.addWorkflow;
import static utils.TestUtils.attemptSuccess;
import static utils.TestUtils.copyResource;
import static utils.TestUtils.expect;
import static utils.TestUtils.objectMapper;
import static utils.TestUtils.pushAndStart;
import static utils.TestUtils.startRequestTrackingProxy;
/**
 * Integration test: submits a Treasure Data result-export job through a
 * digdag workflow and verifies the exported row count.
 */
public class TdResultExportIT
{
    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    private TDClient client;
    private String database;
    private String table;
    private String connectionId;

    private static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");

    private OkHttpClient httpClient;
    private Path projectDir;

    // Assigned inside the test; released in tearDown. The original code
    // declared a shadowing local for `server`, leaving this field null.
    public TemporaryDigdagServer server;
    private HttpProxyServer proxyServer;
    private String sampleJobId;

    @Before
    public void setUp()
            throws Exception
    {
        // Skip the whole test when TD credentials are not configured.
        assumeThat(TD_API_KEY, not(isEmptyOrNullString()));
        assumeThat(TD_API_ENDPOINT, not(isEmptyOrNullString()));

        projectDir = folder.getRoot().toPath().toAbsolutePath().normalize();

        client = TDClient.newBuilder(false)
                .setApiKey(TD_API_KEY)
                .setEndpoint(TD_API_ENDPOINT)
                .build();

        // Reference job whose result will be exported by the workflow.
        sampleJobId = client.submit(TDJobRequest.newPrestoQuery("sample_datasets",
                "select time from www_access"));

        database = "_digdag_integration_result_export_td_test_db";
        client.createDatabaseIfNotExists(database);
        table = "_digdag_integration_result_export_td_test_table";
        client.createTableIfNotExists(database, table);

        this.httpClient = new OkHttpClient();
    }

    @Test
    public void testSubmitResultExportJob()
            throws Exception
    {
        // Create a throwaway "treasure_data" result connection via the v4 API.
        String resultConnectorName = "digdag_test_" + UUID.randomUUID().toString().replace('-', '_');
        String json = "{\"description\":null,\"name\":\"" + resultConnectorName + "\"," +
                "\"settings\":{\"api_key\":\"\",\"api_hostname\":\"\"}," +
                "\"shared\":false,\"type\":\"treasure_data\"}";
        RequestBody body = RequestBody.create(JSON, json);
        Request request = new Request.Builder()
                .url("https://" + TD_API_ENDPOINT + "/v4/connections")
                .header("authorization", "TD1 " + TD_API_KEY)
                .post(body)
                .build();
        Response response = httpClient.newCall(request).execute();
        ObjectMapper objectMapper = new ObjectMapper();
        JsonNode jsonNode = objectMapper.readTree(response.body().string());
        connectionId = jsonNode.get("id").asText();

        List<FullHttpRequest> requests = Collections.synchronizedList(new ArrayList<>());

        addWorkflow(projectDir, "acceptance/td/td_result_export/td_result_export.dig");
        proxyServer = startRequestTrackingProxy(requests);
        String proxyUrl = "http://" + proxyServer.getListenAddress().getHostString() + ":" + proxyServer.getListenAddress().getPort();
        // Fix: assign the field instead of declaring a shadowing local, so the
        // server can be reached (and eventually stopped) outside this method.
        server = TemporaryDigdagServer.builder()
                .configuration(Secrets.secretsServerConfiguration())
                .environment(ImmutableMap.of("http_proxy", proxyUrl))
                .build();
        server.start();

        copyResource("acceptance/td/td_result_export/td_result_export.dig", projectDir.resolve("td_result_export.dig"));
        TestUtils.addWorkflow(projectDir, "acceptance/td/td_result_export/td_result_export.dig");
        Id projectId = TestUtils.pushProject(server.endpoint(), projectDir);
        DigdagClient digdagClient = DigdagClient.builder()
                .host(server.host())
                .port(server.port())
                .build();
        digdagClient.setProjectSecret(projectId, "td.apikey", TD_API_KEY);
        Id attemptId = pushAndStart(server.endpoint(), projectDir, "td_result_export", ImmutableMap.of(
                "test_job_id", sampleJobId,
                "test_result_settings", "{\"user_database_name\":\""+database+"\",\"user_table_name\":\""+table+"\",\"mode\":\"replace\"}",
                "test_result_connection", resultConnectorName,
                "td.use_ssl", "false")
        );
        expect(Duration.ofMinutes(5), attemptSuccess(server.endpoint(), attemptId));

        // The exported table should contain the 5000 rows of the sample query.
        String selectCountJobId = client.submit(TDJobRequest.newPrestoQuery(database, "select count(*) from " + table));
        TestUtils.expect(Duration.ofMinutes(5), jobSuccess(client, selectCountJobId));
        List<ArrayNode> result = downloadResult(selectCountJobId);
        assertThat(result.get(0).get(0).asInt(), is(5000));
    }

    /** Downloads a job's result as one JSON array node per row. */
    private List<ArrayNode> downloadResult(String jobId)
    {
        return client.jobResult(jobId, TDResultFormat.JSON, input -> {
            try {
                List<String> lines = CharStreams.readLines(new InputStreamReader(input));
                ObjectReader reader = objectMapper().readerFor(ArrayNode.class);
                List<ArrayNode> result = new ArrayList<>();
                for (String line : lines) {
                    result.add(reader.readValue(line));
                }
                return result;
            }
            catch (IOException e) {
                throw Throwables.propagate(e);
            }
        });
    }

    /** Polls a TD job: true on SUCCESS, fails the test on any finished failure. */
    private static Callable<Boolean> jobSuccess(TDClient client, String jobId)
    {
        return () -> {
            TDJobSummary status = client.jobStatus(jobId);
            if (status.getStatus() == TDJob.Status.SUCCESS) {
                return true;
            }
            if (status.getStatus().isFinished()) {
                fail(status.getStatus().toString());
            }
            return false;
        };
    }

    @After
    public void deleteConnection()
            throws IOException
    {
        if(connectionId != null){
            Request request = new Request.Builder()
                    .url("https://" + TD_API_ENDPOINT + "/v4/connections/" + connectionId)
                    .header("authorization", "TD1 " + TD_API_KEY)
                    .delete()
                    .build();
            httpClient.newCall(request).execute();
        }
    }

    @After
    public void tearDown()
    {
        // Release resources started by the test (JUnit runs every @After).
        if (proxyServer != null) {
            proxyServer.stop();
        }
        // TODO(review): also terminate `server` here once the correct
        // TemporaryDigdagServer shutdown API is confirmed; the original code
        // never stopped it (and could not, due to the shadowing local).
    }
}
|
<reponame>liuzhen9327/meida
package com.meida.controller;
import com.jfinal.aop.Before;
import com.jfinal.core.Controller;
import com.jfinal.weixin.sdk.api.OpenIdApi;
import com.meida.interceptor.AuthInterceptor;
import org.apache.commons.lang.StringUtils;
/**
* @author liuzhen
* 2015年9月2日下午7:05:28
*/
@Before(AuthInterceptor.class)
public class IndexController extends BaseController {

	/** Renders the landing page. */
	public void index() {
		renderJsp("/index.jsp");
	}

//	public void login() {
//		renderJsp("/login.jsp");
//	}

	/**
	 * Renders the WeChat registration page. When the request carries an
	 * OAuth "code" parameter it is exchanged for the user's openId, which
	 * is exposed to the JSP as request attribute "openId".
	 */
	public void register() {
		String code = getPara("code");
//		System.out.println(code);
		if (StringUtils.isNotEmpty(code)) {
			String openId = OpenIdApi.getOpenId(code);
			setAttr("openId", openId);
//			System.out.println(openId);
		}
		renderJsp("/WEB-INF/register.jsp");
	}
}
|
// Integrates one physics step for every entity: explicit-Euler position
// update, velocity damping, then reflection off the x=0 / y=0 boundaries.
function updateEntities(entities, deltaTime) {
  const friction = 0.3;
  entities.forEach(entity => {
    // Update position based on velocity
    entity.position.x += entity.velocity.vx * deltaTime;
    entity.position.y += entity.velocity.vy * deltaTime;
    // Apply friction
    // NOTE(review): the damping factor (1 - friction) is applied once per
    // call and is NOT scaled by deltaTime, so the decay rate is frame-rate
    // dependent unless callers use a fixed timestep — confirm.
    entity.velocity.vx *= (1 - friction);
    entity.velocity.vy *= (1 - friction);
    // Handle collisions with the environment
    // Only the lower/left walls are handled here; upper/right bounds are
    // sketched in the commented block below.
    if (entity.position.x < 0) {
      entity.position.x = 0;
      entity.velocity.vx = -entity.velocity.vx; // Reverse velocity on collision
    }
    if (entity.position.y < 0) {
      entity.position.y = 0;
      entity.velocity.vy = -entity.velocity.vy; // Reverse velocity on collision
    }
    // Assuming environment boundaries are known, handle collisions accordingly
    // For example, if the environment has a boundary at maxX, maxY:
    // if (entity.position.x > maxX) {
    //   entity.position.x = maxX;
    //   entity.velocity.vx = -entity.velocity.vx; // Reverse velocity on collision
    // }
    // if (entity.position.y > maxY) {
    //   entity.position.y = maxY;
    //   entity.velocity.vy = -entity.velocity.vy; // Reverse velocity on collision
    // }
  });
}
<reponame>MarcosRibas/Projeto100Exercicios
"""Ex027 Faça um programa que leia o nome completo de uma pessoa, mostrando em seguida o primeiro e o
Último nome separadamente.
Exemplo: <NAME>
Primeiro = Ana
Último = Souza"""
nome = str(input('Digite seu nome completo: ')).strip().split()
last = len(nome) - 1
print('Seu primeiro nome é: ', nome[0])
print('Seu ultimo nome é: ', nome[last]) |
#!/bin/bash
set -x

# Copies the local `bash_history` file into the remote user's home directory
# as ~/.bash_history.
function clean_up() {
	# Fix: quote the remote spec so hosts/usernames containing spaces or
	# glob characters are not word-split by the shell.
	scp -r bash_history "${Username}@${HOST}:.${HISTORY}"
}

if [ $# -eq 2 ]; then
	HISTORY=bash_history
	HOST=$1
	Username=$2
	clean_up
else
	echo -e "\n\nUsage: $0 {Hostname} {username}"
	echo -e "ex: $0 devlacpo1 billn\n\n"
fi
|
#! /usr/bin/env bash
#
# Copyright (c) 2015 Nat! - Mulle kybernetiK
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of Mulle kybernetiK nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#
MULLE_MAKE_COMPILER_SH="included"
# r_platform_c_compiler()
# {
# log_entry "r_platform_c_compiler" "$@"
#
# local name
#
# if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
# then
# log_trace2 "CC: ${DEFINITION_CC}"
# fi
#
# name="${DEFINITION_CC}"
#
# case "${MULLE_UNAME}" in
# mingw)
# RVAL="${name:-cl}"
# ;;
#
# *)
# RVAL="${name:-cc}"
# ;;
# esac
# }
#
#
# r_platform_cxx_compiler()
# {
# log_entry "r_platform_cxx_compiler" "$@"
#
# local name
#
# if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
# then
# log_trace2 "CXX: ${DEFINITION_CXX}"
# fi
#
# name="${DEFINITION_CXX}"
#
# case "${MULLE_UNAME}" in
# mingw)
# RVAL="${name:-cl}"
# ;;
#
# *)
# RVAL="${name:-c++}"
# ;;
# esac
# }
#
# assume default is release and the flags
# are set for that
#
make::compiler::r_configuration_options()
{
   log_entry "make::compiler::r_configuration_options" "$@"

   local name="$1"
   local configuration="$2"

   # Returns (in RVAL) extra compiler options for Debug/Test configurations;
   # empty for Release, whose flags are assumed to be set elsewhere.
   RVAL=
   case "${name%.*}" in
      cl|*-cl)
         case "${configuration}" in
            Debug|Test)
               # NOTE(review): this branch reads OPTION_CL_DEBUG while the
               # generic branch reads DEFINITION_CC_DEBUG — confirm the
               # differing variable prefix is intentional, not a stale name.
               RVAL="${OPTION_CL_DEBUG:-/Od /Zi}"
            ;;
         esac
      ;;

      *)
         case "${configuration}" in
            Debug|Test)
               RVAL="${DEFINITION_CC_DEBUG:--g -O0}"
            ;;
         esac
      ;;
   esac
}
#
# this should be part of mulle-platform
#
make::compiler::r_darwin_sdkpath_for_sdk()
{
   local sdk="$1"

   local sdkpath

   # Resolves the filesystem path of a macOS SDK into RVAL; fails hard if
   # the SDK is not installed.
   if [ "${sdk}" = "Default" ]
   then
      # on 10.6 this will fail as --show-sdk-path ain't there
      sdkpath="`rexekutor xcrun --show-sdk-path 2> /dev/null`"
      if [ -z "${sdkpath}" ]
      then
         # hardcode SDK for now
         sdkpath="`xcode-select -print-path`" || exit 1
         case "`sw_vers -productVersion 2> /dev/null`" in
            10\.6\.*)
               r_filepath_concat "${sdkpath}" "SDKs/MacOSX10.6.sdk"
            ;;

            *)
               r_filepath_concat "${sdkpath}" "Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk"
            ;;
         esac
         sdkpath="${RVAL}"
         if [ ! -d "${sdkpath}" ]
         then
            fail "Couldn't figure out default SDK"
         fi
      fi
   else
      # on 10.6 this is basically impossible to do
      sdkpath="`rexekutor xcrun --sdk "${sdk}" --show-sdk-path`"
   fi

   if [ "${sdkpath}" = "" ]
   then
      fail "SDK \"${sdk}\" is not installed"
   fi
   RVAL="${sdkpath}"
}
# compiler is re
make::compiler::r_get_sdkpath()
{
   log_entry "make::compiler::r_get_sdkpath" "$@"

   local sdk="$1"

   local sdkpath

   # Returns the SDK path in RVAL (empty on non-darwin platforms).
   # Returns 1 without looking anything up when SDK detection is disabled.
   RVAL=""
   if [ "${DEFINITION_DETERMINE_SDK}" = 'NO' ]
   then
      return 1
   fi

   case "${MULLE_UNAME}" in
      darwin)
         make::compiler::r_darwin_sdkpath_for_sdk "${sdk}"
      ;;
   esac
   return 0
}
make::compiler::r_default_flag_definer()
{
   # Turns a preprocessor definition like FOO=1 into the gcc/clang style
   # command line flag -DFOO=1 (returned in RVAL).
   RVAL="-D$*"
}
#
# Mash some known settings from xcodebuild together for regular
# OTHER_CFLAGS
# WARNING_CFLAGS
# COMPILER_PREPROCESSOR_DEFINITIONS
#
# The -I/-isystem generation s done somewhere else
#
make::compiler::r_cppflags_value()
{
   log_entry "make::compiler::r_cppflags_value" "$@"

   # Combines CPPFLAGS / OTHER_CPPFLAGS (environment and definitions) plus
   # GCC_PREPROCESSOR_DEFINITIONS into RVAL. A custom flag definer (e.g. for
   # cl.exe style /D flags) can be passed as $1.
   local flag_definer="${1:-make::compiler::r_default_flag_definer}"

   if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
   then
      log_trace2 "ENV CPPFLAGS: ${CPPFLAGS}"
      log_trace2 "ENV OTHER_CPPFLAGS: ${OTHER_CPPFLAGS}"
      log_trace2 "CPPFLAGS: ${DEFINITION_CPPFLAGS}"
      log_trace2 "OTHER_CPPFLAG: ${DEFINITION_OTHER_CPPFLAGS}"
   fi

   local result

   # A "+=" style definition appends to the environment value; otherwise the
   # definition overrides the environment.
   if make::definition::is_plus_key "DEFINITION_CPPFLAGS"
   then
      r_concat "${DEFINITION_CPPFLAGS}" "${CPPFLAGS}"
      result="${RVAL}"
   else
      result="${DEFINITION_CPPFLAGS:-${CPPFLAGS}}"
   fi

   if make::definition::is_plus_key "DEFINITION_OTHER_CPPFLAGS"
   then
      r_concat "${DEFINITION_OTHER_CPPFLAGS}" "${OTHER_CPPFLAGS}"
      r_concat "${result}" "${RVAL}"
      result="${RVAL}"
   else
      r_concat "${result}" "${DEFINITION_OTHER_CPPFLAGS:-${OTHER_CPPFLAGS}}"
      result="${RVAL}"
   fi

   # NOTE(review): 'compiler' is neither a parameter nor a local here; it is
   # picked up from the caller's scope via bash dynamic scoping — confirm.
   case "${compiler%.*}" in
      c++|cc|gcc*|*clang*|"")
         if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
         then
            log_trace2 "DEFINITION_GCC_PREPROCESSOR_DEFINITIONS: ${DEFINITION_GCC_PREPROCESSOR_DEFINITIONS}"
         fi

         local definition

         IFS=","
         shell_disable_glob
         for definition in ${DEFINITION_GCC_PREPROCESSOR_DEFINITIONS}
         do
            # Bugfix: the loop variable is 'definition'; the previous code
            # passed the undefined variable 'i' here, so every preprocessor
            # definition was emitted as an empty "-D" flag.
            "${flag_definer}" "${definition}"
            r_concat "${result}" "${RVAL}"
            result="${RVAL}"
         done
         IFS="${DEFAULT_IFS}"
         shell_enable_glob
      ;;
   esac

   RVAL="${result}"
}
make::compiler::_r_cflags_value()
{
   log_entry "make::compiler::_r_cflags_value" "$@"

   local compiler="$1"
   local configuration="$2"
   # When 'YES' (the default), configuration specific options (e.g. debug
   # flags) are appended after the warning flags.
   local addoptflags="${3:-YES}"

   if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
   then
      log_trace2 "WARNING_CFLAGS: ${DEFINITION_WARNING_CFLAGS}"
   fi

   local result

   result="${DEFINITION_WARNING_CFLAGS}"

   if [ "${addoptflags}" = 'YES' ]
   then
      make::compiler::r_configuration_options "${compiler}" "${configuration}"
      r_concat "${result}" "${RVAL}"
      result="${RVAL}"
   fi
   RVAL="${result}"
}
# Compose the effective CFLAGS value into RVAL from environment,
# definitions and configuration options.
make::compiler::r_cflags_value()
{
   log_entry "make::compiler::r_cflags_value" "$@"

   local compiler="$1"
   local configuration="$2"
   local addoptflags="$3"

   if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
   then
      log_trace2 "ENV CFLAGS: ${CFLAGS}"
      log_trace2 "ENV OTHER_CFLAGS: ${OTHER_CFLAGS}"
      log_trace2 "CFLAGS: ${DEFINITION_CFLAGS}"
      log_trace2 "OTHER_CFLAGS: ${DEFINITION_OTHER_CFLAGS}"
   fi

   local result

   # DEFINITION_CFLAGS overrides env CFLAGS unless it is a += definition
   if make::definition::is_plus_key "DEFINITION_CFLAGS"
   then
      r_concat "${DEFINITION_CFLAGS}" "${CFLAGS}"
      result="${RVAL}"
   else
      result="${DEFINITION_CFLAGS:-${CFLAGS}}"
   fi

   if make::definition::is_plus_key "DEFINITION_OTHER_CFLAGS"
   then
      r_concat "${DEFINITION_OTHER_CFLAGS}" "${OTHER_CFLAGS}"
      r_concat "${result}" "${RVAL}"
      result="${RVAL}"
   else
      r_concat "${result}" "${DEFINITION_OTHER_CFLAGS:-${OTHER_CFLAGS}}"
      result="${RVAL}"
   fi

   # append warning/optimization flags
   make::compiler::_r_cflags_value "${compiler}" "${configuration}" "${addoptflags}"
   r_concat "${result}" "${RVAL}"
}
# Compose the effective CXXFLAGS value into RVAL; shares the warning and
# optimization flag logic with the C flag composition.
make::compiler::r_cxxflags_value()
{
   log_entry "make::compiler::r_cxxflags_value" "$@"

   local compiler="$1"
   local configuration="$2"
   local addoptflags="$3"

   if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
   then
      log_trace2 "ENV CXXFLAGS: ${CXXFLAGS}"
      log_trace2 "ENV OTHER_CXXFLAGS: ${OTHER_CXXFLAGS}"
      log_trace2 "CXXFLAGS: ${DEFINITION_CXXFLAGS}"
      log_trace2 "OTHER_CXXFLAGS: ${DEFINITION_OTHER_CXXFLAGS}"
   fi

   local result

   # DEFINITION_CXXFLAGS overrides env CXXFLAGS unless it is a += definition
   if make::definition::is_plus_key "DEFINITION_CXXFLAGS"
   then
      r_concat "${DEFINITION_CXXFLAGS}" "${CXXFLAGS}"
      result="${RVAL}"
   else
      result="${DEFINITION_CXXFLAGS:-${CXXFLAGS}}"
   fi

   if make::definition::is_plus_key "DEFINITION_OTHER_CXXFLAGS"
   then
      r_concat "${DEFINITION_OTHER_CXXFLAGS}" "${OTHER_CXXFLAGS}"
      r_concat "${result}" "${RVAL}"
      result="${RVAL}"
   else
      r_concat "${result}" "${DEFINITION_OTHER_CXXFLAGS:-${OTHER_CXXFLAGS}}"
      result="${RVAL}"
   fi

   # C++ reuses the C warning/optimization flags
   make::compiler::_r_cflags_value "${compiler}" "${configuration}" "${addoptflags}"
   r_concat "${result}" "${RVAL}"
}
# Compose the effective LDFLAGS value into RVAL; adds --export-dynamic on
# linux Debug/Test builds so dladdr can resolve symbol names.
make::compiler::r_ldflags_value()
{
   log_entry "make::compiler::r_ldflags_value" "$@"

   local compiler="$1"
   local configuration="$2"

   if [ "${MULLE_FLAG_LOG_SETTINGS}" = 'YES' ]
   then
      log_trace2 "ENV LDFLAGS: ${LDFLAGS}"
      log_trace2 "ENV OTHER_LDFLAGS: ${OTHER_LDFLAGS}"
      log_trace2 "LDFLAGS: ${DEFINITION_LDFLAGS}"
      log_trace2 "OTHER_LDFLAGS: ${DEFINITION_OTHER_LDFLAGS}"
   fi

   local result

   # DEFINITION_LDFLAGS overrides env LDFLAGS unless it is a += definition
   if make::definition::is_plus_key "DEFINITION_LDFLAGS"
   then
      r_concat "${DEFINITION_LDFLAGS}" "${LDFLAGS}"
      result="${RVAL}"
   else
      result="${DEFINITION_LDFLAGS:-${LDFLAGS}}"
   fi

   if make::definition::is_plus_key "DEFINITION_OTHER_LDFLAGS"
   then
      r_concat "${DEFINITION_OTHER_LDFLAGS}" "${OTHER_LDFLAGS}"
      r_concat "${result}" "${RVAL}"
      result="${RVAL}"
   else
      r_concat "${result}" "${DEFINITION_OTHER_LDFLAGS:-${OTHER_LDFLAGS}}"
      result="${RVAL}"
   fi

   # export symbols for dladdr in debug-ish builds
   # doesn't work for me though...
   # https://stackoverflow.com/questions/11731229/dladdr-doesnt-return-the-function-name/11732893?r=SearchResults&s=3|31.5239#11732893
   case "${configuration}" in
      'Debug'|'Test')
         case "${MULLE_UNAME}" in
            linux)
               case "${compiler%.*}" in
                  *gcc|*clang)
                     r_concat "${result}" "-Wl,--export-dynamic"
                     result="${RVAL}"
                  ;;
               esac
            ;;
         esac
      ;;
   esac

   RVAL="${result}"
}
# Produce extra cmake defines (in RVAL) needed for exotic compilers.
# Currently only tcc needs help: it lacks default include paths and
# chokes on some Apple-style link flags.
make::compiler::r_cmakeflags_values()
{
   log_entry "make::compiler::r_cmakeflags_values" "$@"

   # bugfix: declare headerpath local so it no longer leaks into the
   # caller's scope
   local headerpath

   RVAL=""
   case "${compiler}" in
      *tcc)
         headerpath="`mulle-platform includepath --cmake`"
         RVAL="-DCMAKE_C_COMPILER_WORKS=ON
-DCMAKE_C_STANDARD_INCLUDE_DIRECTORIES=${headerpath}
-DHAVE_FLAG_SEARCH_PATHS_FIRST=OFF
-DHAVE_HEADERPAD_MAX_INSTALL_NAMES=OFF"
      ;;
   esac
}
:
|
#!/bin/bash
# Run the policy-management REST API tests against a locally started
# MongoDB-backed PMS.
shell_dir=$(dirname "$0")
set -ex
source "${GOPATH}/src/github.com/teramoby/speedle-plus/setTestEnv.sh"
go clean -testcache
# Reconfigure spctl to talk to the local ADS/PMS endpoints
"${GOPATH}/bin/spctl" config ads-endpoint http://localhost:6734/authz-check/v1/
"${GOPATH}/bin/spctl" config pms-endpoint http://localhost:6733/policy-mgmt/v1/
startPMS mongodb --config-file "${shell_dir}/config_mongodb.json"
sleep 5
"${GOPATH}/bin/spctl" delete service --all
# bugfix: "$@" forwards extra arguments verbatim; $* re-splits quoted args.
# TEST_OPTS is intentionally unquoted so multiple options word-split.
go test ${TEST_OPTS} github.com/teramoby/speedle-plus/pkg/svcs/pmsrest "$@"
|
"""Swap a random photo into the web diary's main image slot every 5 hours,
then restore it before picking the next one."""
from time import sleep
import glob
import os
import random
import shutil

os.chdir("/Users/dorislee/Desktop/PersonalProj/")
while True:
    candidates = glob.glob("public_photos/*")
    # bugfix: random.choice raises IndexError on an empty folder
    if not candidates:
        sleep(18000)
        continue
    old_file = random.choice(candidates)
    # bugfix: shutil.move handles spaces and shell metacharacters in
    # filenames; os.system("mv {} ...") silently failed on them
    shutil.move(old_file, "web_diary/assets/img/main.jpg")
    sleep(18000)  # 5 hours
    shutil.move("web_diary/assets/img/main.jpg", old_file)
|
from typing import Generic, Optional, Tuple, TypeVar

T = TypeVar('T')


class ExpiringCache(Generic[T]):
    """A minimal time-based cache mapping keys to (value, expiry) pairs.

    Expired entries are evicted lazily: an entry is removed only when it
    is next looked up via ``get`` after its expiry time.
    """

    def __init__(self) -> None:
        # key -> (value, expires_at); expires_at is an absolute timestamp
        self._cache: dict = {}

    def get(self, key: Optional[str], current_time: float) -> Optional[T]:
        """Return the cached value for ``key``, or None if absent/expired.

        An entry expires when ``expires_at`` is strictly earlier than
        ``current_time`` (an entry expiring exactly now is still served).
        """
        entry: Optional[Tuple[T, float]] = self._cache.get(key)
        if entry is None:
            return None
        value, expires_at = entry
        if expires_at < current_time:
            del self._cache[key]
            return None
        return value

    def put(self, key: Optional[str], value: T, expires_at: float) -> None:
        """Store ``value`` under ``key`` until ``expires_at``."""
        self._cache[key] = (value, expires_at)
<reponame>Danonovsky/Leasing
// Append one car offer card to the .offers container.
// ble  - offer record (name, mark, model, capacity, fuel, body, year, wage, id)
// ble1 - array of picture records; first picture is shown, else a placeholder
function appendOffer(ble,ble1) {
    var object='<article class="col-xs-12 col-md-4">';
    object+='<div class="panel panel-default">';
    object+='<div class="panel-heading"><h5>'+ble.name+'</h5></div>';
    object+='<div class="panel-body"><div class="smallPic">';
    if(ble1.length>0) {
        object+='<img class="img-responsive" src="'+$('#baseUrl').data('baseurl')+ble1[0].path+'" alt="picture">';
    }
    else object+='<img class="img-responsive" src="'+$('#baseUrl').data('baseurl')+'/img/nopic.png" alt="no picture">';
    object+='</div><p>Model: '+ble.mark+' '+ble.model+'</p>';
    object+='<p>Engine: '+ble.capacity+' '+ble.fuel+'</p>';
    object+='<p>Body: '+ble.body+'</p>';
    object+='<p>Year: '+ble.year+'</p>';
    object+='<p>Wage: '+ble.wage+'PLN</p>';
    object+='<p><a class="user-anchor" href="'+$('#baseUrl').data('baseurl')+'index.php/offer/details/'+ble.id+'">See details</a></p>';
    // bugfix: closing tag was '</article' (missing '>'), producing
    // malformed markup
    object+='</div></div></article>';
    $('.offers').append(object);
}
// Fetch the current page of car offers (ordered per the #offerOrder
// select) and rebuild the .offers list.
// NOTE(review): no error callback — a failed AJAX call leaves the old
// list in place silently.
function getOffers() {
    $.ajax({
        type: "POST",
        url: $('#baseUrl').data('baseurl')+'index.php/ajax/getCars',
        dataType: "json",
        data: {
            orderby: $('#offerOrder').find(':selected').data('orderby'),
            way: $('#offerOrder').find(':selected').data('way'),
            page: $('#page').data('page')
        },
        success: function(json) {
            console.log(json);
            $('.offers').empty();
            // bugfix: `i` was an implicit global; declare it locally
            for(let i=0;i<json.basic.length;i++) {
                appendOffer(json.basic[i],json.pics[i]);
            }
        }
    });
}
// On page load: render the first page of offers, then re-render whenever
// the ordering dropdown changes.
$(document).ready(function(){
    getOffers();
    $('#offerOrder').on('change', function(){
        getOffers();
    });
});
|
<filename>example/node/services/admin/model/admin.model.js
import mongoose, { Schema } from 'mongoose';

const adminSchema = new Schema({
  // soft-delete marker: when and by whom the admin was deleted
  deletedAt:{ date: {type:Date} , actor:{ type: Schema.Types.ObjectId , ref:'contact'} },
  // bugfix: pass Date.now as a function reference. `Date.now()` was
  // evaluated once when the schema was defined, so every document got
  // that same fixed timestamp instead of its creation time.
  createdAt: { type: Date , default: Date.now },
  updatedAt: { type: Date },
});

const adminModel = mongoose.model('admin', adminSchema);

export default adminModel;
|
const Actions = require('@action')
const AbstractJob = require('./Job').AbstractJob
const PDFRetrieverAction = Actions.PDFFileRetrieverAction
const PDFParseAction = Actions.PDFParseAction
const HandleDownloadErrorAction = Actions.HandleDownloadErrorAction
class BillPDFFetchJob extends AbstractJob {
constructor (params, callback) {
super(params, callback)
this.params = params
}
static create (params, callback) {
return new BillPDFFetchJob(params, callback)
.addAction(new PDFRetrieverAction(params.url, params.id))
.addAction(new PDFParseAction(params.url, params.id))
.addAction(new Actions.QueryResponseAdapterAction(params))
.addAction(new Actions.FileOutputAction(params))
.addErrorAction(new HandleDownloadErrorAction(callback, BillPDFFetchJob.create, params))
}
}
module.exports.PDFRetrievalJob = BillPDFFetchJob
|
<filename>view/middleware.go
package view
///////////////////////////////////////////////////////////////////////////////
// Middleware hooks into the view rendering pipeline.
type Middleware interface {
	// PreRender runs before rendering; returning abort == true stops
	// further processing for this request.
	PreRender(ctx *Context) (abort bool)
	// PostRender runs after rendering and may rewrite the generated HTML
	// and/or replace the rendering error.
	PostRender(response *Response, html string, err error) (newHtml string, newErr error)
}
|
package bitbot
import (
"math/rand"
"net/http"
"os"
"sync"
"time"
"github.com/mb-14/gomarkov"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/whyrusleeping/hellabot"
bolt "go.etcd.io/bbolt"
log "gopkg.in/inconshreveable/log15.v2"
)
// Bot bundles the IRC connection with its persistent store, metrics and
// plugin (trigger) registry. A single package-level instance `b` is used.
type Bot struct {
	Bot          *hbot.Bot
	DB           *bolt.DB
	Random       *rand.Rand // Initialized PRNG
	Config       Config
	triggers     map[string]NamedTrigger // For "registered" triggers
	triggerMutex *sync.RWMutex           // guards triggers
	counters     map[string]*prometheus.CounterVec
	gauges       map[string]*prometheus.GaugeVec
	mChain       *gomarkov.Chain // Initialized Markov chain. Accessed and updated by markov triggers.
	markovMutex  *sync.RWMutex   // guards mChain
}
// Config holds all startup options consumed by Run.
type Config struct {
	NickservPass string         // Nickserv password
	OperUser     string         // Username for server oper
	OperPass     string         // Password for server oper
	Channels     []string       // slice of channels to connect to (must include #)
	Nick         string         // nick to use
	Server       string         // server:port for connections
	SSL          bool           // Enable SSL for the connection
	Admins       ACL            // slice of masks representing administrators
	Plugins      []NamedTrigger // Plugins to start with
	Prometheus   bool           // Enable Prometheus metrics endpoint
	PromAddr     string         // Listen address for prometheus endpoint
}
var b Bot = Bot{}
// RegisterTrigger records t in the registry and attaches it to the
// underlying IRC bot. The lock covers only the map update, as before.
func (b *Bot) RegisterTrigger(t NamedTrigger) {
	func() {
		b.triggerMutex.Lock()
		defer b.triggerMutex.Unlock()
		b.triggers[t.Name()] = t
	}()
	b.Bot.AddTrigger(t)
}
// FetchTrigger looks up a registered trigger by name; the second return
// value reports whether it was found.
func (b *Bot) FetchTrigger(name string) (NamedTrigger, bool) {
	b.triggerMutex.RLock()
	trigger, found := b.triggers[name]
	b.triggerMutex.RUnlock()
	return trigger, found
}
// DropTrigger removes t from the registry. It always reports true,
// regardless of whether the trigger was registered (historic contract).
func (b *Bot) DropTrigger(t NamedTrigger) bool {
	b.triggerMutex.Lock()
	defer b.triggerMutex.Unlock()
	delete(b.triggers, t.Name())
	return true
}
// Run wires up the package-level bot from config, registers the built-in
// and configured triggers, optionally exposes Prometheus metrics, and
// blocks running the IRC loop.
func Run(config Config) {
	db, err := newDB()
	if err != nil {
		log.Error(err.Error())
		os.Exit(1)
	}
	b.DB = db
	b.Random = rand.New(rand.NewSource(time.Now().UnixNano()))
	b.Config = config
	b.triggerMutex = &sync.RWMutex{}
	b.markovMutex = &sync.RWMutex{}
	b.triggers = make(map[string]NamedTrigger)
	b.counters = make(map[string]*prometheus.CounterVec)
	b.gauges = make(map[string]*prometheus.GaugeVec)
	chans := func(bot *hbot.Bot) {
		bot.Channels = b.Config.Channels
	}
	sslOptions := func(bot *hbot.Bot) {
		bot.SSL = b.Config.SSL
	}
	irc, err := hbot.NewBot(b.Config.Server, b.Config.Nick, chans, sslOptions)
	if err != nil {
		log.Error(err.Error())
		os.Exit(1)
	}
	b.Bot = irc
	// Log as JSON to stdout.
	// bugfix: this handler was installed twice (again after the plugin
	// loop); the redundant second call has been removed.
	b.Bot.Logger.SetHandler(log.StreamHandler(os.Stdout, log.JsonFormat()))
	// These are non-optional and added to every bot instance
	b.Bot.AddTrigger(OperLogin)
	b.Bot.AddTrigger(loadTrigger)
	b.Bot.AddTrigger(unloadTrigger)
	b.Bot.AddTrigger(NickTakenTrigger)
	for _, trigger := range config.Plugins {
		log.Info(trigger.Name() + " loaded")
		b.RegisterTrigger(trigger)
	}
	// Prometheus stuff
	if b.Config.Prometheus {
		b.createCounters()
		b.Bot.AddTrigger(MessageCounterTrigger)
		b.Bot.AddTrigger(ChannelPopGaugeTrigger)
		b.Bot.AddTrigger(SetChanPopGaugeTrigger)
		b.Bot.AddTrigger(HandleListReplyTrigger)
		http.Handle("/metrics", promhttp.Handler())
		// bugfix: report ListenAndServe failures instead of dropping them
		go func() {
			if err := http.ListenAndServe(b.Config.PromAddr, nil); err != nil {
				log.Error(err.Error())
			}
		}()
	}
	// GOOOOOOO
	defer b.DB.Close()
	b.Bot.Run()
}
|
#!/bin/bash
# Publish site changes to the staging site: rebuild bundles, compile with
# harp into ../_staging, point CNAME at the staging domain and push the
# result to the gh-pages branch.
URL="staging.openbudgetoakland.org"
FILE="../_staging/CNAME"
# bugfix: TIMESTAMP was referenced in the commit message but never set
TIMESTAMP=$(date +"%Y-%m-%d %H:%M:%S")
# go to the 'source file' directory
cd _src
# build webpack bundles
yarn run build
# compile files to 'staging' directory
# harp needs to ignore node stuff
mv -f node_modules _node_modules
harp compile ./ ../_staging
mv -f _node_modules node_modules
# update CNAME file with staging url
echo "$URL" > "$FILE"
# ask for description of commit
echo "Briefly describe your changes: "
# -r keeps backslashes in the message literal
read -r MESSAGE
# commit preview files and push to github
cd ../_staging
git checkout gh-pages
git add -A
git commit -m "$TIMESTAMP: $MESSAGE"
git push
|
#!/bin/sh
# Resolve the image name once; `head -n 1` keeps only the first line of
# the sirius output.
IMAGE=$(sirius docker_image_name | head -n 1)
# replace DEPLOY_SERVER with the release server
# bugfix: removed a stray trailing pipe that left the command incomplete
sirius docker_deploy:{{cookiecutter.project_slug}},${IMAGE},server=DEPLOY_SERVER,ports="9201;8080"
<filename>FieldWorkerApp/Android/src/esri/mrm/mobile/WorkOrderStatus.java
package esri.mrm.mobile;

/** Lifecycle states a field work order can be in. */
public enum WorkOrderStatus
{
  Assigned, Dispatched, AtStop, Completed, Exception, Unassigned
}
|
#!/usr/bin/env bash
set -ev
# Raise PHP's memory limit for composer/phpunit via an extra ini file.
PHP_INI_DIR="$HOME/.phpenv/versions/$(phpenv version-name)/etc/conf.d/"
TRAVIS_INI_FILE="$PHP_INI_DIR/travis.ini"
echo "memory_limit=3072M" >> "$TRAVIS_INI_FILE"
# Alias dev-master to the current commit so composer resolves this build.
sed --in-place "s/\"dev-master\":/\"dev-${TRAVIS_COMMIT}\":/" composer.json
# Pin optional dependency versions requested through the build matrix.
if [ "$SYMFONY" != "" ]; then composer require "symfony/symfony:$SYMFONY" --no-update; fi;
if [ "$SONATA_CORE" != "" ]; then composer require "sonata-project/core-bundle:$SONATA_CORE" --no-update; fi;
if [ "$SONATA_BLOCK" != "" ]; then composer require "sonata-project/block-bundle:$SONATA_BLOCK" --no-update; fi;
# TODO: remove when dropping sf < 3.4 support
if [[ -z "${SYMFONY}" || ("${SYMFONY:0:3}" != "2.8" && "${SYMFONY:0:3}" != "3.3") ]]; then
    composer require "symfony/maker-bundle:${SYMFONY_MAKER:=1.7}" --no-update
fi
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
export "FLUTTER_ROOT=/Library/Flutter"
export "FLUTTER_APPLICATION_PATH=/Users/shz/Desktop/LearnFlutter/flutter_update"
export "FLUTTER_TARGET=lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build/ios"
export "FLUTTER_FRAMEWORK_DIR=/Library/Flutter/bin/cache/artifacts/engine/ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
|
#!/bin/bash
# Emit (to stdout) a pair of GitLab CI jobs per suite/run combination:
# a .pre job that queues a test run in the reporting service, then a test
# job that executes the suite with Maven/Zafira.
#
#   $1 - comma-separated list of suite names
#   $2 - number of runs per suite
#
# NOTE(review): `testruns` is declared as an array but expanded as a
# scalar in `seq` (uses its first element); the loop variable `suite`
# shadows the `suite` array (safe in bash — the word list is expanded
# before the loop body runs). $reportingServiceURL, $reportingAccessToken,
# $reportingProject, $env and $threads are expected from the CI
# environment — confirm against the pipeline definition.
suite=($(echo "$1" | tr ',' '\n'))
testruns=($(echo "$2"))
for suite in "${suite[@]}"
do
for i in $(seq 1 $testruns)
do
job_title=$suite-$i
ci_run_id=$(cat /proc/sys/kernel/random/uuid)
# Heredoc below is the emitted YAML; escaped \$ survive into the output.
cat <<EOF
job_queue_test_run_$job_title:
  stage: .pre
  image: k8spatterns/curl-jq
  script:
    - |
      accessToken=\$(echo \$(curl --request POST \
      --url "$reportingServiceURL/api/auth/refresh" \
      --header "content-type: application/json" \
      --data '{ "refreshToken": "$reportingAccessToken"}') | jq '.accessToken' | tr -d '"')
    - |
      curl --request POST \
      --url "$reportingServiceURL/api/tests/runs/queue" \
      --header "content-type: application/json" \
      --header 'Authorization: Bearer '"\$accessToken"'' \
      --data '{
      "jobUrl": "$CI_JOB_URL",
      "buildNumber": "$CI_JOB_ID",
      "branch": "$CI_COMMIT_BRANCH",
      "env": "$env",
      "ciRunId": "$ci_run_id",
      "ciParentUrl": "",
      "ciParentBuild": "",
      "project": "$reportingProject"
      }'
  allow_failure: false
  rules:
    - if: (\$CI_COMMIT_BRANCH == "feature/apitests")
      when: always
job_run_tests_$job_title:
  stage: test
  needs: ["job_queue_test_run_$job_title"]
  image: maven:3.8-jdk-8
  script:
    - mvn help:effective-settings
    - mvn -B -U -Dzafira_enabled=true -Dzafira_service_url=$reportingServiceURL -Dzafira_access_token=$reportingAccessToken -Duser.timezone=UTC clean test -DADMIN_EMAILS=ivan.dobrinov@1crew.com -DJOB_URL=$CI_JOB_URL -DJOB_NAME=$job_title -DJOB_BASE_NAME=$job_title -Dplatform=* -Dsuite=$suite -Dzafira_rerun_failures=false -Dbrowser=API -Dbranch=$CI_COMMIT_BRANCH -Dzafira_project=$reportingProject -Dci_run_id=$ci_run_id -Dqueue_registration=true -Denv=$env -Dthread_count=$threads -Dtest_run_rules= -f pom.xml
  allow_failure: true
  rules:
    - if: (\$CI_COMMIT_BRANCH == "feature/apitests")
      when: always
EOF
done
done
|
<filename>server/src/config.ts
import * as nconf from 'nconf';
import * as nconfYaml from 'nconf-yaml';
import * as SmartConfig from 'hapi-config/lib/smart-config';
import { ConnectionOptions } from 'typeorm';
import { PlatformTools } from 'typeorm/platform/PlatformTools';

// Configuration precedence: argv > environment > config.yaml
nconf.argv().env().file({
  file: './config.yaml',
  format: nconfYaml,
});

export default SmartConfig(nconf);

/**
 * Reads TypeORM connection options from ormconfig.json at the app root
 * and keeps only the entries matching the current NODE_ENV.
 *
 * @throws Error when the configuration file cannot be loaded.
 */
export function getDatabaseConfig(): ConnectionOptions[] {
  // Source: https://github.com/typeorm/typeorm/blob/c12dc0002c84737b21fd2a1437e61fe9d3ed91fe/src/connection/ConnectionManager.ts#L362
  // bugfix: `const path: string = undefined` is a type error under
  // strictNullChecks — the value is genuinely optional.
  const path: string | undefined = undefined;
  const optionsArray: ConnectionOptions[] = PlatformTools.load(path || (PlatformTools.load("app-root-path").path + "/ormconfig.json"));
  if (!optionsArray)
    throw new Error(`Configuration ${path || "ormconfig.json"} was not found. Add connection configuration inside ormconfig.json file.`);
  // skip connection options whose `environment` is set and does not match
  // the value in the NODE_ENV variable
  return optionsArray.filter(options => !options.environment || options.environment === PlatformTools.getEnvVariable("NODE_ENV"));
}
|
<reponame>tactilenews/100eyes
# frozen_string_literal: true

require 'rails_helper'

# Component spec for the generic form-field wrapper: renders the component
# with `params` and inspects the produced markup.
RSpec.describe Field::Field, type: :component do
  subject { render_inline(described_class.new(**params)) { content } }

  let(:contributor) { build(:contributor) }
  let(:content) { 'Text input' }
  let(:params) { { object: contributor, attr: :name } }

  it { should have_css('.BaseField') }
  it { should have_text('Text input') }
  it { should have_css('.BaseField label[for="contributor[name]"]') }

  # Applies to every example in this group (its position after the `it`
  # blocks above does not matter): stub all translations to nil so only
  # explicitly stubbed keys return text.
  before(:each) { allow(I18n).to receive(:t).and_return(nil) }

  describe 'label' do
    context 'with translation' do
      before { allow(I18n).to receive(:t).with('contributor.form.name.label').and_return('Name') }
      it { should have_css('.BaseField label', text: 'Name') }
    end
    context 'with label parameter' do
      let(:params) { { object: contributor, attr: :name, label: 'Custom label' } }
      it { should have_css('.BaseField label', text: 'Custom label') }
    end
  end

  describe 'help text' do
    context 'with translation' do
      before { allow(I18n).to receive(:t).with('contributor.form.name.help', default: nil).and_return('First and last name') }
      it { should have_css('.BaseField-helpText', text: 'First and last name') }
    end
    context 'with help parameter' do
      let(:params) { { object: contributor, attr: :name, help: 'Custom help text' } }
      it { should have_css('.BaseField-helpText', text: 'Custom help text') }
    end
  end

  context 'errors' do
    it { should_not have_css('.BaseField-errorText') }
    context 'with invalid object' do
      let(:contributor) { build(:contributor, email: 'INVALID') }
      let(:params) { { object: contributor, attr: :email } }
      # run validations so error markup is rendered
      before { contributor.validate }
      it { should have_css('strong.BaseField-errorText', text: 'ist nicht gültig') }
    end
  end
end
|
from enum import Enum


class QueryAggregations(Enum):
    """Aggregation functions supported by alert-rule queries."""
    TOTAL = "TOTAL"
    AVERAGE = "AVERAGE"
    MAX = "MAX"
    MIN = "MIN"


def serialize_alert_rule(alert_rule):
    """Serialize an alert-rule dict as 'AGGREGATION:v1,v2,...:interval'."""
    parts = [
        alert_rule["query_aggregation"].value,
        ",".join(str(value) for value in alert_rule["threshold_values"]),
        str(alert_rule["time_interval"]),
    ]
    return ":".join(parts)


# Example usage
alert_rule = {
    "query_aggregation": QueryAggregations.TOTAL,
    "threshold_values": [10, 1000, 400, 1],
    "time_interval": 5
}
serialized_output = serialize_alert_rule(alert_rule)
print(serialized_output)  # Output: TOTAL:10,1000,400,1:5
"""Scrape post titles/first paragraphs from the FloydHub blog into SQLite."""
import requests
import sqlite3
from bs4 import BeautifulSoup

# Get the web page content
url = 'https://blog.floydhub.com/'
page = requests.get(url)
# fail fast on HTTP errors instead of parsing an error page
page.raise_for_status()

# Parse the page
soup = BeautifulSoup(page.content, 'html.parser')

# Extract the posts
posts = soup.find_all('div', {'class': 'post-content'})

# Create a database connection
conn = sqlite3.connect('blog_posts.db')
cur = conn.cursor()

# Create the table
cur.execute('CREATE TABLE IF NOT EXISTS posts (title TEXT, content TEXT)')

# Insert the posts into the database
for post in posts:
    title_tag = post.find('h2')
    content_tag = post.find('p')
    # bugfix: .find() returns None when the element is missing; calling
    # .get_text() on None raised AttributeError
    if title_tag is None or content_tag is None:
        continue
    cur.execute('INSERT INTO posts VALUES (?,?)', (title_tag.get_text(), content_tag.get_text()))

# Commit the changes
conn.commit()
# Close the connection
conn.close()
import Data.List

-- | Reverse a list (thin wrapper around the Prelude's 'reverse').
reverseList :: [a] -> [a]
reverseList list = reverse list

main :: IO ()
main = do
  let list = [1,2,3,4,5]
  print (reverseList list)

-- bugfix: the trailing notes used '#' comments, which are not valid
-- Haskell syntax and broke compilation.
-- Output
-- [5,4,3,2,1]
#!/bin/sh
set -ex
# This script will install smileycoin globally, create a systemd service for
# it and start the daemon.
#
# It is tested on Debian, but should hopefully work on anything systemd-based.
TARGETBIN="/usr/local/bin/smileycoind"
TARGETDATA="/var/local/smly"
TARGETCONF="${TARGETDATA}/smileycoin.conf"
TARGETUSER="smly"
TARGETGROUP="nogroup"
# ---------------------------
# Install the daemon binary, owned by root.
cp src/smileycoind "${TARGETBIN}"
chown root:root "${TARGETBIN}"
# Create a locked-down system user to run the daemon.
adduser --system \
--home "${TARGETDATA}" --no-create-home \
--disabled-password \
${TARGETUSER}
mkdir -p "${TARGETDATA}"
chown -R ${TARGETUSER}:${TARGETGROUP} "${TARGETDATA}"
# 22 random bytes, hex-encoded, as the RPC password.
RPCPASS="$(xxd -ps -l 22 /dev/urandom)"
# Write the config only if one does not already exist (idempotent re-runs).
[ -e "${TARGETCONF}" ] || cat <<EOF > "${TARGETCONF}"
rpcuser=smileycoinrpc
rpcpassword=${RPCPASS}
EOF
# Install and start the systemd unit.
cat <<EOF > /etc/systemd/system/smly.service
[Unit]
Description=SMLYcoin
After=network.target
[Service]
ExecStart=${TARGETBIN} -datadir=${TARGETDATA} --server -printtoconsole
User=${TARGETUSER}
Group=${TARGETGROUP}
[Install]
WantedBy=multi-user.target
EOF
systemctl enable smly.service
systemctl start smly.service
|
<reponame>jayfallon/allicin-design
import styled from 'styled-components';
import Link from 'next/link';
// Themed anchor for ads: default/hover/focus/visited colors come from the
// active styled-components theme.
const AdsLink = styled.a`
	color: ${props => props.theme.adsLinkDef};
	&:hover {
		color: ${props => props.theme.adsLinkHov};
	}
	&:focus {
		color: ${props => props.theme.adsLinkFoc};
	}
	&:visited {
		color: ${props => props.theme.adsLinkVis};
	}
`;
const LinkList = props => (
<ul className={props.cname}>
{props.list.map(function(item, i) {
const d = item.replace(/-/g, ' ');
return (
<li key={i}>
<Link href={item}>
<a title={item}>{d}</a>
</Link>
</li>
);
})}
</ul>
);
export { LinkList };
export default AdsLink;
|
#!/usr/bin/env bash
# Clear the visible screen (\033[2J), move the cursor home (\033[H) and
# wipe the scrollback buffer (\033[3J).
printf '\033[2J\033[H\033[3J'
|
<reponame>jawher/go-dockercommand
package dockercommand
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestDockerPs is an integration test: it requires a reachable Docker
// daemon, runs `ls /` in an ubuntu container, then expects Ps to report
// at least one container.
func TestDockerPs(t *testing.T) {
	docker, err := NewDocker("")
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	_, err = docker.Run(&RunOptions{
		Image: "ubuntu",
		Cmd:   []string{"ls", "/"},
	})
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	containers, err := docker.Ps(&PsOptions{})
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	assert.NotEmpty(t, containers)
}
|
<gh_stars>1-10
import React from "react";
import * as S from "./style";
import BusanImg from "../../../assets/img/banner/부산.svg";
import { WindowOpenUtil } from "../../../util/openWindow";
// Banner card for Busan Software Meister High School: school name,
// slogan, departments, and a button opening the school homepage in a new
// window. All user-facing text is Korean by design.
const Busan: React.FC = () => {
  return (
    <S.Wrapper>
      <S.Banner style={{ backgroundImage: `url(${BusanImg})` }}>
        <S.SchoolName width="510px" height="47px" margin="92px 0 21px 82px">
          부산 소프트웨어 마이스터고
        </S.SchoolName>
        <S.SchoolText width="278px" height="22px" margin="92px 0 21px 82px">
          꿈을 향한 도전, 미래를 여는 학교
        </S.SchoolText>
        <S.Hr margin="0 0 21px 82px" />
        <S.SchoolClass1 width="160px" height="20px" margin="0 619px 13px 82px">
          소프트웨어개발과
        </S.SchoolClass1>
        <S.SchoolClass2 width="160px" height="20px" margin="0 619px 109px 82px">
          임베디드소프트웨어과
        </S.SchoolClass2>
        <S.SchoolHomePage
          onClick={() => WindowOpenUtil("http://bssm.hs.kr/")}
          width="140px"
          margin="0 623px 0 82px"
        >
          홈페이지 바로가기
        </S.SchoolHomePage>
      </S.Banner>
    </S.Wrapper>
  );
};
|
<gh_stars>1-10
import { h, Component } from 'preact'
import Showdown from 'showdown'
import ReactMde from 'react-mde'
import 'draft-js/dist/Draft.css'
// import ReactQuill from 'react-quill'
import MediumButton from '../../components/_buttons/mediumButton/index.js'
// import 'react-quill/dist/quill.snow.css'
import firebase from '../../modules/firebaseFirestore'
// Shared Firestore handle; timestampsInSnapshots opts into Timestamp
// objects (required by the firebase SDK version in use here).
const db = firebase.firestore()
db.settings({
  timestampsInSnapshots: true
})
export default class Editor extends Component {
constructor () {
super()
this.handleSubmit = this.handleSubmit.bind(this)
this.handleChange = this.handleChange.bind(this)
this.handleEdit = this.handleEdit.bind(this)
this.converter = new Showdown.Converter({
tables: true,
simplifiedAutoLink: true,
strikethrough: true,
tasklists: true
})
this.state = {
name: '',
value: ''
}
}
handleSubmit (e) {
e.preventDefault()
db.collection('posts').add({
name: this.state.name,
value: this.state.value
})
.then((res) => {
console.log('Post uploaded successfully. ID: ' + res.id)
})
}
handleChange (e) {
this.setState({
[e.target.name]: e.target.value
})
}
handleEdit (value) {
this.setState({ value: value })
}
render (props, state) {
return (
<div class='editor fb-ccolumn'>
<form class='editor__form' onSubmit={this.handleSubmit}>
<label>Name</label>
<input name='name' onChange={this.handleChange}></input>
{/* <ReactQuill
className='editor__form__editor'
value={state.value}.
onChange={this.handleEdit} /> */}
<ReactMde
onChange={this.handleEdit}
value={state.value}
// generateMarkdownPreview={markdown =>
// Promise.resolve(this.converter.makeHtml(markdown))
// }
/>
<MediumButton
text='SUBMIT'
onClick={this.handleSubmit}
iconName='FaPaperPlane'
iconPos='right' />
</form>
</div>
)
}
}
|
<reponame>alirya/validator<filename>src/validatable/replace.ts<gh_stars>0
import Validatable from './validatable';
import ReplaceValue from '../value/replace';
/**
 * Alias of {@link ReplaceValue} specialized to {@link Validatable}
 * containers: describes replacing a value of `ValueType`, with `Boolean`
 * carrying the validity flag.
 */
type Replace<
    ValueType,
    Boolean extends boolean,
    ValidatableType extends Validatable
> = ReplaceValue<ValueType, Boolean, ValidatableType>;

export default Replace;
|
package gopostgres
import (
"fmt"
"github.com/jackc/pgx/v4"
"reflect"
"testing"
"time"
)
// TestFindAllBy is an integration test against a local PostgreSQL
// instance ("test" database, credentials hard-coded below). It verifies
// that FindAllBy returns NoRecordFound on an empty table, finds a single
// row by username, and finds multiple rows sharing a last_updated value.
func TestFindAllBy(t *testing.T) {
	InitDB("test", "user", "pass", "localhost", nil, pgx.LogLevelDebug)
	// start from a clean table: truncate if present, otherwise create it
	tableExists, _ := DB.tableExists(testTableForFindAllBy)
	if tableExists {
		purgeTableQuery := query{
			Statement: fmt.Sprintf(truncateTestTable, testTableForFindAllBy),
		}
		DB.execQuery(purgeTableQuery)
	} else {
		createTestTableQuery := query{
			Statement: fmt.Sprintf(createTestTable, testTableForFindAllBy),
		}
		DB.execQuery(createTestTableQuery)
	}
	testUsername := "username1"
	testUpdateTime := time.Date(2000, 1, 1, 13, 10, 20, 0, time.UTC)
	// No record should be found first
	res, err := DB.FindAllBy(testTableForFindAllBy, "username", testUsername)
	if err == nil {
		t.Error("Expected an error, got nothing")
	} else {
		switch err.(type) {
		case NoRecordFound:
			// expected
		default:
			t.Errorf("Expected to get a NoRecordFound error, got %s", reflect.TypeOf(err))
		}
	}
	if res != nil {
		t.Errorf("Expected to receive nil result, got %v", res)
	}
	// now we insert a single record
	insertQuery := query{
		Statement: fmt.Sprintf(insertOneRowInTestTable, testTableForFindAllBy),
		Args: []interface{}{
			testUsername,
			testUpdateTime,
		},
	}
	DB.execQuery(insertQuery)
	// the single record should be found
	res, err = DB.FindAllBy(testTableForFindAllBy, "username", testUsername)
	if err != nil {
		t.Errorf("Expected to get no error, got %s", err.Error())
	}
	if len(res) != 1 {
		t.Errorf("Expected the found result to be of length %d, got %d", 1, len(res))
	}
	// NOTE(review): result rows are indexed positionally; column 1 is
	// username, column 2 is last_updated — confirm against createTestTable.
	receivedUsername := res[0][1].(string)
	if receivedUsername != testUsername {
		t.Errorf("Expected the found username to be %s, got %s", testUsername, receivedUsername)
	}
	receivedUpdateTime := res[0][2].(time.Time)
	if receivedUpdateTime != testUpdateTime {
		t.Errorf("Expected the found update time to be %v, got %v", testUpdateTime, receivedUpdateTime)
	}
	// a second row with the same timestamp: FindAllBy on last_updated
	// should now return both rows, in insertion order
	testUsername2 := "username2"
	insertQuery.Args = []interface{}{testUsername2, testUpdateTime}
	DB.execQuery(insertQuery)
	res, err = DB.FindAllBy(testTableForFindAllBy, "last_updated", testUpdateTime)
	if err != nil {
		t.Errorf("Expected no error, got %s", err.Error())
	}
	if len(res) != 2 {
		t.Errorf("Expected the found result to be of length %d, got %d", 2, len(res))
	}
	receivedUsername1 := res[0][1].(string)
	if receivedUsername1 != testUsername {
		t.Errorf("Expected the found username to be %s, got %s", testUsername, receivedUsername)
	}
	receivedUpdateTime1 := res[0][2].(time.Time)
	if receivedUpdateTime1 != testUpdateTime {
		t.Errorf("Expected the found update time to be %v, got %v", testUpdateTime, receivedUpdateTime)
	}
	receivedUsername2 := res[1][1].(string)
	if receivedUsername2 != testUsername2 {
		t.Errorf("Expected the found username to be %s, got %s", testUsername2, receivedUsername2)
	}
	receivedUpdateTime2 := res[1][2].(time.Time)
	if receivedUpdateTime2 != testUpdateTime {
		t.Errorf("Expected the found update time to be %v, got %v", testUpdateTime, receivedUpdateTime2)
	}
}
|
<filename>Toonland-2013-master/toonland/playground/funnyfarm/_BarkingBoulevard.py
########################## THE TOON LAND PROJECT ##########################
# Filename: _BarkingBoulevard.py
# Created by: Cody/Fd Green Cat Fd (February 19th, 2013)
####
# Description:
#
# The Barking Boulevard added Python implementation.
####
# Scene tweaks applied when the Barking Boulevard playground loads.
# NOTE(review): relies on engine-provided globals (__filebase__, loader,
# render) — this file is executed in that environment, not imported.
filepath = __filebase__ + '/toonland/playground/funnyfarm/maps/%s'
sidewalkTexture = loader.loadTexture(filepath % 'sidewalkyellow.jpg')
# Re-texture every link-tunnel floor with the yellow sidewalk texture.
for tunnelNode in render.findAllMatches('**/linktunnel*'):
    tunnelNode.find('**/tunnel_floor*').setTexture(sidewalkTexture, 1)
# Hide the door-frame holes on the Toon HQ landmark model.
toonHq = render.find('**/tb42:toon_landmark_hqFF_DNARoot')
for doorFrameHole in toonHq.findAllMatches('**/doorFrameHole*'):
    doorFrameHole.hide()
package com.github.chen0040.leetcode.day06.easy;
/**
* Created by xschen on 1/8/2017.
*
*
* link: https://leetcode.com/problems/count-and-say/description/
*/
public class CountAndSay {
public class Solution {
private int zero;
public String countAndSay(int n) {
zero = (int)'0';
return countAndSay("1", 1, n);
}
public String countAndSay(String seq, int d, int n) {
if(d == n) {
return seq;
}
int count = 0;
int prev_digit = -1;
int digit = 0;
StringBuilder sb = new StringBuilder();
for(int i=0; i < seq.length(); ++i) {
digit = (int)seq.charAt(i) - zero;
if(i != 0 && digit != prev_digit) {
sb.append(count);
sb.append(prev_digit);
count = 0;
}
prev_digit = digit;
count++;
}
sb.append(count);
sb.append(digit);
return countAndSay(sb.toString(), d+1, n);
}
}
}
|
import java.util.HashSet;
public class RemoveDuplicatesExample
{
public static int removeDuplicates(int arr[])
{
// Variables
int result = 0;
HashSet<Integer> set = new HashSet<>();
// Traverse array from left to right
for (int i = 0; i < arr.length; i++)
{
// If element is not in set
if (!set.contains(arr[i]))
{
set.add(arr[i]);
arr[result] = arr[i];
result++;
}
}
return result;
}
public static void main(String[] args)
{
int arr[] = {2, 3, 5, 1, 2, 4, 8, 5};
int result = removeDuplicates(arr);
System.out.println("New length is: " + result);
}
} |
<reponame>payshares/interstellar-core
export function Inject(...dependencies) {
return function decorator(target) {
target.$inject = dependencies;
}
}
|
<reponame>eengineergz/Lambda
// Migration: create the `hubs` table with a required, unique name and
// created_at/updated_at timestamps.
exports.up = function(knex) {
  return knex.schema.createTable("hubs", tbl => {
    tbl.increments();            // auto-increment primary key `id`
    tbl
      .string("name")
      .notNullable()
      .unique();
    tbl.timestamps(true, true);  // created_at / updated_at defaulting to now
  });
};

// Rollback: drop the table if present.
exports.down = function(knex) {
  return knex.schema.dropTableIfExists("hubs");
};
|
<filename>assets/components/splitters/vertical-splitter/vertical-splitter.js
// Vertical splitter component state: `x` is the initial divider position
// (NOTE(review): presumably a percentage of container width — confirm
// against the component template).
component.exports = {
  data: {
    x: 70
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.indexstore.blockletindex;
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.DataMapModel;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
import org.apache.carbondata.core.datastore.block.SegmentPropertiesAndSchemaHolder;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
import org.apache.carbondata.core.indexstore.BlockMetaInfo;
import org.apache.carbondata.core.indexstore.BlockletDetailInfo;
import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
import org.apache.carbondata.core.indexstore.row.DataMapRow;
import org.apache.carbondata.core.indexstore.row.DataMapRowImpl;
import org.apache.carbondata.core.indexstore.schema.CarbonRowSchema;
import org.apache.carbondata.core.indexstore.schema.SchemaGenerator;
import org.apache.carbondata.core.memory.MemoryException;
import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex;
/**
* Datamap implementation for blocklet.
*/
/**
 * Datamap implementation for blocklet: one index row is kept per blocklet
 * (CACHE_LEVEL = BLOCKLET), in contrast to the block-level parent class.
 */
public class BlockletDataMap extends BlockDataMap implements Serializable {

  private static final long serialVersionUID = -2170289352240810993L;

  @Override public void init(DataMapModel dataMapModel) throws IOException, MemoryException {
    super.init(dataMapModel);
  }

  /**
   * Loads index metadata, dispatching on the store format: legacy stores
   * hold block-level info only, newer stores hold blocklet-level info.
   *
   * @param segmentProperties properties of the segment being indexed
   * @param blockletDataMapInfo model holding index/carbondata file information
   * @param indexInfo footers of all data files covered by this index
   * @return the task-level summary row (null when nothing was loaded)
   * @throws IOException
   * @throws MemoryException
   */
  protected DataMapRowImpl loadMetadata(SegmentProperties segmentProperties,
      BlockletDataMapModel blockletDataMapInfo, List<DataFileFooter> indexInfo)
      throws IOException, MemoryException {
    if (isLegacyStore) {
      return loadBlockInfoForOldStore(segmentProperties, blockletDataMapInfo, indexInfo);
    } else {
      return loadBlockletMetaInfo(segmentProperties, blockletDataMapInfo, indexInfo);
    }
  }

  /**
   * Creates the in-memory store that holds one index row per blocklet.
   *
   * @param addToUnsafe whether to back the store with unsafe (off-heap) memory
   * @throws MemoryException
   */
  protected void createMemoryDMStore(boolean addToUnsafe) throws MemoryException {
    memoryDMStore = getMemoryDMStore(addToUnsafe);
  }

  /**
   * Creates the schema to store summary information or the information which can be stored only
   * once per datamap. It stores datamap level max/min of each column and partition information of
   * datamap.
   *
   * @param segmentProperties properties of the segment being indexed
   * @param addToUnsafe whether to back the store with unsafe (off-heap) memory
   * @throws MemoryException
   */
  protected void createSummarySchema(SegmentProperties segmentProperties, boolean addToUnsafe)
      throws MemoryException {
    CarbonRowSchema[] taskSummarySchema =
        SchemaGenerator.createTaskSummarySchema(segmentProperties, false, isFilePathStored);
    SegmentPropertiesAndSchemaHolder.getInstance()
        .getSegmentPropertiesWrapper(segmentPropertiesIndex)
        .setTaskSummarySchema(taskSummarySchema);
    taskSummaryDMStore = getMemoryDMStore(addToUnsafe);
  }

  /**
   * Loads blocklet-level metadata for every data file footer of this index.
   *
   * @param segmentProperties properties of the segment being indexed
   * @param blockletDataMapInfo model holding index/carbondata file information
   * @param indexInfo footers of all data files covered by this index
   * @return the task-level summary row (null when no file exists physically)
   * @throws IOException
   * @throws MemoryException
   */
  private DataMapRowImpl loadBlockletMetaInfo(SegmentProperties segmentProperties,
      BlockletDataMapModel blockletDataMapInfo, List<DataFileFooter> indexInfo)
      throws IOException, MemoryException {
    String tempFilePath = null;
    DataMapRowImpl summaryRow = null;
    // Relative blocklet ID is the id assigned to a blocklet within a part file
    int relativeBlockletId = 0;
    for (DataFileFooter fileFooter : indexInfo) {
      TableBlockInfo blockInfo = fileFooter.getBlockInfo().getTableBlockInfo();
      BlockMetaInfo blockMetaInfo =
          blockletDataMapInfo.getBlockMetaInfoMap().get(blockInfo.getFilePath());
      // Here it loads info about all blocklets of index
      // Only add if the file exists physically. There are scenarios which index file exists inside
      // merge index but related carbondata files are deleted. In that case we first check whether
      // the file exists physically or not
      if (blockMetaInfo != null) {
        // this case is for CACHE_LEVEL = BLOCKLET
        // blocklet ID will start from 0 again only when part file path is changed
        if (null == tempFilePath || !tempFilePath.equals(blockInfo.getFilePath())) {
          tempFilePath = blockInfo.getFilePath();
          relativeBlockletId = 0;
        }
        summaryRow =
            loadToUnsafe(fileFooter, segmentProperties, blockInfo.getFilePath(), summaryRow,
                blockMetaInfo, relativeBlockletId);
        // this is done because relative blocklet id need to be incremented based on the
        // total number of blocklets
        relativeBlockletId += fileFooter.getBlockletList().size();
      }
    }
    return summaryRow;
  }

  /**
   * Converts one file footer into index rows (one per blocklet) and folds the
   * footer's min/max into the task-level summary row.
   */
  private DataMapRowImpl loadToUnsafe(DataFileFooter fileFooter,
      SegmentProperties segmentProperties, String filePath, DataMapRowImpl summaryRow,
      BlockMetaInfo blockMetaInfo, int relativeBlockletId) {
    int[] minMaxLen = segmentProperties.getColumnsValueSize();
    List<BlockletInfo> blockletList = fileFooter.getBlockletList();
    CarbonRowSchema[] schema = getSchema();
    CarbonRowSchema[] taskSummarySchema = getTaskSummarySchema();
    // Add one row to maintain task level min max for segment pruning
    if (!blockletList.isEmpty() && summaryRow == null) {
      summaryRow = new DataMapRowImpl(taskSummarySchema);
    }
    for (int index = 0; index < blockletList.size(); index++) {
      DataMapRow row = new DataMapRowImpl(schema);
      int ordinal = 0;
      int taskMinMaxOrdinal = 0;
      BlockletInfo blockletInfo = blockletList.get(index);
      BlockletMinMaxIndex minMaxIndex = blockletInfo.getBlockletIndex().getMinMaxIndex();
      row.setRow(addMinMax(minMaxLen, schema[ordinal], minMaxIndex.getMinValues()), ordinal);
      // compute and set task level min values
      addTaskMinMaxValues(summaryRow, minMaxLen, taskSummarySchema, taskMinMaxOrdinal,
          minMaxIndex.getMinValues(), TASK_MIN_VALUES_INDEX, true);
      ordinal++;
      taskMinMaxOrdinal++;
      row.setRow(addMinMax(minMaxLen, schema[ordinal], minMaxIndex.getMaxValues()), ordinal);
      // compute and set task level max values
      addTaskMinMaxValues(summaryRow, minMaxLen, taskSummarySchema, taskMinMaxOrdinal,
          minMaxIndex.getMaxValues(), TASK_MAX_VALUES_INDEX, false);
      ordinal++;
      row.setInt(blockletInfo.getNumberOfRows(), ordinal++);
      // add file name
      byte[] filePathBytes =
          getFileNameFromPath(filePath).getBytes(CarbonCommonConstants.DEFAULT_CHARSET_CLASS);
      row.setByteArray(filePathBytes, ordinal++);
      // add version number
      row.setShort(fileFooter.getVersionId().number(), ordinal++);
      // add schema updated time
      row.setLong(fileFooter.getSchemaUpdatedTimeStamp(), ordinal++);
      byte[] serializedData;
      try {
        // Add block footer offset, it is used if we need to read footer of block
        row.setLong(fileFooter.getBlockInfo().getTableBlockInfo().getBlockOffset(), ordinal++);
        setLocations(blockMetaInfo.getLocationInfo(), row, ordinal++);
        // Store block size
        row.setLong(blockMetaInfo.getSize(), ordinal++);
        // add blocklet info
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        DataOutput dataOutput = new DataOutputStream(stream);
        blockletInfo.write(dataOutput);
        serializedData = stream.toByteArray();
        row.setByteArray(serializedData, ordinal++);
        // add pages
        row.setShort((short) blockletInfo.getNumberOfPages(), ordinal++);
        // for relative blocklet id i.e blocklet id that belongs to a particular carbondata file
        row.setShort((short) relativeBlockletId++, ordinal);
        memoryDMStore.addIndexRow(schema, row);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
    return summaryRow;
  }

  /**
   * Returns the fully populated blocklet for the given absolute blocklet id.
   * Legacy stores delegate to the block-level parent implementation.
   *
   * @param blockletId absolute blocklet id within this datamap, as a string
   * @return the extended blocklet with its detail info attached
   */
  public ExtendedBlocklet getDetailedBlocklet(String blockletId) {
    if (isLegacyStore) {
      // BUGFIX: the parent's result was previously computed and discarded
      // (missing `return`), so legacy stores fell through to the
      // blocklet-level lookup below and returned the wrong blocklet.
      return super.getDetailedBlocklet(blockletId);
    }
    int absoluteBlockletId = Integer.parseInt(blockletId);
    DataMapRow safeRow =
        memoryDMStore.getDataMapRow(getSchema(), absoluteBlockletId).convertToSafeRow();
    short relativeBlockletId = safeRow.getShort(BLOCKLET_ID_INDEX);
    String filePath = getFilePath();
    return createBlocklet(safeRow, getFileNameWithFilePath(safeRow, filePath), relativeBlockletId);
  }

  /** Blocklet id is stored directly in the row for blocklet-level datamaps. */
  protected short getBlockletId(DataMapRow dataMapRow) {
    return dataMapRow.getShort(BLOCKLET_ID_INDEX);
  }

  /** Row schema for per-blocklet index rows, cached per segment-properties. */
  protected CarbonRowSchema[] getSchema() {
    return SegmentPropertiesAndSchemaHolder.getInstance()
        .getSegmentPropertiesWrapper(segmentPropertiesIndex).getBlocketSchema();
  }

  /**
   * Builds an ExtendedBlocklet from an index row, attaching column schemas,
   * the serialized blocklet info, and the page count.
   */
  protected ExtendedBlocklet createBlocklet(DataMapRow row, String fileName, short blockletId) {
    ExtendedBlocklet blocklet = new ExtendedBlocklet(fileName, blockletId + "");
    BlockletDetailInfo detailInfo = getBlockletDetailInfo(row, blockletId, blocklet);
    detailInfo.setColumnSchemas(getColumnSchema());
    detailInfo.setBlockletInfoBinary(row.getByteArray(BLOCKLET_INFO_INDEX));
    detailInfo.setPagesCount(row.getShort(BLOCKLET_PAGE_COUNT_INDEX));
    blocklet.setDetailInfo(detailInfo);
    return blocklet;
  }
}
|
#!/bin/bash
# Copyright 2021 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast on errors, unset variables, and failures anywhere in a pipeline.
set -euo pipefail

# Namespace where the wildcard TLS secret will be installed.
KOURIER_CONTROL_NAMESPACE=knative-serving
out_dir="$(mktemp -d /tmp/certs-XXX)"
subdomain="example.com"

# Self-signed root CA (key + certificate), valid for one year.
openssl req -x509 -sha256 -nodes -days 365 -newkey rsa:2048 \
    -subj "/O=Example Inc./CN=Example" \
    -keyout "${out_dir}"/root.key \
    -out "${out_dir}"/root.crt

# CSR for a wildcard certificate covering *.example.com.
openssl req -nodes -newkey rsa:2048 \
    -subj "/O=Example Inc./CN=Example" \
    -reqexts san \
    -config <(printf "[req]\ndistinguished_name=req\n[san]\nsubjectAltName=DNS:*.%s" "$subdomain") \
    -keyout "${out_dir}"/wildcard.key \
    -out "${out_dir}"/wildcard.csr

# Sign the wildcard CSR with the root CA.
openssl x509 -req -days 365 -set_serial 0 \
    -extfile <(printf "subjectAltName=DNS:*.%s" "$subdomain") \
    -CA "${out_dir}"/root.crt \
    -CAkey "${out_dir}"/root.key \
    -in "${out_dir}"/wildcard.csr \
    -out "${out_dir}"/wildcard.crt

# Create/update the TLS secret idempotently (client dry-run piped to apply).
# Namespace variable is quoted so the command stays intact if it is overridden.
kubectl create -n "${KOURIER_CONTROL_NAMESPACE}" secret tls wildcard-certs \
    --key="${out_dir}"/wildcard.key \
    --cert="${out_dir}"/wildcard.crt --dry-run=client -o yaml | kubectl apply -f -
|
#!/bin/bash
# Run dieharder test id 100 (-d 100) with generator id 37 (-g 37), using a
# fixed seed (-S 4072298758) so the run is reproducible.
dieharder -d 100 -g 37 -S 4072298758
|
# Clean results from previous runs; start a fresh run.log.
foamCleanTutorials | tee run.log
# Generate the mesh from blockMeshDict.
blockMesh | tee -a run.log
# checkMesh | tee -a run.log
# Decompose the case across processors for the parallel run.
decomposePar | tee -a run.log
# Run the solver on 8 ranks.  BUGFIX: the original `2>&1 >> run.log` sent
# stderr to the terminal, not the log — redirections apply left to right,
# so stdout must be redirected to the file BEFORE stderr is duplicated.
mpirun -np 8 $FOAM_USER_APPBIN/cokeCombustionFoam2 -parallel >> run.log 2>&1
# Reassemble the decomposed results into a single case.
./reconstruct.sh
|
<filename>students/alireza-mirzaei/week1/jadval.java
/**
 * Prints a multiplication grid: rows i = 0..10, columns j = 1..10,
 * one "j*i=product" cell per column separated by two tabs.
 */
public class jadval
{
    public static void main(String[] args)
    {
        for (int row = 0; row < 11; row++)
        {
            for (int col = 1; col < 11; col++)
            {
                System.out.print(col + "*" + row + "=" + row * col + "\t\t");
            }
            // End of the row (original printed this when the last column
            // was reached; emitting it after the inner loop is identical).
            System.out.print("\n");
        }
    }
}
<filename>frontend/src/Redux/index.js
import { createStore, applyMiddleware, compose } from 'redux';
import thunk from 'redux-thunk';
import createSocketIoMiddleware from 'redux-socket.io';
import { apiMiddleware } from 'redux-api-middleware';
import { webSocket } from 'Configuration';
import reducers from './reducers';
// Module-level singleton store, created lazily by initRedux().
let store = null;
// Get the Redux DevTools extension and fallback to a no-op (identity)
// function when the browser extension is not installed.
let devtools = f => f;
if (window.__REDUX_DEVTOOLS_EXTENSION__) {
  devtools = window.__REDUX_DEVTOOLS_EXTENSION__();
}
// Build the Redux store: thunk + api + socket.io middleware, composed with
// the DevTools enhancer (identity when the extension is absent).
const create = (initialState = {}) => {
  // Actions prefixed with 'CLIENT:' are forwarded over the web socket.
  const socketIoMiddleware = createSocketIoMiddleware(webSocket, 'CLIENT:');
  const enhancer = compose(
    applyMiddleware(thunk, apiMiddleware, socketIoMiddleware),
    devtools,
  );
  return createStore(reducers, initialState, enhancer);
};
// Lazily create and return the singleton store.  In development the store
// is also exposed on `window` for debugging.
export default function initRedux(initialState = {}) {
  store = store || create(initialState);
  if (process.env.NODE_ENV === 'development') {
    window.store = store;
  }
  return store;
}
|
// Signalling channel to the local server (socket.io client — the server
// presumably relays offers/answers between peers; verify against server code).
var socket = io.connect('http://localhost:5000');
// answersFrom: peers we have already completed an answer exchange with;
// offer: the most recently received remote offer.
var answersFrom = {}, offer;
// Resolve vendor-prefixed WebRTC constructors for legacy browsers.
var peerConnection = window.RTCPeerConnection ||
window.mozRTCPeerConnection ||
window.webkitRTCPeerConnection ||
window.msRTCPeerConnection;
var sessionDescription = window.RTCSessionDescription ||
window.mozRTCSessionDescription ||
window.webkitRTCSessionDescription ||
window.msRTCSessionDescription;
// Legacy callback-style getUserMedia shim.
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
// Single shared peer connection using Mozilla's public STUN server.
// NOTE(review): "<PASSWORD>" looks like a scrubbed placeholder; STUN
// generally needs no credential — confirm before shipping.
var pc = new peerConnection({ iceServers: [{ url: "stun:stun.services.mozilla.com",
username: "somename",
credential: "<PASSWORD>" }]
});
// A remote stream arrived: render it in a small autoplaying <video>.
// NOTE(review): onaddstream and URL.createObjectURL(stream) are deprecated;
// modern equivalents are pc.ontrack and video.srcObject.
pc.onaddstream = function (obj) {
    var vid = document.createElement('video');
    vid.setAttribute('class', 'video-small');
    vid.setAttribute('autoplay', 'autoplay');
    vid.setAttribute('id', 'video-small');
    document.getElementById('users-container').appendChild(vid);
    vid.src = window.URL.createObjectURL(obj.stream);
}
// Capture local video (no audio), preview it in the first <video> element
// on the page, and attach the stream to the peer connection.
navigator.getUserMedia({video: true}, function (stream) {
    var video = document.querySelector('video');
    video.src = window.URL.createObjectURL(stream);
    pc.addStream(stream);
}, error);
// Shared error callback for all WebRTC/getUserMedia calls: log and continue.
function error (err) {
    console.warn('Error', err);
}
// Create an SDP offer for peer `id`, install it as our local description,
// then forward it to that peer through the signalling server.
function createOffer (id) {
    pc.createOffer(function(offer) {
        pc.setLocalDescription(new sessionDescription(offer), function () {
            socket.emit('make-offer', {
                offer: offer,
                to: id
            });
        }, error);
    }, error);
}
// A peer answered our offer: install the answer as the remote description,
// mark that peer's list entry active, and (once per peer, tracked in
// answersFrom) issue a follow-up offer.
socket.on('answer-made', function (data) {
    pc.setRemoteDescription(new sessionDescription(data.answer), function () {
        document.getElementById(data.socket).setAttribute('class', 'active');
        if (!answersFrom[data.socket]) {
            createOffer(data.socket);
            answersFrom[data.socket] = true;
        }
    }, error);
});
// A peer sent us an offer: install it as the remote description, create an
// answer, install the answer locally, and send it back via signalling.
socket.on('offer-made', function (data) {
    offer = data.offer;
    pc.setRemoteDescription(new sessionDescription(data.offer), function () {
        pc.createAnswer(function (answer) {
            pc.setLocalDescription(new sessionDescription(answer), function () {
                socket.emit('make-answer', {
                    answer: answer,
                    to: data.socket
                });
            }, error);
        }, error);
    }, error);
});
// Render a clickable list entry for every newly announced user; clicking an
// entry starts an offer to that user.
socket.on('add-users', function (data) {
    for (var i = 0; i < data.users.length; i++) {
        var el = document.createElement('div');
        // BUGFIX: `id` was declared with `var`, so all click handlers shared
        // ONE binding and every click offered to the LAST user in the list.
        // `const` is block-scoped, giving each handler its own `id`.
        const id = data.users[i];
        el.setAttribute('id', id);
        el.innerHTML = id;
        el.addEventListener('click', function () {
            createOffer(id);
        });
        document.getElementById('users').appendChild(el);
    }
});
// A peer disconnected: remove its entry from the user list.
socket.on('remove-user', function (id) {
    var div = document.getElementById(id);
    document.getElementById('users').removeChild(div);
});
|
/*
* Pencil Messages
*
* This contains all the text for the Pencil container.
*/
import { defineMessages } from 'react-intl';
// Message-id namespace shared by all Pencil container translations.
export const scope = 'app.containers.Pencil';
// react-intl message catalog for the Pencil container.
export default defineMessages({
  header: {
    id: `${scope}.header`,
    defaultMessage: 'This is the Pencil container!',
  },
});
|
<reponame>kxepal/couchdb-fauxton
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
// Route objects for the document editor: DocEditorRouteObject edits an
// existing document or design document; NewDocEditorRouteObject creates one.
define([
  'app',
  'api',
  'addons/documents/helpers',
  'addons/documents/resources',
  'addons/databases/base',
  'addons/documents/doc-editor/actions',
  'addons/documents/doc-editor/components.react'
],
function (app, FauxtonAPI, Helpers, Documents, Databases, Actions, ReactComponents) {

  var DocEditorRouteObject = FauxtonAPI.RouteObject.extend({
    layout: 'doc_editor',
    disableLoader: true,
    selectedHeader: 'Databases',
    roles: ['fx_loggedIn'],

    // options[0] is the database name; options[1] the doc id ('new' if absent).
    initialize: function (route, masterLayout, options) {
      this.databaseName = options[0];
      this.docID = options[1] || 'new';
      this.database = this.database || new Databases.Model({ id: this.databaseName });
      this.doc = new Documents.Doc({ _id: this.docID }, { database: this.database });
      this.isNewDoc = false;
      this.wasCloned = false;
    },

    routes: {
      'database/:database/:doc/code_editor': 'codeEditor',
      'database/:database/:doc': 'codeEditor',
      'database/:database/_design/:ddoc': 'showDesignDoc'
    },

    events: {
      'route:duplicateDoc': 'duplicateDoc'
    },

    // Breadcrumbs: a back link to the previous page plus the current doc id.
    crumbs: function () {
      var previousPage = Helpers.getPreviousPageForDoc(this.database, this.wasCloned);
      return [
        { type: 'back', link: previousPage },
        { name: this.docID, link: '#' }
      ];
    },

    codeEditor: function (database, doc) {
      // if either the database or document just changed, we need to get the latest doc/db info
      if (this.databaseName !== database) {
        this.databaseName = database;
        this.database = new Databases.Model({ id: this.databaseName });
      }
      if (this.docID !== doc) {
        this.docID = doc;
        this.doc = new Documents.Doc({ _id: this.docID }, { database: this.database });
      }
      Actions.initDocEditor({ doc: this.doc, database: this.database });
      this.setComponent('#dashboard-content', ReactComponents.DocEditorController, {
        database: this.database,
        isNewDoc: this.isNewDoc,
        previousPage: '#/' + Helpers.getPreviousPageForDoc(this.database)
      });
    },

    // Design docs reuse the code editor with the '_design/' prefix restored.
    showDesignDoc: function (database, ddoc) {
      this.codeEditor(database, '_design/' + ddoc);
    },

    // Copy the current doc to `newId`, then navigate to the copy; on failure,
    // surface the server's reason in an error notification.
    duplicateDoc: function (newId) {
      var doc = this.doc,
          database = this.database;
      this.docID = newId;
      var that = this;
      doc.copy(newId).then(function () {
        doc.set({ _id: newId });
        that.wasCloned = true;
        FauxtonAPI.navigate('/database/' + database.safeID() + '/' + app.utils.safeURLName(newId), { trigger: true });
        FauxtonAPI.addNotification({
          msg: 'Document has been duplicated.'
        });
      }, function (error) {
        var errorMsg = 'Could not duplicate document, reason: ' + error.responseText + '.';
        FauxtonAPI.addNotification({
          msg: errorMsg,
          type: 'error'
        });
      });
    },

    apiUrl: function () {
      return [this.doc.url('apiurl'), this.doc.documentation()];
    }
  });

  // Variant mounted at 'database/:database/new': starts from an empty NewDoc.
  var NewDocEditorRouteObject = DocEditorRouteObject.extend({
    initialize: function (route, masterLayout, options) {
      var databaseName = options[0];
      this.database = this.database || new Databases.Model({ id: databaseName });
      this.doc = new Documents.NewDoc(null, {
        database: this.database
      });
      this.isNewDoc = true;
    },
    apiUrl: function () {
      return [this.doc.url('apiurl'), this.doc.documentation()];
    },
    crumbs: function () {
      var previousPage = Helpers.getPreviousPageForDoc(this.database);
      return [
        { type: 'back', link: previousPage },
        { name: 'New Document', link: '#' }
      ];
    },
    routes: {
      'database/:database/new': 'codeEditor'
    },
    selectedHeader: 'Databases'
  });

  return {
    NewDocEditorRouteObject: NewDocEditorRouteObject,
    DocEditorRouteObject: DocEditorRouteObject
  };
});
|
def generate_combinations(input_list):
    """Return all combinations of the elements of ``input_list``.

    Combinations of every length from 1 up to ``len(input_list)`` are
    produced (in ``itertools`` order), each as a tuple.  The previous
    implementation passed ``len(input_list)`` as the combination size,
    which yielded only the single full-length combination — contradicting
    its own "all possible combinations" comment.

    Args:
        input_list: Sequence of elements to combine.

    Returns:
        list[tuple]: every combination, shortest lengths first.
    """
    import itertools

    comb_list = []
    # Collect combinations of every length, not just the full-length one.
    for size in range(1, len(input_list) + 1):
        comb_list.extend(itertools.combinations(input_list, size))
    return comb_list
<filename>src/templates/index-page.js
import React from "react";
// import PropTypes from "prop-types";
import { graphql, Link } from "gatsby";
import Layout from "../components/Layout";
import Hero from "../components/Hero";
import SlideShow from "../components/home/SlideShow";
import Fade from "react-reveal/Fade";
import Leaf1 from "../img/leaf-1.png";
import Leaf2 from "../img/leaf-2.png";
import Leaf3 from "../img/leaf-3.png";
import FamilySearchLogo from "../img/familysearch-logo.png";
import { ParallaxProvider } from "react-scroll-parallax";
import { Parallax } from "react-scroll-parallax";
import Line from "../components/Line";
import HugeText from "../components/HugeText";
import IntroBackgroundSection from "../components/IntroBackgroundSection";
import BodyBackgroundSection from "../components/BodyBackgroundSection";
import FeaturedStoryHome from "../components/home/FeaturedStoryHome";
import TellStory from "../components/home/TellStory";
import SubscribeForm from "../components/SubscribeForm";
import AboutStatue from "../components/home/AboutStatue";
import VisitStatue from "../components/home/VisitStatue";
/**
 * Pure presentational template for the home page.  Receives the markdown
 * frontmatter (hero, intro, gallery, mainpitch, story CTA, visit and
 * FamilySearch sections) and lays the page out with parallax leaf
 * decorations.  Kept separate from IndexPage so it can render with data
 * from either GraphQL or a CMS preview.
 */
export const IndexPageTemplate = ({ frontmatter }) => (
  <div>
    <ParallaxProvider>
      <Hero
        image={frontmatter.heroImage}
        content={{
          heading: frontmatter.heading,
          subheading: frontmatter.subheading
        }}
        home={true}
      />
      <LeafEl1 />
      <LeafEl2 />
      <div className="relative">
        <div className="home-scroll-line"></div>
        <Line mobile={4} desk={24} />
      </div>
      <IntroBackgroundSection>
        <section>
          <Fade>
            <div className="text-center mx-auto max-w-3xl px-8 relative z-10">
              <h4 className="uppercase tracking-widest text-green mb-4 max-w-lg mx-auto">
                {frontmatter.intro.subheading}
              </h4>
              <h2 className="text-3xl lg:text-4xl mb-6">
                {frontmatter.intro.heading}
              </h2>
              <div
                dangerouslySetInnerHTML={{
                  __html: frontmatter.intro.description
                }}
              ></div>
              <Link
                to={frontmatter.intro.link}
                className="inline-block mt-6 py-4 px-8 bg-green text-white uppercase tracking-widest text-sm"
              >
                {frontmatter.intro.linkText}
              </Link>
            </div>
          </Fade>
          <Line mobile={2} desk={64} />
        </section>
        <LeafEl3 />
        <section>
          <div className="container mx-auto lg:mt-24">
            <SlideShow images={frontmatter.gallery} />
          </div>
        </section>
        <Line mobile={20} desk={48} />
      </IntroBackgroundSection>
      <div className="relative">
        <div className="lg:absolute md:w-1/2 lg:w-2/5">
          <Parallax y={[-20, 0]} tagOuter="figure">
            <AboutStatue />
          </Parallax>
        </div>
      </div>
      <HugeText text="The Statues" start="20" finish="-40" />
      <section className="p-4 lg:py-24 mb-12">
        <div className="container mx-auto">
          <div className="flex justify-end">
            <div className="w-full lg:w-1/2 p-4">
              <Fade right distance="50px">
                <div>
                  <h4 className="uppercase tracking-widest text-green mb-4 text-center lg:text-left">
                    {frontmatter.mainpitch.subheading}
                  </h4>
                  <h2 className="text-3xl lg:text-4xl mb-6 text-center lg:text-left">
                    {frontmatter.mainpitch.heading}
                  </h2>
                  <div
                    className="mb-8"
                    dangerouslySetInnerHTML={{
                      __html: frontmatter.mainpitch.description
                    }}
                  ></div>
                  <div className="text-center lg:text-left">
                    <Link
                      to={frontmatter.mainpitch.link}
                      className="inline-block py-4 px-8 bg-green text-white uppercase tracking-widest text-sm"
                    >
                      {frontmatter.mainpitch.linkText}
                    </Link>
                  </div>
                </div>
              </Fade>
            </div>
          </div>
        </div>
      </section>
      <section>
        <div className="container mx-auto">
          <FeaturedStoryHome />
        </div>
      </section>
      <section className="py-4 mt-48" id="tell-a-story">
        <TellStory content={frontmatter.submitStoryCta} />
      </section>
      <BodyBackgroundSection>
        {/*
        <Line mobile={20} desk={20} />
        <section className="py-4 lg:py-12">
          <div className="container mx-auto text-center">
            <div className="mb-16 lg:mb-36">
              <h4 className="uppercase tracking-widest text-green mb-4 max-w-xl mx-auto">
                This website has been made possible by the hard work and
                generous gifts from our donors.
              </h4>
              <h2 className="text-3xl lg:text-4xl">Our Donors</h2>
            </div>
            <ul className="font-lora text-green text-4xl flex justify-around flex-wrap italic leading-tight tracking-wide">
              <Fade bottom>
                {DonorList.map(donor => {
                  return (
                    <li key={donor} className="w-full md:w-1/3 my-3 lg:my-8">
                      <div
                        className="max-w-sm p-4 mx-auto"
                        dangerouslySetInnerHTML={{ __html: donor }}
                      ></div>
                    </li>
                  );
                })}
              </Fade>
            </ul>
          </div>
        </section> */}
        <Line mobile={20} desk={48} />
        <section id="visit">
          <div className="container mx-auto relative">
            <div className="w-full lg:w-2/5 mb-32 lg:mb-4">
              <Fade left distance="50px">
                <div className="p-8">
                  {/* <h4 className="uppercase tracking-widest text-green mb-4 text-center lg:text-left">
                    {frontmatter.visit.subheading}
                  </h4> */}
                  <h2 className="text-3xl lg:text-4xl mb-6 text-center lg:text-left">
                    {frontmatter.visit.heading}
                  </h2>
                  <div className="mb-8">{frontmatter.visit.description}</div>
                  <Link
                    to={frontmatter.visit.link}
                    className="inline-block py-4 px-8 bg-green text-white uppercase tracking-widest text-sm"
                  >
                    {frontmatter.visit.linkText}
                  </Link>
                </div>
              </Fade>
            </div>
          </div>
          <div className="lg:float-right w-full" style={{ maxWidth: "1000px" }}>
            <Parallax y={[-45, 0]} tagOuter="figure">
              <VisitStatue />
            </Parallax>
          </div>
        </section>
        <section id="subscribe" style={{ clear: "both" }}>
          <div className="container bg-tan p-8 py-24 lg:py-24 mb-12 relative">
            <div className="max-w-2xl mx-auto text-center">
              <h2 className="text-3xl lg:text-4xl mb-6">
                “Children of the Trail” Newsletter
              </h2>
              <p>
                To receive more information about the pioneer children’s
                memorial and how you can help it grow, please provide your name
                and email address in the form below.
              </p>
              <SubscribeForm />
            </div>
          </div>
          <div className="container bg-tan p-8 py-24 lg:py-24 float-right relative z-10">
            <div className="max-w-2xl mx-auto text-center">
              <h2 className="text-3xl lg:text-4xl mb-6">Find Your Family</h2>
              <p className="mb-4">
                <img
                  src={FamilySearchLogo}
                  alt="Family Search Logo"
                  className="mx-auto"
                />
              </p>
              <p className="mb-4">{frontmatter.familysearch}</p>
              <p>
                <a
                  href="https://www.familysearch.org/campaign/pioneerchildrensmemorial/"
                  target="_blank"
                  className="font-bold text-green uppercase tracking-wider"
                  rel="noopener noreferrer"
                >
                  Explore Family Search →
                </a>
              </p>
            </div>
          </div>
          <div className="clearfix"></div>
        </section>
      </BodyBackgroundSection>
    </ParallaxProvider>
  </div>
);
// IndexPageTemplate.propTypes = {
// heroImage: PropTypes.oneOfType([PropTypes.object, PropTypes.string]),
// title: PropTypes.string,
// heading: PropTypes.string,
// subheading: PropTypes.string,
// mainpitch: PropTypes.object,
// intro: PropTypes.object,
// gallery: PropTypes.array
// };
const LeafEl1 = () => (
<div className="leaf-image">
<div
style={{
position: "absolute",
left: "5%",
top: "-80px",
zIndex: "7",
pointerEvents: "none"
}}
>
<Parallax y={[-10, 80]} tagOuter="figure">
<img src={Leaf1} alt="leaf" />
</Parallax>
</div>
</div>
);
const LeafEl2 = () => (
<div className="leaf-image">
<div
style={{
position: "absolute",
left: "30%",
top: "0px",
zIndex: "6",
pointerEvents: "none"
}}
>
<Parallax y={[-20, 0]} tagOuter="figure">
<img src={Leaf2} alt="leaf" />
</Parallax>
</div>
</div>
);
const LeafEl3 = () => (
<div className="leaf-image">
<div
style={{
position: "absolute",
bottom: "0px",
left: "50%",
marginLeft: "-260px",
zIndex: "5",
pointerEvents: "none"
}}
>
<Parallax y={[0, 20]} tagOuter="figure">
<img src={Leaf3} alt="leaf" />
</Parallax>
</div>
</div>
);
// const DonorList = [
// "Jacob <br />Larsen",
// "Kyle <br />Herriman",
// "Jane <br />Smith",
// "Andrew <br />Farseid",
// "Brigham Young <br />University",
// "Abby <br />Hafen"
// ];
const IndexPage = ({ data }) => {
// console.log(data);
const { frontmatter } = data.markdownRemark;
return (
<Layout>
<IndexPageTemplate frontmatter={frontmatter} />
</Layout>
);
};
// IndexPage.propTypes = {
// data: PropTypes.shape({
// markdownRemark: PropTypes.shape({
// frontmatter: PropTypes.object
// }),
// introBg: PropTypes.object,
// donorsBg: PropTypes.object
// })
// };
export default IndexPage;
// GraphQL page query: pulls the index page's frontmatter (hero image,
// intro, mainpitch, story CTA, visit section, FamilySearch text, and
// gallery images) from the markdown node with templateKey "index-page".
export const pageQuery = graphql`
  query IndexPageTemplate {
    markdownRemark(frontmatter: { templateKey: { eq: "index-page" } }) {
      frontmatter {
        title
        heading
        heroImage {
          childImageSharp {
            fluid(maxWidth: 2048, quality: 100) {
              ...GatsbyImageSharpFluid_withWebp
            }
          }
        }
        intro {
          subheading
          heading
          description
          linkText
          link
        }
        mainpitch {
          subheading
          heading
          description
          linkText
          link
        }
        submitStoryCta {
          heading
          description
          linkText
          link
        }
        visit {
          subheading
          heading
          description
          linkText
          link
        }
        familysearch
        gallery {
          image {
            childImageSharp {
              fluid(maxWidth: 600, quality: 90) {
                ...GatsbyImageSharpFluid_withWebp
              }
            }
          }
        }
      }
    }
  }
`;
|
# Launch the Presto CLI against the local coordinator on port 7060,
# using the "kafka" catalog and the "default" schema.
presto --server 127.0.0.1:7060 --catalog kafka --schema default
|
#!/bin/bash
echo "Now Running The CSV Files Comparison"

# Inputs/outputs for the R scoring script.
context_name='ProcessEvent'
csv_file='./contexts/ProcessEvent_Small_Context.csv'
rcf_file='./contexts/Context_ProcessEvent.rcf'
output_scoring_file='./contexts/Soft_Scoring_Of_Context_ProcessEvent.csv'

# Minimum support / confidence thresholds (units defined by the R script).
MinSup='90'
MinConf='90'

# Quote every argument so paths containing spaces are passed intact.
Rscript soft_contexts_scoring_shell.r "$context_name" "$csv_file" "$rcf_file" "$output_scoring_file" "$MinSup" "$MinConf"
|
<reponame>zouvier/BlockChain-Voting
import { BN } from "ethereumjs-util";
import { SenderTransactions, SerializedTransaction } from "../PoolState";
/**
 * Move as many transactions as possible from the queued list
 * to the pending list.
 *
 * NOTE(review): the original doc also claimed the "new executable nonce of
 * the sender" is returned, but the declared return type contains only the
 * two lists — confirm against the implementation.
 *
 * @param pending - The sender's transactions that are ready to execute.
 * @param queued - The sender's transactions waiting on earlier nonces.
 * @param retrieveNonce - Callback that extracts the nonce from a
 *   serialized transaction.
 * @returns The updated pending and queued transaction lists.
 */
export declare function reorganizeTransactionsLists(pending: SenderTransactions, queued: SenderTransactions, retrieveNonce: (serializedTx: SerializedTransaction) => BN): {
    newPending: SenderTransactions;
    newQueued: SenderTransactions;
};
//# sourceMappingURL=reorganizeTransactionsLists.d.ts.map
#!/bin/bash
##
## Install Docker
##
## Register the Docker repository with yum
touch /etc/yum.repos.d/docker.repo
cat <<'EOF' > /etc/yum.repos.d/docker.repo
[dockerrepo]
name=Docker Repository
baseurl=https://yum.dockerproject.org/repo/main/centos/$releasever/
enabled=1
gpgcheck=1
gpgkey=https://yum.dockerproject.org/gpg
EOF
## Install a pinned engine version and confirm it
yum install -y docker-engine-1.12.6
docker -v
##
## Place and update the systemd unit configuration
cp -p /usr/lib/systemd/system/docker.service /etc/systemd/system/
# Inject the proxy environment variables into the unit's [Service] section.
crudini --set /etc/systemd/system/docker.service Service Environment "\"HTTP_PROXY=${http_proxy}\" \"HTTPS_PROXY=${https_proxy}\" \"NO_PROXY=${no_proxy},/var/run/docker.sock\""
## Make systemd pick up the changed unit file
systemctl daemon-reload
##
## Start Docker now and enable it at boot
systemctl restart docker
systemctl enable docker
docker info
|
#!/bin/sh
# Configure the laptop panel (LVDS1) as the primary display at 1600x900,
# positioned at the origin with no rotation.
xrandr \
--output LVDS1 --primary --mode 1600x900 --pos 0x0 --rotate normal
|
<gh_stars>1-10
package pl.bliw.emulator.cpu;
import pl.bliw.util.Constants;
import static pl.bliw.util.Binary.checkIfItIsUnsignedByteOrThrow;
import static pl.bliw.util.Binary.checkIfItIsUnsignedWordOrThrow;
/**
* The class Registers represents all of the registers in Chip8.
*/
/**
 * The class Registers represents all of the registers in Chip8:
 * sixteen 8-bit general-purpose registers (V0..VF), the 16-bit address
 * register I, and the 16-bit program counter PC.  Reads mask values to
 * their register width; writes validate the range and throw otherwise.
 */
public class Registers {
    /**
     * The total number of registers available in Chip8.
     */
    private static final int NUM_OF_CPU_REGISTERS = 16;
    /**
     * An array used for storing values of general purpose registers.
     */
    private int[] registers;
    /**
     * The address register.
     */
    private int I;
    /**
     * The program counter register, points to the next instruction to execute.
     */
    private int PC;

    /**
     * Constructs registers, sets program counter to the offset where ROM code starts.
     */
    public Registers() {
        registers = new int[NUM_OF_CPU_REGISTERS];
        PC = Constants.ROM_CODE_OFFSET;
    }

    /**
     * Returns value from given register by enum.
     *
     * @param register value from AvailableRegisters enum.
     * @return value from register, masked to 8 bits.
     */
    public int get(AvailableRegisters register) {
        return registers[register.id] & 0xFF;
    }

    /**
     * Returns value from given register by index.
     *
     * @param index register index.
     * @return value from register, masked to 8 bits.
     * @throws IllegalArgumentException when the index is out of range.
     */
    public int get(int index) {
        try {
            return registers[index] & 0xFF;
        } catch (IndexOutOfBoundsException e) {
            throw new IllegalArgumentException("Incorrect register index");
        }
    }

    /**
     * Sets register to given value.
     *
     * @param register value from AvailableRegisters enum.
     * @param value new value for register; must fit in an unsigned byte.
     */
    public void set(AvailableRegisters register, int value) {
        try {
            checkIfItIsUnsignedByteOrThrow(String.format("V%d", register.id), value);
            registers[register.id] = value;
        } catch (IndexOutOfBoundsException e) {
            throw new IllegalArgumentException("Incorrect register index");
        }
    }

    /**
     * Sets register to given value.
     *
     * @param index register index.
     * @param value new value for register; must fit in an unsigned byte.
     * @throws IllegalArgumentException when the index is out of range.
     */
    public void set(int index, int value) {
        try {
            checkIfItIsUnsignedByteOrThrow(String.format("V%d", index), value);
            registers[index] = value;
        } catch (IndexOutOfBoundsException e) {
            throw new IllegalArgumentException("Incorrect register index");
        }
    }

    /**
     * @return address register, masked to 16 bits.
     */
    public int getI() {
        return I & 0xFFFF;
    }

    /**
     * Sets address register to given value.
     *
     * @param I new value for address register; must fit in an unsigned word.
     */
    public void setI(int I) {
        checkIfItIsUnsignedWordOrThrow("I register", I);
        this.I = I;
    }

    /**
     * @return program counter register, masked to 16 bits.
     */
    public int getPC() {
        return PC & 0xFFFF;
    }

    /**
     * Sets program counter register to given value.
     *
     * @param PC new value for program counter register; must fit in an unsigned word.
     */
    public void setPC(int PC) {
        checkIfItIsUnsignedWordOrThrow("PC register", PC);
        this.PC = PC;
    }

    /**
     * The method increments program counter register by given value.
     * Note: unlike setPC, the result is not range-checked here.
     *
     * @param value incremental value
     */
    public void incrementPC(int value) {
        this.PC += value;
    }

    /**
     * Maps registers names to indexes in array.
     */
    public enum AvailableRegisters {
        V0(0), V1(1), V2(2), V3(3), V4(4), V5(5), V6(6), V7(7), V8(8), V9(9), VA(10), VB(11), VC(12), VD(13), VE(14), VF(15);
        /**
         * An index in registers array.
         */
        private int id;

        /**
         * Constructs new register enum.
         *
         * @param id index in the registers array.
         */
        AvailableRegisters(int id) {
            this.id = id;
        }
    }
}
|
#!/bin/bash
# Collects the meshes produced by Xprepro (prepro/trans_geom), copies them
# next to the TRUST study and generates a <case>.mesh include file that
# declares dimension, domains, partitioning and re-reading instructions.
# Usage: $0 <model_dir> <prep_file>
echo $0 $*
dirmodel=$1
nommodel=`basename $2 .prep`
if [ -d $dirmodel/../prepro/trans_geom ]
then
	cd $dirmodel/../prepro/trans_geom
	dist=$dirmodel/..
	# Problem if the Xprepro study differs from the TRUST study,
	# so we force the destination to ETUDE_TRUST when that variable is set
	[ "$ETUDE_TRUST" != "" ] && dist=$ETUDE_TRUST
	echo $dist
	echo "*****************************"
	echo "Copie des fichiers sous $dist"
	echo "*****************************"
	cp -f ${dirmodel}/$2 $dist
	is2D=0 && is2DAxi=0
	# 2D meshes: detect the axisymmetric variant from the file name suffix
	for file in `ls *_2D*.geom 2>/dev/null`
	do
		is2D=1 && [ ${file%Axi.geom} != $file ] && is2DAxi=1
		echo on recupere $file
		cp -f $file $dist/${nommodel}_$file
		geoms=$geoms" "${nommodel}_$file
	done
	if [ $is2D = 0 ]
	then
		# Bug fix: a stray "echo on recupere $file" used to sit here, before
		# the loop, printing a stale (or empty) $file left over from the 2D
		# loop above; it has been removed.
		for file in `ls *.geom 2>/dev/null`
		do
			echo on recupere $file
			cp -f $file $dist/${nommodel}_$file
			geoms=$geoms" "${nommodel}_$file
		done
	fi
	# Also collect the .geos files
	for file in `ls *.geos 2>/dev/null`
	do
		echo on recupere $file
		cp -f $file $dist/${nommodel}_$file
	done
	# If a Def_Suz exists, the matching Suz_Def file must be copied as well
	for fileD in `ls Def_Suz_def* 2>/dev/null`
	do
		file=${fileD#Def_}
		echo on recupere $file
		cp -f $file $dist/${nommodel}_$file
		Suzs=$Suzs" "${nommodel}_$file 2>/dev/null
	done
	for file in `ls *.med 2>/dev/null`
	do
		echo on recupere $file
		cp -f $file $dist/$file
	done
	cd ../..
	NOMCAS=$nommodel
	file=$dist/$NOMCAS.mesh
	echo $ECHO_OPTS "# ############################################ #
# File to save and include into your data file #
# either by copy-paste, or by the instruction: #
# lire_fichier name_of_this_file ; #
# It defines dimension, domains and read mesh #
# generated by Xprepro #
# ############################################ #
" > $file
	# 2D or 3D?
	if [ $is2D = 1 ]
	then
		echo $ECHO_OPTS "dimension 2" >> $file
	else
		echo $ECHO_OPTS "dimension 3" >> $file
	fi
	# Axisymmetric 2D?
	[ $is2DAxi = 1 ] && echo $ECHO_OPTS "Bidim_axi" >> $file
	[ ${#geoms} = 0 ] && $Xterm $Petite_geometrie -title "ERROR" -e "echo Files containing meshes not generated ;read"
	n=0
	for geom in $geoms
	do
		let n=$n+1
		dom=dom_pb$n && [ $is2D = 1 ] && dom=dom_$n
		echo $ECHO_OPTS "export domaine $dom" >> $file
	done
	echo $ECHO_OPTS "# DEBUT MAILLAGE #" >> $file
	n=0
	for geom in $geoms
	do
		let n=$n+1
		dom=dom_pb$n && [ $is2D = 1 ] && dom=dom_$n
		echo $ECHO_OPTS "lire_fichier $dom $geom" >> $file
	done
	n=0
	# Warn the user that the sub-zones stay 3D and are not cut by 3Dto2D
	# (to be developed?)
	if [ $is2D = 1 ]
	then
		echo $ECHO_OPTS "# FIN MAILLAGE #" >> $file
		echo "# Remarque:Les Sous Zones restent 3D et ne sont pas encore coupees par 3Dto2D. #\n" >> $file
	else
		# The sub-zones
		cat prepro/trans_geom/Def_Suz_def* >> $file
		echo $ECHO_OPTS "# FIN MAILLAGE #\n" >> $file
	fi
	echo $ECHO_OPTS "# DEBUT DECOUPAGE" >> $file
	n=0
	for geom in $geoms
	do
		let n=$n+1
		dom=dom_pb$n && [ $is2D = 1 ] && dom=dom_$n
		echo $ECHO_OPTS "Decouper $dom
{
Partitionneur metis { Nb_parts 2 }
Larg_joint 1
Nom_Zones DOM$n
}" >> $file
	done
	echo $ECHO_OPTS "Fin
FIN DECOUPAGE #" >> $file
	echo $ECHO_OPTS "\n# DEBUT LECTURE" >> $file
	n=0
	for geom in $geoms
	do
		let n=$n+1
		dom=dom_pb$n && [ $is2D = 1 ] && dom=dom_$n
		echo $ECHO_OPTS "Scatter DOM"$n".Zones "$dom >> $file
	done
	n=0
	# Add the sub-zones, in 3D only
	if [ $is2D = 0 ]
	then
		# The following line is not sufficient for the parallel case
		cat prepro/trans_geom/Def_Suz_def* >> $file
	fi
	echo $ECHO_OPTS "1,$ s? Suz_def_pb? "$nommodel"_Suz_def_pb?g\nw" | ed $file 1>/dev/null 2>&1
	echo $ECHO_OPTS "FIN LECTURE #" >> $file
	echo $ECHO_OPTS "\n#" >> $file
	$TRUST_Awk '/Lecture du fichier/ {print "---------------------------\nBOUNDARY NAMES OF "$NF":\n---------------------------"}' prepro/trans_geom/TRUST.log >> $file
	if (( `grep -i "bord conserve" prepro/trans_geom/TRUST.log | wc -l` == 0 ))
	then
		$TRUST_Awk '/commence a la face/ && !/coupe_2D/ && !/coupe_Axi2D/ {print $3}' prepro/trans_geom/TRUST.log | sort -u >> $file
	else
		$TRUST_Awk '/bord conserve/ && !/coupe_2D/ && !/coupe_Axi2D/ {print $3}' prepro/trans_geom/TRUST.log | sort -u >> $file
	fi
	echo $ECHO_OPTS "#" >> $file
	$TRUST_EDITOR $file
	cd $dirmodel
	`dirname $0`/nettoie ${dirmodel}
else
	echo prepro must be run before!!!
fi
|
<filename>server/mqtt/mqtt_impl.go
package mqtt
import (
"crypto/md5"
"encoding/hex"
"fmt"
"strings"
"time"
g "github.com/chryscloud/video-edge-ai-proxy/globals"
"github.com/chryscloud/video-edge-ai-proxy/models"
"github.com/chryscloud/video-edge-ai-proxy/utils"
badger "github.com/dgraph-io/badger/v2"
"github.com/docker/docker/api/types/events"
)
// Docker container lifecycle event actions this manager reacts to.
const (
	ProcessActionDie   = "die"   // container stopped or crashed
	ProcessActionStart = "start" // container started
)

// ProcessState captures a point-in-time state change of a device process.
type ProcessState struct {
	Time     int64  // seconds since epoch
	DeviceID string // deviceID
	Action   string // process action from docker events
}
// getMQTTSettings loads the edge settings and validates that every field
// required for the MQTT initial connection is present. It returns
// ErrNoMQTTSettings when the settings are missing or incomplete.
func (mqtt *mqttManager) getMQTTSettings() (*models.Settings, error) {
	settings, err := mqtt.settingsService.Get()
	if err != nil {
		if err == badger.ErrKeyNotFound {
			return nil, ErrNoMQTTSettings
		}
		g.Log.Error("failed to retrieve edge settings", err)
		return nil, err
	}
	// Every one of these fields is mandatory for a gateway connection.
	incomplete := settings.ProjectID == "" ||
		settings.Region == "" ||
		settings.GatewayID == "" ||
		settings.RegistryID == "" ||
		settings.PrivateRSAKey == nil
	if incomplete {
		return nil, ErrNoMQTTSettings
	}
	return settings, nil
}
// gatewaySubscribers binds all known devices to this gateway and attaches
// the config and commands MQTT subscriptions.
func (mqtt *mqttManager) gatewaySubscribers() error {
	// Wait for the connection to be opened and propagate device bindings.
	if err := mqtt.bindAllDevices(); err != nil {
		g.Log.Error("failed to report bind devices", err)
		return err
	}
	if err := mqtt.subscribeToConfig(mqtt.gatewayID); err != nil {
		g.Log.Error("failed to subscribe to mqtt config subscription", mqtt.gatewayID, err)
		return err
	}
	if err := mqtt.subscribeToCommands(mqtt.gatewayID); err != nil {
		g.Log.Error("failed to subscribe to mqtt commands", mqtt.gatewayID, err)
		return err
	}
	return nil
}
// changedDeviceState records a docker event for a device and, after a short
// settle delay, reports the device state to the cloud only if it truly
// changed (to avoid reporting every intermediate event of a restart cycle).
func (mqtt *mqttManager) changedDeviceState(gatewayID string, message events.Message) error {
	actor := message.Actor
	// Fairly complicated logic to handle container restarts and report only
	// true changes, not attempts at restarting the container.
	if deviceID, ok := actor.Attributes["name"]; ok {
		mqtt.mutex.Lock()
		defer mqtt.mutex.Unlock()
		// Keep a sliding window of the last 10 events per device.
		var history []events.Message
		if val, ok := mqtt.processEvents.Load(deviceID); ok {
			history = val.([]events.Message)
			if len(history) >= 10 {
				startIndex := len(history) - 10
				history = history[startIndex:]
			}
			history = append(history, message)
		} else {
			history = []events.Message{message}
		}
		mqtt.processEvents.Store(deviceID, history)
		// Check the last value after 5 seconds (avoiding the possible burst
		// of events for a specific container).
		go func(deviceID string) {
			time.Sleep(time.Second * 5)
			if val, ok := mqtt.processEvents.Load(deviceID); ok {
				history := val.([]events.Message)
				last := history[len(history)-1]
				// for _, last := range history {
				if lastNotified, ok := mqtt.lastProcessEventNotified.Load(deviceID); ok {
					// NOTE(review): lastProcessEventNotified is only ever
					// stored once (on the first event below); it is not
					// updated after a reported change, so later comparisons
					// are still made against the first event — verify that
					// this is intended.
					if mqtt.hasDeviceDifferences(lastNotified.(events.Message), last) {
						stat := mqtt.deviceActionToStatus(last.Action)
						rErr := mqtt.reportDeviceStateChange(deviceID, stat)
						if rErr != nil {
							g.Log.Error("failed to report device state change", rErr)
							return
						}
						g.Log.Info("device status reported ", stat, deviceID)
					}
				} else {
					// First event seen for this device: remember it and report.
					mqtt.lastProcessEventNotified.Store(deviceID, last)
					stat := mqtt.deviceActionToStatus(last.Action)
					rErr := mqtt.reportDeviceStateChange(deviceID, stat)
					if rErr != nil {
						g.Log.Error("failed to report device state change", rErr)
						return
					}
					g.Log.Info("device with no history yet; status reported ", stat, deviceID)
				}
			}
		}(deviceID)
	}
	return nil
}
// deviceActionToStatus converts a docker event action name into a process
// status string; unknown actions are passed through unchanged.
func (mqtt *mqttManager) deviceActionToStatus(lastAction string) string {
	switch lastAction {
	case ProcessActionDie:
		return models.ProcessStatusRestarting
	case ProcessActionStart:
		return models.ProcessStatusRunning
	default:
		return lastAction
	}
}
// reportDeviceStateChange publishes a state-change telemetry message for a
// single device — either an RTSP camera process or an application container —
// to the cloud. Events for containers unknown to either service are ignored.
func (mqtt *mqttManager) reportDeviceStateChange(deviceID string, status string) error {
	tp := models.MQTTProcessType(models.ProcessTypeUnknown)
	var imageTag string
	var rtmpEndpoint string
	var rtspEndpoint string
	device, err := mqtt.processService.Info(deviceID)
	if err != nil {
		// Check if it is an application (prevents reporting of events
		// that are not related to chrysalis-managed containers).
		if err == models.ErrProcessNotFoundDatastore || err == models.ErrProcessNotFound {
			proc, pErr := mqtt.appService.Info(deviceID)
			if pErr != nil {
				if pErr == models.ErrProcessNotFoundDatastore || pErr == models.ErrProcessNotFound {
					// Unknown container: deliberately ignored, not an error.
					return nil
				}
				g.Log.Error("failed to find application for reporting state change", pErr)
				return pErr
			}
			tp = models.MQTTProcessType(models.ProcessTypeApplication)
			imageTag = utils.ImageTagPartToString(proc.DockerHubUser, proc.DockerhubRepository, proc.DockerHubVersion)
		} else {
			g.Log.Error("failed to retrieve device info for reporting state change", err)
			return err
		}
	} else {
		// Known RTSP process: include its stream endpoints in the report.
		tp = models.MQTTProcessType(models.ProcessTypeRTSP)
		rtmpEndpoint = device.RTMPEndpoint
		rtspEndpoint = device.RTSPEndpoint
	}
	sett, err := mqtt.settingsService.Get()
	if err != nil {
		g.Log.Error("failed to retrieve settings", err)
		return err
	}
	// Created is reported in milliseconds since epoch.
	mqttMsg := &models.MQTTMessage{
		DeviceID:         deviceID,
		ImageTag:         imageTag,
		RTMPEndpoint:     rtmpEndpoint,
		RTSPConnection:   rtspEndpoint,
		State:            status,
		Created:          time.Now().UTC().Unix() * 1000,
		ProcessOperation: models.MQTTProcessOperation(models.DeviceOperationState),
		ProcessType:      tp,
	}
	pErr := utils.PublishMonitoringTelemetry(sett.GatewayID, (*mqtt.client), mqttMsg)
	if pErr != nil {
		g.Log.Error("Failed to publish monitoring telemetry", pErr)
		return pErr
	}
	return nil
}
// gatewayState reports the gateway state to ChrysalisCloud (the way for the
// entire gateway to check in) and publishes a timestamp to the core IoT
// state topic.
func (mqtt *mqttManager) gatewayState(gatewayID string) error {
	// Report state to coreiot (this can be removed also).
	stateTopic := fmt.Sprintf("/devices/%s/state", gatewayID)
	checkInPayload := fmt.Sprintf("%d", time.Now().Unix())
	token := (*mqtt.client).Publish(stateTopic, 1, false, checkInPayload)
	if token.Wait() && token.Error() != nil {
		g.Log.Error("failed to publish initial gateway payload", token.Error())
		return token.Error()
	}
	// Report state to chrysalis cloud.
	g.Log.Info("Gateway state reported", time.Now())
	msg := &models.MQTTMessage{
		Created:          time.Now().UTC().Unix() * 1000,
		ProcessOperation: models.MQTTProcessOperation(models.GatewayOperationCheckIn),
	}
	if pErr := utils.PublishMonitoringTelemetry(gatewayID, (*mqtt.client), msg); pErr != nil {
		g.Log.Error("Failed to publish monitoring telemetry", pErr)
		return pErr
	}
	return nil
}
// subscribeToConfig subscribes to mqtt config notifications from
// ChrysalisCloud at QoS 1, routed to the manager's config handler.
func (mqtt *mqttManager) subscribeToConfig(gatewayID string) error {
	topic := fmt.Sprintf("/devices/%s/config", gatewayID)
	token := (*mqtt.client).Subscribe(topic, 1, mqtt.configHandler)
	if token.Wait() && token.Error() != nil {
		g.Log.Error("failed to subscribe to ", topic, token.Error())
		return token.Error()
	}
	g.Log.Info("Subscribed to mqtt config topic")
	return nil
}
// subscribeToCommands subscribes to the mqtt commands topic tree for this
// gateway at QoS 1.
func (mqtt *mqttManager) subscribeToCommands(gatewayID string) error {
	topic := fmt.Sprintf("/devices/%s/commands/#", gatewayID)
	// nil callback — presumably messages fall through to the client's
	// default message handler; verify against the MQTT client setup.
	token := (*mqtt.client).Subscribe(topic, 1, nil)
	if token.Wait() && token.Error() != nil {
		g.Log.Error("Failed to subscribe to mqtt commands", topic, token.Error())
		return token.Error()
	}
	return nil
}
// bindDevice binds a single device to this gateway and reports its current
// state. It returns an error when the device cannot be found or when the
// attach operation fails.
func (mqtt *mqttManager) bindDevice(deviceID string, processType models.MQTTProcessType) error {
	device, err := mqtt.processService.Info(deviceID)
	if err != nil {
		return err
	}
	// Created is reported in milliseconds since epoch.
	mqttMsg := &models.MQTTMessage{
		DeviceID:         device.Name,
		ImageTag:         device.ImageTag,
		RTMPEndpoint:     device.RTMPEndpoint,
		RTSPConnection:   device.RTSPEndpoint,
		State:            device.State.Status,
		Created:          time.Now().UTC().Unix() * 1000,
		ProcessOperation: models.MQTTProcessOperation(models.DeviceOperationAdd),
		ProcessType:      processType,
	}
	attErr := utils.AttachDeviceToGateway(mqtt.gatewayID, (*mqtt.client), mqttMsg)
	if attErr != nil {
		g.Log.Error("failed to attach ", device.Name, "to this gateway", attErr)
		// Bug fix: the attach error was previously logged and swallowed
		// (nil was returned). Propagate it so callers can react, matching
		// the error handling in bindAllDevices.
		return attErr
	}
	return nil
}
// unbindDevice unbinds a single device from this gateway. It returns an
// error when settings cannot be read or when the detach operation fails.
func (mqtt *mqttManager) unbindDevice(deviceID string, processType models.MQTTProcessType) error {
	set, err := mqtt.settingsService.Get()
	if err != nil {
		return err
	}
	// Created is reported in milliseconds since epoch.
	mqttMsg := &models.MQTTMessage{
		DeviceID:         deviceID,
		Created:          time.Now().UTC().Unix() * 1000,
		ProcessOperation: models.MQTTProcessOperation(models.DeviceOperationRemove),
		ProcessType:      processType,
	}
	attErr := utils.DetachGatewayDevice(set.GatewayID, (*mqtt.client), mqttMsg)
	if attErr != nil {
		g.Log.Error("failed to dettach ", deviceID, "from this gateway", attErr)
		// Bug fix: the detach error was previously logged and swallowed
		// (nil was returned). Propagate it so callers can react.
		return attErr
	}
	return nil
}
// bindAllDevices lists all processes (running ones) and binds each of them
// to this gateway. If any attach fails, the last attach error is returned
// after all devices have been attempted.
func (mqtt *mqttManager) bindAllDevices() error {
	all, err := mqtt.processService.List()
	if err != nil {
		g.Log.Error("failed to list all processes", err)
		return err
	}
	var hasErr error
	for _, device := range all {
		// Only RTSP edge-proxy containers are recognized by image tag;
		// everything else is reported with an unknown process type.
		processType := models.ProcessTypeUnknown
		if strings.Contains(device.ImageTag, "chrysedgeproxy") {
			processType = models.ProcessTypeRTSP
		}
		msg := &models.MQTTMessage{
			DeviceID:         device.Name,
			ImageTag:         device.ImageTag,
			Created:          device.Created,
			State:            device.State.Status,
			ProcessOperation: models.MQTTProcessOperation(models.DeviceOperationAdd),
			ProcessType:      models.MQTTProcessType(processType),
		}
		if attErr := utils.AttachDeviceToGateway(mqtt.gatewayID, (*mqtt.client), msg); attErr != nil {
			g.Log.Error("failed to attach device", device.Name, attErr)
			hasErr = attErr
		}
	}
	return hasErr
}
// hasDeviceDifferences reports whether the previously reported device event
// differs from the current one. Only the important fields are compared, via
// a signature built from the event status and actor/container ID.
func (mqtt *mqttManager) hasDeviceDifferences(stored events.Message, current events.Message) bool {
	return extractDeviceSignature(stored) != extractDeviceSignature(current)
}
// extractDeviceSignature builds an md5 fingerprint of a docker event from
// its main fields (status and actor/container ID). md5 is used only for
// cheap change detection here, not for any security purpose.
func extractDeviceSignature(processMsg events.Message) string {
	sum := md5.Sum([]byte(processMsg.Status + processMsg.Actor.ID))
	return hex.EncodeToString(sum[:])
}
|
# Provision the VM with Ansible. EPEL must be installed first, in a separate
# command, because the ansible package is distributed through the EPEL
# repository and is not visible until that repo is enabled.
sudo yum install -y epel-release
sudo yum install -y ansible
# Run the playbook from the Vagrant shared folder.
cd /vagrant
ansible-playbook playbook.yml
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.