text stringlengths 1 1.05M |
|---|
package com.codefinity.microcontinuum.xservice;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.codefinity.microcontinuum.xservice.messaging.Sender;
import com.codefinity.microcontinuum.xservice.remotecalls.YService;
import com.codefinity.microcontinuum.xservice.remotecalls.dto.User;
/**
 * Smoke-test REST endpoints for the X-microservice's Spring Cloud wiring:
 * Config Server values, Bus-driven refresh, Feign calls to the Y-service,
 * Cloud Stream messaging, ELK logging and Sleuth tracing.
 */
@RestController
// @RefreshScope re-binds the @Value fields when a /refresh (or bus refresh) command is issued.
@RefreshScope
public class TestController {

    private static final Logger logger = LoggerFactory.getLogger(TestController.class);

    // Property served by the Config Server; re-read on refresh.
    @Value("${x-microservice.config-test-value}")
    private String testProperty;

    // Property refreshed cluster-wide via a Spring Cloud Bus refresh.
    @Value("${x-microservice.config-busrefresh-test-value}")
    private String testBusProperty;

    // Feign client for the Y-microservice.
    @Autowired
    private YService yService;

    // Cloud Stream message sender (RabbitMQ binding).
    @Autowired
    private Sender sender;

    /** Liveness check: confirms the service is up and responding. */
    @RequestMapping(value = "/hello")
    public String testService() {
        return "X-MicroService Working";
    }

    /** Returns the Config Server backed property (verifies config refresh). */
    @RequestMapping(value = "/configservertest")
    public String testProperty() {
        return testProperty;
    }

    /** Returns the Bus-refreshed property (verifies bus-refresh propagation). */
    @RequestMapping(value = "/configserverbusrefreshtest")
    public String configServerBusRefreshTest() {
        return testBusProperty;
    }

    /** Fetches a user from the Y-service via Feign (verifies load balancing). */
    @RequestMapping(value = "/feigntest/{id}")
    public User testLoadBalancing(@PathVariable("id") Long id) {
        return yService.getUser(id);
    }

    /*@RequestMapping(value = "/testloadbalancing/{id}")
    public User testLoadBalancing(@PathVariable("id") Long id) {
    return yService.getUser(id);
    }*/

    /** Publishes a message over Cloud Stream (verifies the RabbitMQ binding). */
    @RequestMapping(value = "/cloudstreamingtest")
    public String testSpringCloudStreaming() {
        logger.info("Controller: Sleuth Message at X-Microservice for RabbitMQ");
        sender.send("Hello !");
        /*
         * Map<String, Object> message = new HashMap<String, Object>();
         * message.put("MESSAGE1", "message-1"); message.put("MESSAGE2", "message-2");
         * message.put("MESSAGE3", "message-3");
         *
         * sender.send(message);\
         */
        return "Message Sent";
    }

    /** Emits a log line for the ELK pipeline to pick up. */
    @RequestMapping(value = "/elklogtest")
    public String elkLoTest() {
        logger.info("This is a log message");
        return "Message Logged";
    }

    /** Calls the Y-service so Sleuth trace/span ids can be observed end to end. */
    @RequestMapping(value = "/sleuthtest")
    public String sleuthTest() {
        logger.info("Sleuth Message at X-Microservice");
        String message = yService.sleuthTest("Sleuth Test Message");
        logger.info("Sleuth Message End");
        return message;
    }
}
|
<gh_stars>1-10
#include "wifi-telemetry/wpa/wpa_event_dpp_conf_received.hpp"
// Constructs the event wrapper, tagging the base WpaEvent with the
// DppConfigurationReceived type; no additional payload is carried here.
WpaEventDppConfigurationReceived::WpaEventDppConfigurationReceived(void) :
    WpaEvent(WpaEventType::DppConfigurationReceived)
{}
|
<filename>poker-hands/avr-c/test/test_harness.py
import serial
import struct
# Card byte layout: bits 0-3 = rank (2..14, ace high), bits 4-5 = suit,
# bit 7 = wild flag (unused for now).
card_face = {
    2 : '2',
    3 : '3',
    4 : '4',
    5 : '5',
    6 : '6',
    7 : '7',
    8 : '8',
    9 : '9',
    10 : 'T',
    11 : 'J',
    12 : 'Q',
    13 : 'K',
    14 : 'A'
}
# Suit codes: clubs, diamonds, hearts, spades.
card_suit = {
    0 : 'c',
    1 : 'd',
    2 : 'h',
    3 : 's'
}
# Hand-rank codes returned by the device as the 6th output byte.
hand_type = {
    0 : 'none',
    1 : 'high_card',
    2 : 'pair',
    3 : 'two_pair',
    4 : 'three_of_a_kind',
    5 : 'straight',
    6 : 'flush',
    7 : 'full_house',
    8 : 'four_of_a_kind',
    9 : 'straight_flush'
}
# ord() of a one-byte bytes literal yields the integer mask value.
rank_mask = ord(b'\x0f')
suit_mask = ord(b'\x30')
wild_mask = ord(b'\x80') # not used for now
# Test vectors: 'hand' is the 5 card bytes sent to the ranker; 'expected'
# is the 5 cards re-ordered by significance plus the hand-type byte.
hands = [
    {
        'name' : 'high_card',
        'hand' : b'\x03\x15\x09\x2a\x1c',
        'expected': b'\x1c\x2a\x09\x15\x03\x01'
    },
    {
        'name' : 'pair',
        'hand' : b'\x03\x13\x09\x2a\x1c',
        'expected': b'\x03\x13\x1c\x2a\x09\x02'
    },
    {
        'name' : 'two_pair',
        'hand' : b'\x03\x13\x29\x39\x1c',
        'expected': b'\x29\x39\x03\x13\x1c\x03'
    },
    {
        'name' : 'three_of_a_kind',
        'hand' : b'\x1c\x13\x29\x39\x19',
        'expected': b'\x29\x39\x19\x1c\x13\x04'
    },
    {
        'name' : 'straight',
        'hand' : b'\x32\x05\x23\x34\x16',
        'expected': b'\x16\x05\x34\x23\x32\x05'
    },
    {
        'name' : 'straight_ace_low',
        'hand' : b'\x32\x05\x23\x34\x1e',
        'expected': b'\x05\x34\x23\x32\x1e\x05'
    },
    {
        'name' : 'flush',
        'hand' : b'\x23\x25\x29\x2a\x2c',
        'expected': b'\x2c\x2a\x29\x25\x23\x06'
    },
    {
        'name' : 'full_house',
        'hand' : b'\x13\x03\x29\x39\x19',
        'expected': b'\x29\x39\x19\x13\x03\x07'
    },
    {
        'name' : 'four_of_a_kind',
        'hand' : b'\x1c\x09\x29\x39\x19',
        'expected': b'\x09\x29\x39\x19\x1c\x08'
    },
    {
        'name' : 'straight_flush',
        'hand' : b'\x32\x35\x33\x34\x36',
        'expected': b'\x36\x35\x34\x33\x32\x09'
    },
    {
        'name' : 'straight_flush_ace_low',
        'hand' : b'\x32\x35\x33\x34\x3e',
        'expected': b'\x35\x34\x33\x32\x3e\x09'
    }
]
def get_rank(c):
    """Extract the rank (2-14) from a packed card byte: the low nibble."""
    return c & 0x0F  # same value as module-level rank_mask
def get_suit(c):
    """Extract the suit code (0-3) from bits 4-5 of a packed card byte."""
    return (c >> 4) & 0x03  # equivalent to (c & suit_mask) >> 4
def get_wild(c): # not used for now
    """Extract the wild flag (0 or 1) from bit 7 of a packed card byte."""
    return (c >> 7) & 0x01  # equivalent to (c & wild_mask) >> 7
def check_for_ack(b):
    """Validate the single ack byte (0xFE) read from the device.

    Raises on a timeout (empty read) or any unexpected byte.
    """
    if b == b'\xfe':
        return
    raise Exception('Ranker 4000 did not send back ack - timed out')
def print_cards(output):
    """Pretty-print each packed card byte as rank+suit, e.g. 'Ts' for ten of spades.

    Relies on the module-level card_face/card_suit lookup tables.
    """
    for c in output:
        print('{0}{1}'.format(card_face[get_rank(c)], card_suit[get_suit(c)]))
# Drive the hardware poker ranker over a serial link and compare its output
# against the expected vectors above. Indentation reconstructed from the
# protocol flow -- verify against the original script.
ser = serial.Serial('/dev/ttyUSB0', 115200, timeout=3)
numtest = 0
numpass = 0
for test in hands:
    numtest += 1
    print('running ' + test['name'])
    # NOTE(review): this prints before the write below actually happens.
    print('sent ready flag\n')
    ser.write(b'\xff') # send ready flag
    check_for_ack(ser.read())
    print('ranker 4000 responded with ack\n')
    hand = test['hand']
    expected = test['expected']
    print('sending:')
    print_cards(hand)
    print()
    print('expected output:')
    print_cards(expected[:5])
    print(hand_type[expected[-1]])
    print()
    # Send the five card bytes one at a time, each acknowledged.
    for c in hand:
        ba = bytearray(1)
        ba[0] = c
        ser.write(ba)
        check_for_ack(ser.read())
    # Extra ack once the device has ranked the hand.
    check_for_ack(ser.read())
    print('ranker 4000 responded with ack\n')
    # 5 re-ordered cards + 1 hand-type byte.
    output = ser.read(6)
    print('received')
    print_cards(output[:5])
    print(hand_type[output[-1]])
    print()
    check_for_ack(ser.read())
    print('ranker 4000 responded with ack\n')
    print('verifying output')
    if expected == output:
        numpass += 1
        print('output matches expected output')
    else:
        print('output does not match expected output')
    print()
    print('-------------------------------------------')
    print()
if numpass == numtest:
    print('all tests passed')
else:
    print('failed test cases')
|
#!/bin/bash
# Submit the ten JFD jump-removal jobs (q_JFD1.sh .. q_JFD10.sh).
# Abort if the qsub directory is missing so we never submit from the
# wrong working directory (the bare `cd` silently continued on failure).
cd /home/hnoorazar/NASA/02_remove_outliers_n_jumps/01_intersect_remove_jumps_JFD/qsubs/ || exit 1
for runname in {1..10}
do
qsub "./q_JFD${runname}.sh"
done
|
<gh_stars>0
var elixir = require('laravel-elixir');
/*
 |--------------------------------------------------------------------------
 | Elixir Asset Management
 |--------------------------------------------------------------------------
 |
 | Elixir provides a clean, fluent API for defining some basic Gulp tasks
 | for your Laravel application. By default, we are compiling the Less
 | file for our application, as well as publishing vendor resources.
 |
 */
elixir(function(mix) {
    // Compile the app's Less entry point.
    mix.less('app.less');
    // Publish the clean-blog Bootstrap theme assets from Bower into public/.
    mix.copy('vendor/bower_components/startbootstrap-clean-blog/fonts', 'public/fonts')
    mix.copy('vendor/bower_components/startbootstrap-clean-blog/css/bootstrap.min.css', 'public/css/bootstrap.min.css')
    mix.copy('vendor/bower_components/startbootstrap-clean-blog/css/clean-blog.min.css', 'public/css/clean-blog.min.css')
    mix.copy('vendor/bower_components/startbootstrap-clean-blog/js/jquery.js', 'public/js/jquery.js')
    mix.copy('vendor/bower_components/startbootstrap-clean-blog/js/bootstrap.min.js', 'public/js/bootstrap.min.js')
    mix.copy('vendor/bower_components/startbootstrap-clean-blog/js/clean-blog.min.js', 'public/js/clean-blog.min.js')
    // Concatenate application scripts and cache-bust the bundle.
    mix.scripts([
        'app.js'
    ], 'public/js/app.js');
    mix.version(['public/js/app.js']);
});
|
#!/bin/sh
set -e
mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain resources to rsync after the per-type dispatch below.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
# Copy/compile one pod resource, dispatching on its file extension.
# Interface builder documents are compiled, data models are momc/mapc
# compiled, frameworks are rsynced, and everything else is queued in
# $RESOURCES_TO_COPY for a single rsync at the end of the script.
install_resource()
{
  case $1 in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
      ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
      ;;
    *.framework)
      echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync -av ${PODS_ROOT}/$1 ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      rsync -av "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      # Fixed: the echoed command previously omitted the ".xcdatamodel"
      # basename suffix strip and so did not match the command actually run.
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodel`.mom\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd\""
      xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm\""
      xcrun mapc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Asset catalogs are compiled in one actool pass at the end of the script.
      ;;
    /*)
      # Absolute path: copy as-is.
      echo "$1"
      echo "$1" >> "$RESOURCES_TO_COPY"
      ;;
    *)
      # Relative path: resolve against the Pods root.
      echo "${PODS_ROOT}/$1"
      echo "${PODS_ROOT}/$1" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
install_resource "PubNub/iOS/iPadDemoApp/pubnub/libs/PubNub/Resources/PNLoggerSymbols.plist"
# Copy all queued plain resources into the built product in one pass.
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]]; then
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
# Compile any asset catalogs with actool, targeting the configured device family.
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ `find . -name '*.xcassets' | wc -l` -ne 0 ]
then
  case "${TARGETED_DEVICE_FAMILY}" in
    1,2)
      TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
      ;;
    1)
      TARGET_DEVICE_ARGS="--target-device iphone"
      ;;
    2)
      TARGET_DEVICE_ARGS="--target-device ipad"
      ;;
    *)
      TARGET_DEVICE_ARGS="--target-device mac"
      ;;
  esac
  # NOTE(review): output goes to BUILT_PRODUCTS_DIR while the rsync above
  # used CONFIGURATION_BUILD_DIR -- usually identical, confirm for archives.
  find "${PWD}" -name "*.xcassets" -print0 | xargs -0 actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${IPHONEOS_DEPLOYMENT_TARGET}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
import { getBuffers } from "@/connector/buffers"
import { colorize, colorizeFile } from "@/fzf/syntax/colorize"
import { globalVariableSelector } from "@/module/selector/vim-variable"
import { readFile } from "@/system/file"
import type { FzfCommandDefinitionDefaultOption, Resource, ResourceLines, SourceFuncArgs } from "@/type"
/**
 * fzf source: every line of every open buffer, one resource entry per line.
 * Each entry carries the data needed to jump to file:lineNumber plus a
 * colorized display string.
 */
export const bufferLines = async (_args: SourceFuncArgs): Promise<Resource> => {
  const buffers = await getBuffers()
  const lines = buffers.reduce((acc: ResourceLines, cur) => {
    // split("\n") leaves a trailing empty element for the final newline;
    // slice(0, -1) drops it.
    const fileLines = readFile(cur.fileName)
      .split("\n")
      .map((line, lineIndex) => ({
        lineNumber: lineIndex + 1,
        text: line,
      }))
      .slice(0, -1)
    const resourceLines: ResourceLines = fileLines.map((line) => ({
      data: {
        command: "FzfPreviewBufferLines",
        type: "line",
        file: cur.fileName,
        text: line.text,
        lineNumber: line.lineNumber,
      },
      displayText: `${colorizeFile(cur.fileName)}:${colorize(line.lineNumber.toString(), "green")}:${line.text}`,
    }))
    return [...acc, ...resourceLines]
  }, [])
  return {
    type: "json",
    lines,
  }
}
// Builds the fzf preview command from the user-configured grep preview
// template; "{}" is replaced by fzf with the selected line.
const previewCommand = () => {
  const grepPreviewCommand = globalVariableSelector("fzfPreviewGrepPreviewCmd") as string
  return `"${grepPreviewCommand} {}"`
}

// Default fzf options for the BufferLines command (multi-select enabled).
export const bufferLinesDefaultOptions = (): FzfCommandDefinitionDefaultOption => ({
  "--prompt": '"BufferLines> "',
  "--multi": true,
  "--preview": previewCommand(),
})
|
<reponame>survivejs/lunr-demo
const fs = require('fs');
const path = require('path');
const lunr = require('lunr');
const removeMarkdown = require('remove-markdown');
main();
// Reads the repository README and prints a JSON document containing both
// the raw lines and a prebuilt lunr search index over them.
function main() {
    const readmePath = path.resolve(__dirname, '..', 'README.md');
    console.log(JSON.stringify(generateIndex(
        fs.readFileSync(readmePath, {
            encoding: 'utf-8'
        })
    )));
}
// Builds { lines, index }: lines are the markdown-stripped non-empty lines
// (skipping the title line), index is a lunr index keyed by line number.
function generateIndex(file) {
    // Skip index and empty lines.
    const lines = file.split('\n').slice(1).filter(id).map(removeMarkdown);
    const index = lunr(function() {
        this.ref('id');
        this.field('line');
        lines.forEach((line, i) => {
            this.add({
                id: i, // Line number
                line: line
            });
        });
    });
    return {
        lines: lines,
        index: index
    };
}
// Identity helper used as a truthiness filter (drops empty strings).
function id(a) {return a;};
|
#!/bin/bash
# MIT License
#
# Copyright (C) 2019-2020, Entynetproject. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ANSI color/style codes used by the status messages below.
N="\033[1;37m"
C="\033[0m"
CE="\033[0m"
RS="\033[1;31m"
YS="\033[1;33m"
BS="\033[1;34m"
GNS="\033[1;32m"
R="\033[1;31m"
WS="\033[0m"
# Set the terminal title.
printf '\033]2;install.sh\a'
# Root is required for the package-manager calls and /usr writes below.
if [[ $EUID -ne 0 ]]
then
sleep 1
echo -e ""$RS"[-]"$WS" This script must be run as root!"$CE"" 1>&2
sleep 1
exit
fi
# Clone the repository on first run only.
if [[ -d ~/quack ]]
then
sleep 0
else
cd ~
{
git clone https://github.com/entynetproject/quack.git
} &> /dev/null
fi
sleep 0.5
clear
sleep 0.5
echo
cd ~/quack
cat banner/banner.txt
echo
sleep 1
echo -e ""$BS"[*]"$WS" Installing dependencies..."$CE""
sleep 1
# Shotgun dependency install: invoke every known package manager; the ones
# absent on this system fail silently (all output discarded).
{
pkg update
pkg -y install git
pkg -y install python
apt-get update
apt-get -y install git
apt-get -y install python3
apt-get -y install python3-pip
apk update
apk add git
apk add python3
apk add py3-pip
pacman -Sy
pacman -S --noconfirm git
pacman -S --noconfirm python3
pacman -S --noconfirm python3-pip
zypper refresh
zypper install -y git
zypper install -y python3
zypper install -y python3-pip
yum -y install git
yum -y install python3
yum -y install python3-pip
dnf -y install git
dnf -y install python3
dnf -y install python3-pip
eopkg update-repo
eopkg -y install git
eopkg -y install python3
eopkg -y install pip
xbps-install -S
xbps-install -y git
xbps-install -y python3
xbps-install -y python3-pip
} &> /dev/null
# Python dependencies for quack itself.
{
python3 -m pip install setuptools
python3 -m pip install -r requirements.txt
} &> /dev/null
# Install the launcher into every plausible bin directory (Linux + Termux).
{
cd ~/quack/bin
cp quack /usr/local/bin
chmod +x /usr/local/bin/quack
cp quack /bin
chmod +x /bin/quack
cp quack /data/data/com.termux/files/usr/bin
chmod +x /data/data/com.termux/files/usr/bin/quack
} &> /dev/null
sleep 1
echo -e ""$GNS"[+]"$WS" Successfully installed!"$CE""
sleep 1
|
import { Injectable } from '@angular/core';
import { CanActivate } from '@angular/router';
import { LeagueService } from '../services';
import { Observable } from 'rxjs/Rx';
import { ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router';
/**
 * Route guard for league-editing pages: allows activation only when the
 * current user owns the league identified by the route's `leagueId` param.
 */
@Injectable()
export class EditLeagueGuard implements CanActivate {
  constructor(
    private leagueService: LeagueService)
  {}

  // Resolves asynchronously via the service's ownership check.
  canActivate(destination: ActivatedRouteSnapshot,
              state: RouterStateSnapshot) : Observable<boolean> {
    return this.leagueService.isOwner(destination.params.leagueId);
  }
}

// Convenience provider list for module registration.
export const EDIT_LEAGUE_GUARD_PROVIDERS: Array<any> = [
  EditLeagueGuard
];
|
extern crate rand;
fn main() {
let mut rng = rand::thread_rng();
let username: String = format!("user_{}", rng.gen_range(0, 10000));
let password: String = format!("pass_{}", rng.gen_range(10000, 20000));
println!("Username: {}", username);
println!("Password: {}", password);
} |
import * as React from 'react';
import { render } from '@testing-library/react';
import { ModalContent } from '../ModalContent';
// Shared ids required by ModalContent's accessibility wiring.
const modalContentProps = {
  boxId: 'boxId',
  labelId: 'labelId',
  descriptorId: 'descriptorId'
};

// NOTE(review): this test and the next render identical props despite the
// "only body" name -- likely a copy/paste duplicate; confirm intent.
test('Modal Content Test only body', () => {
  const { asFragment } = render(
    <ModalContent title="Test Modal Content title" isOpen {...modalContentProps}>
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

test('Modal Content Test isOpen', () => {
  const { asFragment } = render(
    <ModalContent title="Test Modal Content title" isOpen {...modalContentProps}>
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

// Snapshot with the optional description text rendered.
test('Modal Content Test description', () => {
  const { asFragment } = render(
    <ModalContent
      title="Test Modal Content title"
      isOpen
      description="This is a test description."
      {...modalContentProps}
    >
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

// Snapshot with action buttons supplied (renders a footer).
test('Modal Content Test with footer', () => {
  const { asFragment } = render(
    <ModalContent title="Test Modal Content title" isOpen actions={['Testing']} {...modalContentProps}>
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

// Snapshot without actions (no footer rendered).
test('Modal Content test without footer', () => {
  const { asFragment } = render(
    <ModalContent title="Test Modal Content title" isOpen {...modalContentProps}>
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

// Snapshot with an onClose handler and large variant.
test('Modal Content Test with onclose', () => {
  const { asFragment } = render(
    <ModalContent
      title="Test Modal Content title"
      actions={['Testing footer']}
      variant="large"
      onClose={() => undefined}
      isOpen
      {...modalContentProps}
    >
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

// Snapshot with a caller-supplied custom header element.
test('Modal Test with custom header', () => {
  const header = <span id="test-custom-header">TEST</span>;
  const { asFragment } = render(
    <ModalContent
      header={header}
      title="test-custom-header-modal"
      actions={['Testing footer']}
      variant="large"
      onClose={() => undefined}
      isOpen
      {...modalContentProps}
    >
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});

// Snapshot with a caller-supplied custom footer element.
test('Modal Test with custom footer', () => {
  const footer = <span id="test-custom-footer">TEST</span>;
  const { asFragment } = render(
    <ModalContent
      footer={footer}
      title="Test Modal Custom Footer"
      variant="large"
      onClose={() => undefined}
      isOpen
      {...modalContentProps}
    >
      This is a ModalBox header
    </ModalContent>
  );
  expect(asFragment()).toMatchSnapshot();
});
|
<gh_stars>100-1000
package fetch
import (
"context"
"fmt"
"net/http"
"os"
"sync"
"time"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
// Config provides basic configuration for the fetch service.
type Config struct {
	Host    string // address the HTTP server binds to (host:port)
	Version string // version string printed at startup
}

// HTMLServer represents the web service that serves up HTML.
type HTMLServer struct {
	server *http.Server
	wg     sync.WaitGroup // tracks the listener goroutine so Stop can wait for it
}
// Start func launches Parsing service
func Start(cfg Config) *HTMLServer {
// Setup Context
ctx := context.Background()
_, cancel := context.WithCancel(ctx)
defer cancel()
encoderCfg := zapcore.EncoderConfig{
TimeKey: "ts",
MessageKey: "msg",
LevelKey: "level",
NameKey: "fetcher",
EncodeLevel: zapcore.CapitalColorLevelEncoder,
EncodeTime: zapcore.ISO8601TimeEncoder,
EncodeDuration: zapcore.StringDurationEncoder,
EncodeName: zapcore.FullNameEncoder,
}
core := zapcore.NewCore(zapcore.NewConsoleEncoder(encoderCfg), os.Stdout, zapcore.DebugLevel)
logger := zap.New(core)
defer logger.Sync() // flushes buffer, if any
var svc Service
svc = FetchService{}
//svc = RobotsTxtMiddleware()(svc)
//svc = LoggingMiddleware(logger)(svc)
endpoints := endpoints{
fetchEndpoint: makeFetchEndpoint(svc),
}
r := newHttpHandler(ctx, endpoints)
// Create the HTML Server
htmlServer := HTMLServer{
server: &http.Server{
Addr: cfg.Host,
Handler: r,
MaxHeaderBytes: 1 << 20,
},
}
// Add to the WaitGroup for the listener goroutine
htmlServer.wg.Add(1)
go func() {
fmt.Printf("\n%s\nStarting ...%s",
cfg.Version,
htmlServer.server.Addr,
)
htmlServer.server.ListenAndServe()
htmlServer.wg.Done()
}()
return &htmlServer
}
// Stop turns off the HTML Server
func (htmlServer *HTMLServer) Stop() error {
// Create a context to attempt a graceful 5 second shutdown.
const timeout = 5 * time.Second
ctx, cancel := context.WithTimeout(context.Background(), timeout)
defer cancel()
fmt.Printf("\nFetch Server : Service stopping\n")
// Attempt the graceful shutdown by closing the listener
// and completing all inflight requests
if err := htmlServer.server.Shutdown(ctx); err != nil {
// Looks like we timed out on the graceful shutdown. Force close.
if err := htmlServer.server.Close(); err != nil {
fmt.Printf("\nFetch Server : Service stopping : Error=%v\n", err)
return err
}
}
// Wait for the listener to report that it is closed.
htmlServer.wg.Wait()
fmt.Printf("\nFetch Server : Stopped\n")
return nil
}
|
<reponame>mohamedpop871/Telemetry-System
/*
* Timer1.c
*
* Created: 10/14/2019 11:34:16 PM
* Author: Mohamed_Hassanin
*/
#include "Timer1.h"
/*
 * Configure Timer1 as the telemetry tick: compare-match-A interrupt with
 * OCR1A = 2000 as the period. (The old comment said "overflow"; OCIE1A is
 * the compare-match-A interrupt enable bit.)
 */
void
Timer1_Init(void)
{
  TIMSK1 |= 1 << OCIE1A; /* enable compare-match-A interrupt */
  TCCR1B |= 1 << WGM12;  /* CTC mode: counter clears at OCR1A */
  OCR1A = 2000;          /* compare value -> interrupt period */
  TCCR1B |= 1 << CS10 | 1 << CS12; /* start timer; CS12|CS10 = clk/1024 on
                                      ATmega-class AVRs -- confirm datasheet */
}
/*
 * Timer1 compare-match ISR: once per timer period, stream the latest
 * telemetry frame (RPM, speed, current) over USART, terminated by 0x1B.
 * Send/After_Send/RPM/KMPH/current are globals defined elsewhere --
 * presumably maintained by the Hall-sensor ISR; confirm in the main module.
 */
ISR
(TIMER1_COMPA_vect)
{
  /* enable nested interrupts for Hall sensor */
  sei();
  if (Send == 1)
  {
    After_Send = 1;
    /* byte 0x1B means the end of the frame, so the encoders below must
       never emit it as data (digits are sent as ASCII '0'..'9') */
    send_RPM(RPM); /* Sending the RPM value */
    send_KMPH(KMPH); /* Sending the KMPH value */
    send_Current(current); /* Sending the Current value */
    USART_TX(0x1B); /* Send delimiter -> End of Package */
  }
}
/*
 * Transmit the three least-significant decimal digits of RPM over USART,
 * least-significant first, each as its ASCII character. Values >= 1000 are
 * truncated to their low three digits (3-digit protocol field).
 *
 * Fix: the old tens-digit formula `(RPM / 10) - RPMDig3 * 10` was only
 * correct for RPM < 1000 and emitted garbage bytes above that;
 * `(RPM / 10) % 10` is identical below 1000 and well-defined everywhere.
 */
void
send_RPM(uint32_t RPM)
{
  USART_TX( RPM % 10 + 0x30);         //Send X0: units digit
  USART_TX( (RPM / 10) % 10 + 0x30);  //Send X1: tens digit
  USART_TX( (RPM / 100) % 10 + 0x30); //Send X2: hundreds digit
}
/*
 * Transmit speed as four ASCII digits, least-significant first:
 * X0,X1 = two fractional digits, X2,X3 = two integer digits.
 * NOTE(review): the float scaling (KMPH - (int)KMPH) * 100 can lose 1 in
 * the last digit to rounding (e.g. .35 -> 34.999...); confirm tolerance.
 */
void
send_KMPH(float KMPH)
{
  float KMPHDig10 = (KMPH - (int) KMPH) * 100; //Getting Digits X1 & X0 (e.g. : 35)
  USART_TX( (uint8_t) KMPHDig10 % 10 + 0x30 ); //Send X0
  USART_TX( (uint8_t) KMPHDig10 / 10 + 0x30 ); //Send X1
  USART_TX( (uint8_t) KMPH % 10 + 0x30 ); //Send X2
  USART_TX( ((uint8_t) KMPH / 10) % 10 + 0x30 ); //Send X3
}
/*
 * Transmit current as five ASCII digits, least-significant first:
 * X0..X2 = three fractional digits, X3..X4 = two integer digits.
 * NOTE(review): the inline digit comments below look inconsistent with the
 * code (the "X3 & X2 & X1" comment describes three *fractional* digits);
 * also float->digit extraction is subject to 1-ulp rounding -- verify
 * against the receiver's decoder.
 */
void
send_Current(float current)
{
  uint8_t currentDig43 = (int) current % 100; //Getting Digits X4 & X3 (e.g. : 35)
  uint8_t currentDig4 = currentDig43 / 10;
  uint16_t currentDig210 = (current - (int) current) * 1000; //Getting Digits X3 & X2 & X1 (e.g. : 143)
  uint8_t currentDig1 = currentDig210 / 10 - (currentDig210 / 100) * 10;
  USART_TX(( (currentDig210) %10 + 0x30 )); //Send X0
  USART_TX(( (currentDig1) + 0x30 )); //Send X1
  USART_TX(( (currentDig210 / 100) + 0x30 )); //Send X2
  USART_TX(( (currentDig43 % 10 + 0x30 ))); //Send X3
  USART_TX(( (currentDig4 + 0x30))); //Send X4
}
|
package arouter.dawn.zju.edu.module_forum.ui.show_image;
import android.annotation.SuppressLint;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.viewpager.widget.ViewPager;
import androidx.appcompat.app.AlertDialog;
import android.view.View;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import com.alibaba.android.arouter.facade.annotation.Autowired;
import com.alibaba.android.arouter.facade.annotation.Route;
import java.io.File;
import java.util.ArrayList;
import arouter.dawn.zju.edu.module_forum.R;
import arouter.dawn.zju.edu.module_forum.adapter.ForumShowImageAdapter;
import baselib.base.BaseActivity;
import baselib.constants.RouteConstants;
/**
 * @Auther: Dawn
 * @Date: 2018/11/22 22:01
 * @Description:
 * Full-screen image viewer page: swipes through a list of forum images and
 * can show size/dimension details for images stored locally.
 */
@Route(path = RouteConstants.AROUTER_FORUM_SHOW_IMAGE)
public class ForumShowImageActivity extends BaseActivity<ForumShowImageContract.Presenter>
        implements View.OnClickListener, ViewPager.OnPageChangeListener, ForumShowImageContract.View {

    // Index of the image to show first, injected by ARouter.
    @Autowired(name = RouteConstants.FORUM_SHOW_IMAGE_POSTION)
    int currentPosition;

    // Bundle carrying the image path list, injected by ARouter.
    @Autowired(name = RouteConstants.FORUM_SHOW_IMAGE_BUNDLE)
    Bundle bundle;

    ArrayList<String> imageList;       // image file paths or URLs
    TextView positionTv;               // "current/total" indicator
    ViewPager imageViewPager;          // swipeable image pager
    ImageView imageDetailIv;           // "details" button (local files only)
    ForumShowImageAdapter adapter;

    @SuppressLint("DefaultLocale")
    @Override
    protected void initView() {
        positionTv = findViewById(R.id.show_image_position);
        imageViewPager = findViewById(R.id.show_image_view_pager);
        imageDetailIv = findViewById(R.id.show_image_detail);
        imageList = bundle.getStringArrayList(RouteConstants.FORUM_SHOW_IMAGE_LIST);
        findViewById(R.id.show_image_detail).setOnClickListener(this);
        adapter = new ForumShowImageAdapter(this, imageList);
        imageViewPager.setAdapter(adapter);
        imageViewPager.setOnPageChangeListener(this);
        imageViewPager.setOffscreenPageLimit(imageList.size());
        imageViewPager.setCurrentItem(currentPosition);
        positionTv.setText(String.format("%d/%d", currentPosition + 1, imageList.size()));
        // Show the detail button only when the images exist as local files.
        if (new File(imageList.get(0)).exists()) {
            imageDetailIv.setVisibility(View.VISIBLE);
        }
    }

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        // Go full screen before the base class inflates the layout.
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        super.onCreate(savedInstanceState);
    }

    @Override
    protected int getLayoutId() {
        return R.layout.activity_show_image;
    }

    @Override
    protected void bindPresenter() {
    }

    @SuppressLint("DefaultLocale")
    @Override
    public void onClick(View v) {
        int id = v.getId();
        if (id == R.id.show_image_detail) {
            // Show a dialog with file size and pixel dimensions of the current image.
            File file = new File(imageList.get(currentPosition));
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inJustDecodeBounds = true; // decode bounds only, no bitmap allocated
            BitmapFactory.decodeFile(imageList.get(currentPosition), options); // returns null; only options populated
            int outHeight =options.outHeight;
            int outWidth = options.outWidth;
            new AlertDialog.Builder(this)
                    .setMessage(String.format("大小:%d KB\n尺寸:%d x %d", (int)(file.length() / 1024), outWidth, outHeight))
                    .setPositiveButton(R.string.confirm, null)
                    .show();
        }
    }

    @Override
    public void onPageScrolled(int i, float v, int i1) {
    }

    @SuppressLint("DefaultLocale")
    @Override
    public void onPageSelected(int i) {
        // Keep the "current/total" indicator in sync with the pager.
        positionTv.setText(String.format("%d/%d", i + 1, imageList.size()));
        currentPosition = i;
    }

    @Override
    public void onPageScrollStateChanged(int i) {
    }
}
|
/*
* Copyright 2013 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.qfast.reports;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.servlet.jsp.jstl.sql.Result;
/**
 * Adapter around a JSTL SQL {@link Result} that localizes boolean cells for
 * report rendering: {@code true} becomes "نعم" (yes) and {@code false}
 * becomes "لا" (no). Column names and row data are exposed read-only.
 *
 * @author <NAME>
 */
public class ReportResult {

    private final String[] columnNames;
    private final List<Object> rows;

    /**
     * Builds the localized view of the given query result.
     *
     * @param result the query result to adapt; must not be null
     */
    public ReportResult(Result result) {
        columnNames = result.getColumnNames();
        Object[][] rowsByIndx = result.getRowsByIndex();
        // Size to the actual row count instead of a fixed 10.
        rows = new ArrayList<>(rowsByIndx.length);
        for (Object[] rowByIndx : rowsByIndx) {
            for (int j = 0; j < rowByIndx.length; j++) {
                // Replace boolean cells with their Arabic display strings.
                if (rowByIndx[j] instanceof Boolean) {
                    rowByIndx[j] = ((Boolean) rowByIndx[j]) ? "نعم" : "لا";
                }
            }
        }
        rows.addAll(Arrays.asList(rowsByIndx));
    }

    /** @return the result's column names, in query order */
    public String[] getColumnNames() {
        return columnNames;
    }

    /** @return the rows (each element is an {@code Object[]}) after localization */
    public List<Object> getRows() {
        return rows;
    }
}
|
/*-
* Copyright (c) 2018 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef _ARM_STM_STM32F4_GPIO_H_
#define _ARM_STM_STM32F4_GPIO_H_

#include <stdint.h> /* uint32_t -- makes the header self-contained */

#define GPIO_MODER(n) (0x00 + 0x400 * (n)) /* port mode */
#define MODE_INP 0 /* Input mode */
#define MODE_OUT 1 /* General purpose output mode */
#define MODE_ALT 2 /* Alternate function mode */
#define MODE_ANA 3 /* Analog mode (reset state) */
#define GPIO_OTYPER(n) (0x04 + 0x400 * (n)) /* port output type */
#define OT_PUSHPULL 0
#define OT_OPENDRAIN 1
#define GPIO_OSPEEDR(n) (0x08 + 0x400 * (n)) /* port output speed */
#define GPIO_PUPDR(n) (0x0C + 0x400 * (n)) /* port pull-up/pull-down */
#define FLOAT 0 /* No pull-up, pull-down */
#define PULLUP 1
#define PULLDOWN 2
#define GPIO_IDR(n) (0x10 + 0x400 * (n)) /* port input data */
#define GPIO_ODR(n) (0x14 + 0x400 * (n)) /* port output data */
#define GPIO_BSRR(n) (0x18 + 0x400 * (n)) /* port bit set/reset */
#define GPIO_LCKR(n) (0x1C + 0x400 * (n)) /* port configuration lock */
#define GPIO_AFRL(n) (0x20 + 0x400 * (n)) /* alternate function low */
#define GPIO_AFRH(n) (0x24 + 0x400 * (n)) /* alternate function high */
#define GPIO_BRR(n) (0x28 + 0x400 * (n)) /* port bit reset */
#define PINS_END { -1, -1, -1, -1, -1 } /* terminator for pin_configure() tables */

enum {
	PORT_A,
	PORT_B,
	PORT_C,
	PORT_D,
	PORT_E,
	PORT_F,
	PORT_G,
	PORT_H,
	PORT_I,
	PORT_J,
	PORT_K,
};

/* One entry of a pin configuration table (see PINS_END terminator). */
struct gpio_pin {
	uint32_t port;
	uint32_t pin;
	uint32_t mode;
	uint32_t alt;
	uint32_t pupdr;
};

/* Driver softc: base address of the GPIO register block. */
struct stm32f4_gpio_softc {
	uint32_t base;
};

int stm32f4_gpio_init(struct stm32f4_gpio_softc *sc, uint32_t base);
void pin_configure(struct stm32f4_gpio_softc *sc, const struct gpio_pin *pins);
void pin_set(struct stm32f4_gpio_softc *sc, uint32_t port,
    uint32_t pin, uint32_t enable);
int pin_get(struct stm32f4_gpio_softc *sc, uint32_t port, uint32_t pin);

#endif /* !_ARM_STM_STM32F4_GPIO_H_ */
|
# Start (or attach to) a tmux session named after the current directory.
# Optional $1: directory to cd into first.
mux() {
    local name cols
    if [ -n "$1" ]; then
        cd "$1" || return
    fi
    # tmux session names may not contain dots.
    name="$(basename "$PWD" | sed -e 's/\./-/g')"
    cols="$(tput cols)"
    # Test tmux's exit status directly. The original wrapped the command in
    # $(...), which *executed its (empty) output* as a command instead of
    # testing has-session -- it only worked by accident.
    if ! tmux has-session -t "$name" &>/dev/null; then
        # ${cols:-150}: fall back to 150 when tput returned an empty string
        # too (the original ${cols-150} only covered the unset case).
        tmux new-session -d -n code -s "$name" -x"${cols:-150}" -y50 && \
            tmux new-window -a -d -n tests -t "$name":0 \; \
            new-window -a -d -n server -t "$name":1 \; \
            select-layout -t "$name" main-vertical &>/dev/null
    fi
    tmux attach-session -t "$name"
}
|
def maxmin(list):
    """Return ``(maximum, minimum)`` of a non-empty sequence in one pass.

    The parameter keeps its historical name ``list`` (shadowing the builtin)
    so any keyword-argument callers remain compatible.

    Raises:
        ValueError: if the sequence is empty (previously an opaque
            IndexError escaped from ``list[0]``).
    """
    if not list:
        raise ValueError("maxmin() arg is an empty sequence")
    largest = smallest = list[0]
    for value in list[1:]:
        if value > largest:
            largest = value
        elif value < smallest:
            smallest = value
    return largest, smallest
'use strict';
const fs = require('fs');
const path = require('path');
const tryCatch = require('try-catch');
const readjson = require('readjson');
const beautify = require('js-beautify');
const DIR = __dirname + '/../';
const HOME = require('os').homedir();
// File types js-beautify can handle; beautify[ext] is the formatter entry point.
const EXT = ['js', 'css', 'html'];
const name = 'beautify.json';
const ConfigPath = path.join(DIR, 'json/' + name);
const ConfigHome = path.join(HOME, '.' + name);
// User config in $HOME wins over the bundled default; empty object if neither parses.
const config =
    readjson.sync.try(ConfigHome) ||
    readjson.sync.try(ConfigPath) || {};
module.exports = (name, callback) => {
const ext = getExt(name);
const is = ~EXT.indexOf(ext);
if (!is)
return callback(erorMsg(ext));
fs.readFile(name, 'utf8', (error, data) => {
if (!error) {
const result = tryCatch(beautify[ext], data, config);
error = result[0];
data = result[1];
}
callback(error, data);
});
};
function getExt(name) {
if (/(rc|\.json)$/.test(name))
return 'js';
return path
.extname(name)
.slice(1);
}
// Build the "unsupported file type" error.  (Name kept — callers in this
// module reference `erorMsg`.)
function erorMsg(ext) {
    const message = `File type "${ext}" not supported.`;
    return Error(message);
}
|
<reponame>trunksbomb/Cyclic<filename>src/main/java/com/lothrazar/cyclicmagic/event/EventPlayerData.java<gh_stars>0
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 <NAME> (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.event;
import com.lothrazar.cyclicmagic.ModCyclic;
import com.lothrazar.cyclicmagic.capability.IPlayerExtendedProperties;
import com.lothrazar.cyclicmagic.registry.CapabilityRegistry;
import com.lothrazar.cyclicmagic.util.Const;
import com.lothrazar.cyclicmagic.util.UtilEntity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.nbt.NBTBase;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.capabilities.ICapabilitySerializable;
import net.minecraftforge.event.AttachCapabilitiesEvent;
import net.minecraftforge.event.entity.EntityJoinWorldEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.PlayerEvent.PlayerLoggedInEvent;
public class EventPlayerData {

  // Sync from BOTH the login and join events to avoid a NULL player; known
  // issue due to threading race conditions:
  // https://github.com/MinecraftForge/MinecraftForge/issues/1583
  // Player data storage based on API source code example:
  // https://github.com/MinecraftForge/MinecraftForge/blob/1.9/src/test/java/net/minecraftforge/test/NoBedSleepingTest.java
  @SubscribeEvent
  // @SideOnly(Side.SERVER) // no dont do this. breaks hearts in SSP
  public void onSpawn(PlayerLoggedInEvent event) {
    // `instanceof` is false for null, so the former separate null checks
    // (before and after the cast) were redundant and have been removed.
    if (event.player instanceof EntityPlayerMP && !event.player.isDead) {
      EntityPlayerMP p = (EntityPlayerMP) event.player;
      CapabilityRegistry.syncServerDataToClient(p);
      setDefaultHealth(p);
    }
  }

  @SubscribeEvent
  // @SideOnly(Side.SERVER)// no dont do this. breaks hearts in SSP
  public void onJoinWorld(EntityJoinWorldEvent event) {
    if (event.getEntity() instanceof EntityPlayerMP && !event.getEntity().isDead) {
      EntityPlayerMP p = (EntityPlayerMP) event.getEntity();
      CapabilityRegistry.syncServerDataToClient(p);
      setDefaultHealth(p);
    }
  }

  /** Applies the max health stored in the player's extended properties, if any. */
  private void setDefaultHealth(EntityPlayerMP p) {
    IPlayerExtendedProperties src = CapabilityRegistry.getPlayerProperties(p);
    // UtilChat.sendStatusMessage(p,"Setting your maximum health to "+src.getMaxHealth());
    // Zero/negative means "no stored value" — leave vanilla max health alone.
    if (src.getMaxHealth() > 0) {
      UtilEntity.setMaxHealth(p, src.getMaxHealth());
    }
  }

  /**
   * TODO
   *
   * SHOULD BE AttachCapabilitiesEvent&lt;EntityPlayer&gt; ..BUT that NEVER EVER fires, so data never gets attached to player soo NPEs all over crash the game SO IM forced to do it this way, fire it on
   * GLOBAL object and check instanceof at runtime NO IDEA if its a bug in forge or if there is a right way / wrong way. but of course forge has no docs and nobody to ask
   *
   * @param event the raw-typed attach event; filtered to EntityPlayer at runtime
   */
  @SuppressWarnings("rawtypes")
  @SubscribeEvent
  public void onEntityConstruct(AttachCapabilitiesEvent event) {//was AttachCapabilitiesEvent.Entity in 1.11 and previous
    if (event.getObject() instanceof EntityPlayer) {
      event.addCapability(new ResourceLocation(Const.MODID, "IModdedSleeping"), new PlayerCapInstance());
    }
  }

  /** Capability provider/serializer wrapping one IPlayerExtendedProperties instance. */
  class PlayerCapInstance implements ICapabilitySerializable<NBTTagCompound> {

    IPlayerExtendedProperties inst = ModCyclic.CAPABILITYSTORAGE.getDefaultInstance();

    @Override
    public boolean hasCapability(Capability<?> capability, EnumFacing facing) {
      return capability == ModCyclic.CAPABILITYSTORAGE;
    }

    @Override
    public <T> T getCapability(Capability<T> capability, EnumFacing facing) {
      return capability == ModCyclic.CAPABILITYSTORAGE ? ModCyclic.CAPABILITYSTORAGE.<T> cast(inst) : null;
    }

    @Override
    public NBTTagCompound serializeNBT() {
      NBTBase ret = ModCyclic.CAPABILITYSTORAGE.getStorage().writeNBT(ModCyclic.CAPABILITYSTORAGE, inst, null);
      // The storage implementation may return other NBT kinds; only a
      // compound satisfies this interface, otherwise report "no data".
      if (ret instanceof NBTTagCompound) {
        return (NBTTagCompound) ret;
      }
      return null;
    }

    @Override
    public void deserializeNBT(NBTTagCompound nbt) {
      ModCyclic.CAPABILITYSTORAGE.getStorage().readNBT(ModCyclic.CAPABILITYSTORAGE, inst, null, nbt);
    }
  }
}
|
<gh_stars>1-10
const { Pipeline, Plugin } = require('aid-bundler')
// Append the input marker line and bump the per-adventure action counter.
function inputMod (data) {
  data.text = `${data.text}\nInput Text.`
  data.state.actionCount = (data.state.actionCount || 0) + 1
}
// Append the context marker line to the running text.
function contextMod (data) {
  data.text = data.text + '\nContext text.'
}
// Append the output marker line to the running text.
function outputMod (data) {
  data.text = data.text + '\nOutput text.'
}
// Wire the three modifiers into a single aid-bundler plugin and build the
// pipeline entry points (input/context/output hooks) for the scenario.
const pipeline = new Pipeline()
const pluginA = new Plugin('Plugin A', inputMod, contextMod, outputMod)
pipeline.addPlugin(pluginA)
pipeline.build()
|
<reponame>bwind/iaaf-intervals
import re
from intervals import load
from intervals.regex import (
GROUPED_SETS_REGEX,
SETS_REGEX,
REPETITION_REGEX,
)
from intervals.repetition import Repetition
from intervals.set import Set
from intervals.utils import parse_time, to_time
class TestUtils:
    """Unit tests for intervals.utils.parse_time / to_time.

    NOTE: the quote characters ’ (minutes) and ” (seconds) are the
    typographic marks of the IAAF notation, not ASCII quotes.
    """

    def test_parse_time_minutes(self):
        assert parse_time('12’') == 720

    def test_parse_time_seconds(self):
        assert parse_time('30”') == 30

    def test_parse_time_none(self):
        assert parse_time(None) is None

    def test_to_time_string(self):
        # Non-numeric pace strings pass through unchanged.
        assert to_time('max') == 'max'

    def test_to_time_minutes_and_seconds(self):
        assert to_time(90) == '1’30”'

    def test_to_time_seconds(self):
        assert to_time(30) == '30”'

    def test_to_time_minutes(self):
        # Whole minutes omit the seconds part entirely.
        assert to_time(120) == '2’'
class TestIntervals:
    """End-to-end tests for the IAAF interval-notation regexes and load()."""

    def setup(self):
        # Representative notation samples, from minimal to nested forms.
        self.repetition_minimal = '1 x 300 (max)'
        self.repetition_with_recovery = '10 x 400 (72”) [2’]'
        self.multiple_repetitions = '2 x 500 (300/48”, 200/max) [8’] [15’] 8 x 200 (35”) [1’]'  # noqa: E501
        self.grouped_sets = '2 x {1 x 500 (1500) [1’] 1 x 700 (1500) [30”] 1 x 300 (max)} [12’]'  # noqa: E501
        self.sets = '3 x 4 x 300 (3000) [100m r/o & 5’]'

    def test_regex_repetition_minimal(self):
        # Groups: repeats, distance, pace, recovery.
        match = re.match(REPETITION_REGEX, self.repetition_minimal)
        assert match.group(1) == '1'
        assert match.group(2) == '300'
        assert match.group(3) == 'max'
        assert match.group(4) is None

    def test_regex_repetition_with_recovery(self):
        match = re.match(REPETITION_REGEX, self.repetition_with_recovery)
        assert match.group(1) == '10'
        assert match.group(2) == '400'
        assert match.group(3) == '72”'
        assert match.group(4) == '2’'

    def test_regex_multiple_repetitions_has_recovery_between_sets(self):
        match = re.findall(REPETITION_REGEX, self.multiple_repetitions)
        # Group index 5 carries the between-sets recovery ('[15’]').
        assert match[0][5] == '15’'

    def test_regex_grouped_sets(self):
        match = re.match(GROUPED_SETS_REGEX, self.grouped_sets)
        assert match.group(1) == '2'
        assert match.group(2) == '1 x 500 (1500) [1’] 1 x 700 (1500) [30”] 1 x 300 (max)'  # noqa: E501
        assert match.group(3) == '12’'

    def test_sets_has_repeats(self):
        match = re.match(SETS_REGEX, self.sets)
        assert match.group(1) == '3'
        assert match.group(2) == '4 x 300 (3000) [100m r/o & 5’]'

    def test_load_repetition_minimal(self):
        assert load(self.repetition_minimal) == Set(
            repeats=1,
            repetitions=[
                Repetition(
                    repeats=1,
                    distance=300,
                    pace='max',
                    recovery=None,
                ),
            ],
            recovery=None)

    def test_load_repetition_with_recovery(self):
        # Times are normalised to seconds: 72” -> 72, 2’ -> 120.
        assert load(self.repetition_with_recovery) == Set(
            repeats=1,
            repetitions=[
                Repetition(
                    repeats=10,
                    distance=400,
                    pace=72,
                    recovery=120,
                ),
            ],
            recovery=None)

    def test_load_multiple_repetitions(self):
        assert load(self.multiple_repetitions) == Set(
            repeats=1,
            repetitions=[
                Repetition(
                    repeats=2,
                    distance=500,
                    pace='300/48”, 200/max',
                    recovery=480,
                ),
                Repetition(
                    repeats=8,
                    distance=200,
                    pace=35,
                    recovery=60,
                ),
            ],
            recovery=900)

    def test_load_grouped_sets(self):
        assert load(self.grouped_sets) == Set(
            repeats=2,
            repetitions=[
                Repetition(
                    repeats=1,
                    distance=500,
                    pace='1500',
                    recovery=60,
                ),
                Repetition(
                    repeats=1,
                    distance=700,
                    pace='1500',
                    recovery=30,
                ),
                Repetition(
                    repeats=1,
                    distance=300,
                    pace='max',
                    recovery=None,
                )
            ],
            recovery=720)

    def test_load_sets(self):
        # BUG FIX: a stray no-op string literal ('3 x 4 x 300 (3000) ...')
        # used to sit here; it duplicated self.sets and has been removed.
        # NOTE(review): repeats is asserted as the string '3' here while other
        # tests use ints — presumably load() does not coerce the outer repeat
        # count for sets; confirm against intervals.load before changing.
        assert load(self.sets) == Set(
            repeats='3',
            repetitions=[
                Repetition(
                    repeats=4,
                    distance=300,
                    pace='3000',
                    recovery='100m r/o',
                ),
            ],
            recovery=300)
|
#!/bin/bash
####################################################################################################
#
# FILENAME: run_all_tests
#
# PURPOSE: ##ADD_PURPOSE_HERE##
#
# DESCRIPTION: ##ADD_DESCRIPTION_HERE##
#
# INSTRUCTIONS: Execute the following command relative to your project's root directory:
# ./dev-tools/run_all_tests
#
# RELATED DOCS: TODO: ?????
# └─ https://???.???.com
#
# TODO: ?????
# ├─ https://www.????.com
# └─ https://www.????.com
#
#### LOAD SHARED FUNCTIONS LIBRARY #################################################################
#
# Abort early if the shared helper library (echoStepMsg, confirmScriptExecution,
# echoErrorMsg, ...) cannot be read, since everything below depends on it.
if [ ! -r "$(dirname $0)/lib/shared-functions.sh" ]; then
  echo "\nFATAL ERROR: $(tput sgr0)$(tput setaf 1)Could not load dev-tools/lib/shared-functions.sh. File not found.\n"
  exit 1
fi
source "$(dirname $0)/lib/shared-functions.sh"
#
#
#### CONFIRM SCRIPT EXECUTION ######################################################################
#
confirmScriptExecution "Do you want to run all tests currently deployed to your scratch org?"
#
#
#### CREATE LOCAL VARIABLES ########################################################################
#
# No local variables are used by this script.
#
#
#### RUN ALL TESTS IN THE SCRATCH ORG ##############################################################
#
# 0. Reset the Step Message counter and set the TOTAL STEPS to 1.
resetStepMsgCounter 1
# 1. Run all tests as human readable, with final output made to the temp directory.
#    BUG FIX: the echoed command below now mirrors the flags actually passed to
#    sfdx (it previously showed --synchronous, omitted --outputdir/--wait, and
#    had a stray ')' after --loglevel error).  Also fixed "tets" -> "tests".
echoStepMsg "Run all tests currently deployed to $SCRATCH_ORG_ALIAS"
echo \
"Executing force:apex:test:run \\
            --targetusername $SCRATCH_ORG_ALIAS \\
            --testlevel RunLocalTests \\
            --outputdir ./temp/apex-test-results \\
            --resultformat human \\
            --codecoverage \\
            --wait 15 \\
            --loglevel error\n"
(cd $PROJECT_ROOT && exec sfdx force:apex:test:run \
                          --targetusername $SCRATCH_ORG_ALIAS \
                          --testlevel RunLocalTests \
                          --outputdir ./temp/apex-test-results \
                          --resultformat human \
                          --codecoverage \
                          --wait 15 \
                          --loglevel error)
# Check if the previous command executed successfully. If not, abort this script.
if [ $? -ne 0 ]; then
  echoErrorMsg "The previous command did not execute as expected. Aborting script"
  exit 1
fi
#### ECHO CLOSING SUCCESS MESSAGE ##################################################################
#
echoScriptCompleteMsg "Tests complete. Test results should be available in $PROJECT_ROOT/temp/apex-test-results"
##END##
<reponame>shyga362/projetoPython<gh_stars>1-10
# Read an integer from the user and report whether it is even ("par")
# or odd ("impar").
n = int(input("Digite um numero "))
print("par" if n % 2 == 0 else "impar")
|
#!/bin/bash
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Fail fast: without this, the success message at the bottom would be printed
# even if the download, patch, or maven build had failed.
set -e
cd /
# Fetch the GPU-port patch (YARN GPU scheduling, see JIRA attachment 12921722).
wget https://issues.apache.org/jira/secure/attachment/12921722/hadoop-2.7.2.gpu-port.patch
git clone https://github.com/apache/hadoop.git
cd hadoop
git checkout branch-2.7.2
cp /hadoop-2.7.2.gpu-port.patch /hadoop
git apply hadoop-2.7.2.gpu-port.patch
# Build the full binary distribution, skipping tests.
mvn package -Pdist,native -DskipTests -Dtar
cp /hadoop/hadoop-dist/target/hadoop-2.7.2.tar.gz /hadoop-binary
echo "Successfully build hadoop 2.7.2 AI"
|
# src/bash/url-sniper/funcs/remove-action-files.test.sh
# v1.1.2
# ---------------------------------------------------------
# First adds an "action-to-remove" entry, then generates all the action
# files (which will include this new action), and finally tests the
# actual removal of that action at the end.
# ---------------------------------------------------------
# Integration test for the "remove-action-files" action: registers a dummy
# action, regenerates the action files, then exercises the removal path.
# Relies on doLog/doSpec*/doHelp* helpers and the optional $sleep_interval
# pause between screens (each pause is followed by a terminal clear).
doTestRemoveActionFiles(){
    doLog "DEBUG START doTestRemoveActionFiles"

    doSpecRemoveActionFiles
    test -z "$sleep_interval" || sleep "$sleep_interval"
    printf "\033[2J";printf "\033[0;0H"   # clear screen, cursor to top-left

    doHelpRemoveActionFiles
    test -z "$sleep_interval" || sleep "$sleep_interval"
    printf "\033[2J";printf "\033[0;0H"

    cat doc/txt/url-sniper/tests/remove-action-files.test.txt
    test -z "$sleep_interval" || sleep "$sleep_interval"
    printf "\033[2J";printf "\033[0;0H"

    # add an action to remove (only if not already present in either list)
    found=$(grep -c action-to-remove src/bash/url-sniper/tests/rem-url-sniper-actions.lst)
    test $found -eq 0 && \
        echo action-to-remove >> src/bash/url-sniper/tests/rem-url-sniper-actions.lst

    found=0
    found=$(grep -c action-to-remove src/bash/url-sniper/tests/all-url-sniper-tests.lst)
    test $found -eq 0 && \
        echo action-to-remove >> src/bash/url-sniper/tests/all-url-sniper-tests.lst

    # now generate the code files for this action to remove
    bash src/bash/url-sniper/url-sniper.sh -a generate-action-files
    test -z "$sleep_interval" || sleep "$sleep_interval"
    printf "\033[2J";printf "\033[0;0H"

    # and test the actual removal of the action
    bash src/bash/url-sniper/url-sniper.sh -a remove-action-files
    doLog "DEBUG STOP doTestRemoveActionFiles"
    test -z "$sleep_interval" || sleep "$sleep_interval"
    printf "\033[2J";printf "\033[0;0H"
}
# eof func doTestRemoveActionFiles
# eof file: src/bash/url-sniper/funcs/remove-action-files.test.sh
|
<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import EnvelopeSquareSvg from '@rsuite/icon-font/lib/legacy/EnvelopeSquare';
// Icon component wrapping the legacy "envelope square" glyph from
// @rsuite/icon-font; createSvgIcon supplies the shared icon behavior/props.
const EnvelopeSquare = createSvgIcon({
  as: EnvelopeSquareSvg,
  ariaLabel: 'envelope square',
  category: 'legacy',
  displayName: 'EnvelopeSquare'
});
export default EnvelopeSquare;
|
#!/bin/bash
# GitHub self-hosted runner entrypoint: registers the runner (token from the
# AGENT_TOKEN env var or from Azure KeyVault via MSI), then starts it.
set -Ee

# Always un-register the runner on exit so the registration does not leak.
function finally {
  echo "Un-register the runner"
  ./config.sh remove --unattended
}

trap finally EXIT SIGTERM

AGENT_NAME=${AGENT_NAME:="agent"}

if [ -n "${AGENT_TOKEN}" ]; then
  echo "Connect to GitHub using AGENT_TOKEN environment variable."
else
  echo "Connect to Azure AD using MSI ${MSI_ID}"
  az login --identity -u ${MSI_ID} --allow-no-subscriptions

  # Get AGENT_TOKEN from KeyVault if not provided from the AGENT_TOKEN environment variable
  AGENT_TOKEN=$(az keyvault secret show -n ${KEYVAULT_SECRET} --vault-name ${KEYVAULT_NAME} -o json | jq -r .value)
fi

LABELS+="runner-version-$(./run.sh --version),"
LABELS+=$(cat /tf/rover/version.txt)

# BUG FIX: the original ended the config.sh argument list with a trailing
# backslash, which turned "./run.sh" into an *argument* of config.sh, so the
# runner process was never actually started.
./config.sh \
  --unattended \
  --replace \
  --url ${URL} \
  --token ${AGENT_TOKEN} \
  --labels ${LABELS} \
  --name ${AGENT_NAME}

./run.sh
|
__author__ = 'sstober'
import logging
log = logging.getLogger(__name__)
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import mne
from mne.io import read_raw_edf
from mne.channels import rename_channels
from mne.preprocessing import ICA, read_ica
from mne.viz.topomap import plot_topomap
import deepthought
from deepthought.util.fs_util import ensure_parent_dir_exists
from deepthought.datasets.eeg.biosemi64 import Biosemi64Layout
from openmiir.eeg import recording_has_mastoid_channels
from openmiir.events import decode_event_id
from openmiir.preprocessing.events import \
merge_trial_and_audio_onsets, generate_beat_events, \
simple_beat_event_id_generator, extract_events_from_raw
from openmiir.metadata import get_stimuli_version, load_stimuli_metadata
from mneext.resample import fast_resample_mne
# Biosemi external electrodes treated as EOG channels during BDF import.
RAW_EOG_CHANNELS = [u'EXG1', u'EXG2', u'EXG3', u'EXG4']
# External electrodes used as mastoid reference channels (when recorded).
MASTOID_CHANNELS = [u'EXG5', u'EXG6']
def load_raw_info(subject,
                  mne_data_root=None,
                  verbose=False):
    """Return the measurement info of a subject's raw fif file without
    loading the data into memory (``preload=False``).

    :param subject: subject identifier used in the '<subject>-raw.fif' name
    :param mne_data_root: directory holding the fif files; defaults to
        ``<deepthought.DATA_PATH>/OpenMIIR/eeg/mne``
    :param verbose: forwarded to ``mne.io.Raw``
    """
    if mne_data_root is None:
        # use default data root
        # (removed a redundant local `import deepthought`; the module is
        # already imported at file level)
        data_root = os.path.join(deepthought.DATA_PATH, 'OpenMIIR')
        mne_data_root = os.path.join(data_root, 'eeg', 'mne')
    mne_data_filepath = os.path.join(mne_data_root, '{}-raw.fif'.format(subject))
    log.info('Loading raw data info for subject "{}" from {}'.format(subject, mne_data_filepath))
    raw = mne.io.Raw(mne_data_filepath, preload=False, verbose=verbose)
    return raw.info
def load_raw(subject, **args):
    """Public wrapper around :func:`_load_raw` that fills in the standard
    mastoid-channel lookup for this dataset; all other keyword arguments
    are forwarded unchanged."""
    return _load_raw(subject=subject,
                     has_mastoid_channels=recording_has_mastoid_channels,
                     **args)
def _load_raw(subject,
              mne_data_root=None,
              verbose=False,
              onsets=None,
              interpolate_bad_channels=False,
              has_mastoid_channels=None, # None=True, False, or callable(subject) returning True/False
              apply_reference=True, # by default, reference the data
              reference_mastoids=True):
    """Load a subject's raw fif recording with optional re-referencing,
    onset-event merging and bad-channel interpolation.

    :param onsets: 'audio' to keep audio onsets, 'trials' to keep trial
        onsets, None to keep both event kinds
    :param reference_mastoids: only used when mastoid channels exist;
        otherwise the mastoid channels are dropped
    """
    if mne_data_root is None:
        # use default data root
        # (redundant local `import deepthought` removed; imported at file level)
        data_root = os.path.join(deepthought.DATA_PATH, 'OpenMIIR')
        mne_data_root = os.path.join(data_root, 'eeg', 'mne')
    mne_data_filepath = os.path.join(mne_data_root, '{}-raw.fif'.format(subject))
    log.info('Loading raw data for subject "{}" from {}'.format(subject, mne_data_filepath))
    raw = mne.io.Raw(mne_data_filepath, preload=True, verbose=verbose)

    if apply_reference:
        if has_mastoid_channels is None \
                or has_mastoid_channels is True \
                or has_mastoid_channels(subject) is True:
            ## referencing to mastoids
            if reference_mastoids:
                log.info('Referencing to mastoid channels: {}'.format(MASTOID_CHANNELS))
                mne.io.set_eeg_reference(raw, MASTOID_CHANNELS, copy=False) # inplace
            else:
                log.info('This recording has unused mastoid channels: {} '
                         'To use them, re-run with reference_mastoids=True.'.format(MASTOID_CHANNELS))
            raw.drop_channels(MASTOID_CHANNELS)
        else:
            ## referencing to average
            log.info('Referencing to average.')
            mne.io.set_eeg_reference(raw, copy=False)

    ## optional event merging
    if onsets == 'audio':
        merge_trial_and_audio_onsets(raw,
                                     use_audio_onsets=True,
                                     inplace=True,
                                     stim_channel='STI 014',
                                     verbose=verbose)
    elif onsets == 'trials':
        # BUG FIX: this branch previously passed use_audio_onsets=True,
        # identical to the 'audio' branch (copy-paste error); 'trials'
        # should keep the trial onsets instead.
        merge_trial_and_audio_onsets(raw,
                                     use_audio_onsets=False,
                                     inplace=True,
                                     stim_channel='STI 014',
                                     verbose=verbose)
    # else: keep both

    bads = raw.info['bads']
    if bads is not None and len(bads) > 0:
        if interpolate_bad_channels:
            log.info('Interpolating bad channels: {}'.format(bads))
            raw.interpolate_bads()
        else:
            log.info('This file contains some EEG channels marked as bad: {}\n'
                     'To interpolate bad channels run load_raw() with interpolate_bad_channels=True.'
                     ''.format(bads))

    return raw
def interpolate_bad_channels(inst):
    """Interpolate the channels listed in ``inst.info['bads']`` in place,
    logging whether any interpolation was necessary."""
    bad_channels = inst.info['bads']
    if not bad_channels:
        log.info('No channels marked as bad. Nothing to interpolate.')
        return
    log.info('Interpolating bad channels...')
    inst.interpolate_bads()
def load_ica(subject, description, ica_data_root=None):
    """Load a previously fitted ICA solution for ``subject``.

    :param description: tag used in the '<subject>-<description>-ica.fif' name
    :param ica_data_root: defaults to
        ``<deepthought.DATA_PATH>/OpenMIIR/eeg/preprocessing/ica``
    """
    if ica_data_root is None:
        # use default data root
        # (redundant local `import deepthought` removed; imported at file level)
        data_root = os.path.join(deepthought.DATA_PATH, 'OpenMIIR')
        ica_data_root = os.path.join(data_root, 'eeg', 'preprocessing', 'ica')
    ica_filepath = os.path.join(ica_data_root,
                                '{}-{}-ica.fif'.format(subject, description))
    return read_ica(ica_filepath)
def import_and_process_metadata(biosemi_data_root, mne_data_root, subject, verbose=True, overwrite=False):
    """Import a subject's Biosemi BDF recording, normalise it (resampling,
    bad-channel marking, event processing, cropping) and save it as
    '<subject>-raw.fif'.  Skips silently if the output exists and
    ``overwrite`` is False.
    """
    ## check whether output already exists
    output_filepath = os.path.join(mne_data_root,
                                   '{}-raw.fif'.format(subject))
    if os.path.exists(output_filepath):
        if not overwrite:
            log.info('Skipping existing {}'.format(output_filepath))
            return

    ## import raw BDF file from biosemi
    bdf_filepath = os.path.join(biosemi_data_root, '{}.bdf'.format(subject))
    ## NOTE: marks EXT1-4 channels as EOG channels during import
    log.info('Importing raw BDF data from: {}'.format(bdf_filepath))
    raw = read_raw_edf(bdf_filepath, eog=RAW_EOG_CHANNELS, preload=True, verbose=verbose)
    log.info('Imported raw data: {}'.format(raw))

    # Normalise the sample rate to 512 Hz, preserving the event markers.
    sfreq = raw.info['sfreq']
    if sfreq != 512:
        log.warn('Unexpected sample rate: {} Hz'.format(sfreq))
        log.warn('Re-sampling to 512 Hz')
        fast_resample_mne(raw, 512, res_type='sinc_best', preserve_events=True, verbose=True)

    ## mark all unused channels as bad
    raw.info['bads'] += [u'C1', u'C2', u'C3', u'C4', u'C5', u'C6', u'C7', u'C8', u'C9', u'C10',
                         u'C11', u'C12', u'C13', u'C14', u'C15', u'C16', u'C17', u'C18', u'C19', u'C20',
                         u'C21', u'C22', u'C23', u'C24', u'C25', u'C26', u'C27', u'C28', u'C29', u'C30',
                         u'C31', u'C32', u'D1', u'D2', u'D3', u'D4', u'D5', u'D6', u'D7', u'D8',
                         u'D9', u'D10', u'D11', u'D12', u'D13', u'D14', u'D15', u'D16', u'D17', u'D18',
                         u'D19', u'D20', u'D21', u'D22', u'D23', u'D24', u'D25', u'D26', u'D27', u'D28',
                         u'D29', u'D30', u'D31', u'D32', u'E1', u'E2', u'E3', u'E4', u'E5', u'E6',
                         u'E7', u'E8', u'E9', u'E10', u'E11', u'E12', u'E13', u'E14', u'E15',
                         u'E16', u'E17', u'E18', u'E19', u'E20', u'E21', u'E22', u'E23', u'E24',
                         u'E25', u'E26', u'E27', u'E28', u'E29', u'E30', u'E31', u'E32', u'F1',
                         u'F2', u'F3', u'F4', u'F5', u'F6', u'F7', u'F8', u'F9', u'F10', u'F11',
                         u'F12', u'F13', u'F14', u'F15', u'F16', u'F17', u'F18', u'F19', u'F20',
                         u'F21', u'F22', u'F23', u'F24', u'F25', u'F26', u'F27', u'F28', u'F29',
                         u'F30', u'F31', u'F32', u'G1', u'G2', u'G3', u'G4', u'G5', u'G6', u'G7',
                         u'G8', u'G9', u'G10', u'G11', u'G12', u'G13', u'G14', u'G15', u'G16', u'G17',
                         u'G18', u'G19', u'G20', u'G21', u'G22', u'G23', u'G24', u'G25', u'G26', u'G27',
                         u'G28', u'G29', u'G30', u'G31', u'G32', u'H1', u'H2', u'H3', u'H4', u'H5',
                         u'H6', u'H7', u'H8', u'H9', u'H10', u'H11', u'H12', u'H13', u'H14', u'H15',
                         u'H16', u'H17', u'H18', u'H19', u'H20', u'H21', u'H22', u'H23', u'H24', u'H25',
                         u'H26', u'H27', u'H28', u'H29', u'H30', u'H31', u'H32',
                         u'EXG7', u'EXG8',
                         u'GSR1', u'GSR2', u'Erg1', u'Erg2', u'Resp', u'Plet', u'Temp']
    log.info('Marked unused channels as bad: {}'.format(raw.info['bads']))

    # Mastoid channels are only meaningful for recordings that used them.
    if not recording_has_mastoid_channels(subject):
        raw.info['bads'] += [u'EXG5', u'EXG6']

    # Only EEG/EOG/stim channels that are not bad survive the save below.
    picks = mne.pick_types(raw.info, meg=False, eeg=True, eog=True, stim=True, exclude='bads')

    ## process events
    markers_filepath = os.path.join(biosemi_data_root, '{}_EEG_Data.mat'.format(subject))
    log.info('Processing events, external source: {}'.format(markers_filepath))
    events = extract_events_from_raw(raw, markers_filepath, subject, verbose)
    raw._data[-1,:].fill(0)     # delete data in stim channel
    raw.add_events(events)

    # crop to first event - 1s ... last event + 20s (longer than longest trial)
    onesec = raw.info['sfreq']
    tmin, tmax = raw.times[[events[0,0]-onesec, events[-1,0]+20*onesec]]
    log.info('Cropping raw inplace to {:.3f}s - {:.3f}s'.format(tmin, tmax))
    raw.crop(tmin=tmin, tmax=tmax, copy=False)

    # fix sample offset -> 0
    # NOTE(review): this pokes Raw's private first/last sample bookkeeping
    # directly instead of using a public API — presumably to make the cropped
    # file start at sample 0; confirm it still works with the installed mne.
    raw.last_samp -= raw.first_samp
    raw.first_samp = 0

    ensure_parent_dir_exists(output_filepath)
    log.info('Saving raw fif data to: {}'.format(output_filepath))
    raw.save(output_filepath, picks=picks, overwrite=overwrite, verbose=False)
    del raw

    # Re-open the saved file to apply channel renaming and the montage.
    raw = fix_channel_infos(output_filepath, verbose=verbose)
    log.info('Imported {}'.format(raw))
    log.info('Metadata: {}'.format(raw.info))
def fix_channel_infos(mne_data_filepath, verbose=True):
    """Rename the imported BDF channel names to the Biosemi-64 montage names,
    apply the montage, and save the file back in place.  Returns the
    re-saved Raw object.
    """
    log.info('Loading raw fif data from: {}'.format(mne_data_filepath))
    raw = mne.io.Raw(mne_data_filepath, preload=True, verbose=verbose)
    raw.info['bads'] = [] # reset bad channels as they have been removed already

    montage = Biosemi64Layout().as_montage()
    log.info('Applying channel montage: {}'.format(montage))

    ## change EEG channel names
    # Map the i-th imported channel name to the i-th montage name; this
    # relies on the saved file's channel order matching the montage order.
    mapping = dict()
    bdf_channel_names = raw.ch_names
    for i, channel_name in enumerate(montage.ch_names):
        log.debug('renaming channel {}: {} -> {}'.format(
            i, bdf_channel_names[i], channel_name))
        mapping[bdf_channel_names[i]] = channel_name
    rename_channels(raw.info, mapping)

    # mne.channels.apply_montage(raw.info, montage) # in mne 0.9
    raw.set_montage(montage) # in mne 0.9

    log.info('Saving raw fif data to: {}'.format(mne_data_filepath))
    raw.save(mne_data_filepath, overwrite=True, verbose=False)
    return raw
def clean_data(mne_data_root, subject, verbose=True, overwrite=False):
    """Band-pass filter (0.5-30 Hz) a subject's '<subject>-raw.fif' file and
    save the result as '<subject>_filtered-raw.fif'.  Skips silently if the
    output exists and ``overwrite`` is False.
    """
    ## check whether output already exists
    output_filepath = os.path.join(mne_data_root,
                                   '{}_filtered-raw.fif'.format(subject))
    if os.path.exists(output_filepath):
        if not overwrite:
            log.info('Skipping existing {}'.format(output_filepath))
            return

    input_filepath = os.path.join(mne_data_root,
                                  '{}-raw.fif'.format(subject))
    raw = mne.io.Raw(input_filepath, preload=True, verbose=verbose)

    ## apply bandpass filter, picks=None filters all data channels
    raw.filter(0.5, 30, filter_length='10s',
               l_trans_bandwidth=0.1, h_trans_bandwidth=0.5,
               method='fft', iir_params=None,
               picks=None, n_jobs=1, verbose=verbose)

    ensure_parent_dir_exists(output_filepath)
    raw.save(output_filepath, overwrite=overwrite, verbose=False)
|
#!/bin/sh
# Start the blink loop in a detached screen session named with the current
# time.  BUG-ADJACENT SIMPLIFICATION: the original built HHMMSS by piping
# `date -Iseconds` through three `tr` calls and awk; `date +%H%M%S` yields
# the identical timestamp directly.
T=$(date +%H%M%S)
screen -AdmS "blink_$T" /root/blink/while.sh
|
# shellcheck shell=ksh
# Copyright 2022 Rawiri Blundell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Provenance: https://github.com/rawiriblundell/sh_libpath
# SPDX-License-Identifier: Apache-2.0
# Get the top level PID and setup a trap so that we can call die() within subshells
trap "exit 1" TERM
_self_pid="${$}"
export _self_pid

# Function to print an error message and exit.
#
# BUG FIX: the explanatory lines between the three variants below ("Or in a
# terser form:", "With datestamps and [ERROR] tags:") were previously bare,
# uncommented text, so sourcing this file tried to execute `Or` and `With`
# as commands.  They are now comments.  Because later definitions override
# earlier ones, the datestamped variant at the bottom is the active one —
# exactly as before.
die() {
  if [ -t 0 ]; then
    printf '\e[31;1m====>%s\e[0m\n' "${0}:(${LINENO}): ${*}" >&2
  else
    printf -- '====>%s\n' "${0}:(${LINENO}): ${*}" >&2
  fi
  # Send a TERM signal to the top level PID, this is trapped and exit 1 is forced
  kill -s TERM "${_self_pid}"
}

# Or in a terser form:
# shellcheck disable=SC2059
die() {
  [ -t 0 ] && _diefmt='\e[31;1m====>%s\e[0m\n'
  printf "${_diefmt:-====>%s\n}" "${0}:(${LINENO}): ${*}" >&2
  # Send a TERM signal to the top level PID, this is trapped and exit 1 is forced
  kill -s TERM "${_self_pid}"
}

# With datestamps and [ERROR] tags:
die() {
  if [ -t 0 ]; then
    printf '\e[31;1m====>[%s] %s [ERROR]: %s\e[0m\n' "$(date +%s)" "${0}:${LINENO}" "${*}" >&2
  else
    printf -- '====>[%s] %s [ERROR]: %s\n' "$(date +%s)" "${0}:${LINENO}" "${*}" >&2
  fi
  # Send a TERM signal to the top level PID, this is trapped and exit 1 is forced
  kill -s TERM "${_self_pid}"
}
|
/**************************************************************************\
|
| Copyright (C) 2009 <NAME>
|
| This program is free software: you can redistribute it and/or modify
| it under the terms of the GNU General Public License as published by
| the Free Software Foundation, either version 3 of the License, or
| (at your option) any later version.
|
| This program is distributed in the hope that it will be useful,
| but WITHOUT ANY WARRANTY; without even the implied warranty of
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
| GNU General Public License for more details.
|
| You should have received a copy of the GNU General Public License
| along with this program. If not, see <http://www.gnu.org/licenses/>.
|
\**************************************************************************/
#ifndef HASHCLASH_SHA1DETAIL_HPP
#define HASHCLASH_SHA1DETAIL_HPP

#include "types.hpp"

// In debug builds the SHA1DETAIL_INLINE_IMPL switch is forced off (the
// constant tables below become extern declarations); in release builds it
// is forced on so the definitions are emitted inline in this header.
#ifdef _DEBUG
#ifdef SHA1DETAIL_INLINE_IMPL
#undef SHA1DETAIL_INLINE_IMPL
#endif
#else
#ifndef SHA1DETAIL_INLINE_IMPL
#define SHA1DETAIL_INLINE_IMPL
#endif
#endif
namespace hashclash {

	// Full SHA-1 compression of one 512-bit message block into ihv.
	void sha1compress(uint32 ihv[5], const uint32 block[16]);
	// Same, but taking an already-expanded 80-word message schedule.
	void sha1compress_me(uint32 ihv[5], const uint32 me[80]);

	// Index offset of Q[t] within working-state arrays: the step functions
	// below store Q[t] at array index t + 4 (Q[-4..80] lives at [0..84]).
	const uint32 Qoffsha1 = 4;
	// SHA-1 boolean round functions, dispatched by sha1_step():
	//   f1 = Ch     (rounds  0-19), written in the branch-free d^(b&(c^d)) form
	//   f2 = Parity (rounds 20-39)
	//   f3 = Maj    (rounds 40-59)
	//   f4 = Parity (rounds 60-79)
	FUNC_PREFIX inline uint32 sha1_f1(uint32 b, uint32 c, uint32 d)
	{ return d ^ (b & (c ^ d)); }
	FUNC_PREFIX inline uint32 sha1_f2(uint32 b, uint32 c, uint32 d)
	{ return b ^ c ^ d; }
	FUNC_PREFIX inline uint32 sha1_f3(uint32 b, uint32 c, uint32 d)
	{ return (b & (c | d)) | (c & d); }
	FUNC_PREFIX inline uint32 sha1_f4(uint32 b, uint32 c, uint32 d)
	{ return b ^ c ^ d; }
	// SHA-1 initial chaining value (sha1_iv) and the four per-quarter
	// additive round constants (sha1_ac).  Defined here only when the
	// inline-implementation switch is set; otherwise declared extern.
#ifndef SHA1DETAIL_INLINE_IMPL
	extern const uint32 sha1_iv[];
	extern const uint32 sha1_ac[];
#else
	const uint32 sha1_iv[] = { 0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0 };
	const uint32 sha1_ac[] = { 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xCA62C1D6 };
#endif
	// Message expansion with a per-word byte swap of the 16 input words
	// (the two rotate/mask operations reverse the byte order), followed by
	// the standard SHA-1 expansion recurrence for words 16..79.
	FUNC_PREFIX inline void sha1_me(uint32 block[80], const uint32 msg[16])
	{
		unsigned i;
		for (i = 0; i < 16; ++i)
			block[i]=(rotate_left(msg[i],24)&0xFF00FF00)|(rotate_left(msg[i],8)&0x00FF00FF);
		for (i = 16; i < 80; ++i)
			block[i]=rotate_left(block[i-3] ^ block[i-8] ^ block[i-14] ^ block[i-16], 1);
	}
FUNC_PREFIX inline void sha1_me_simple(uint32 block[80], const uint32 msg[16])
{
unsigned i;
for (i = 0; i < 16; ++i)
block[i]=msg[i];
for (i = 16; i < 80; ++i)
block[i]=rotate_left(block[i-3] ^ block[i-8] ^ block[i-14] ^ block[i-16], 1);
}
	// Message expansion anchored at an arbitrary position: words
	// [offset..offset+15] are taken from msg, later words are produced by
	// the forward recurrence, and the words before the anchor are
	// reconstructed by running the recurrence backwards.
	FUNC_PREFIX inline void sha1_me_generalised(uint32 block[80], const uint32 msg[16], unsigned offset)
	{
		int i;
		for (i = 0; i < 16; ++i)
			block[offset+i]=msg[i];
		for (i = offset+16; i < 80; ++i)
			block[i]=rotate_left(block[i-3] ^ block[i-8] ^ block[i-14] ^ block[i-16], 1);
		// Backward recurrence, starting just below the anchor.
		for (i = int(offset)-1; i >= 0; --i)
			block[i]=rotate_right(block[i+16], 1) ^ block[i+13] ^ block[i+8] ^ block[i+2];
	}
FUNC_PREFIX inline void sha1_step_round1(unsigned t, uint32 Q[], const uint32 me[])
{
const int offset = 4;
uint32 Ft = sha1_f1(Q[offset+t-1], rotate_left(Q[offset+t-2],30), rotate_left(Q[offset+t-3],30));
Q[offset+t+1] = Ft + sha1_ac[0] + me[t] + rotate_left(Q[offset+t],5) + rotate_left(Q[offset+t-4],30);
}
// One SHA-1 state-update step for rounds 20-39 (parity function, constant sha1_ac[1]).
FUNC_PREFIX inline void sha1_step_round2(unsigned t, uint32 Q[], const uint32 me[])
{
    const int offset = 4;
    const uint32 f = sha1_f2(Q[offset + t - 1], rotate_left(Q[offset + t - 2], 30), rotate_left(Q[offset + t - 3], 30));
    Q[offset + t + 1] = rotate_left(Q[offset + t], 5) + f + rotate_left(Q[offset + t - 4], 30) + sha1_ac[1] + me[t];
}
// One SHA-1 state-update step for rounds 40-59 (majority function, constant sha1_ac[2]).
FUNC_PREFIX inline void sha1_step_round3(unsigned t, uint32 Q[], const uint32 me[])
{
    const int offset = 4;
    const uint32 f = sha1_f3(Q[offset + t - 1], rotate_left(Q[offset + t - 2], 30), rotate_left(Q[offset + t - 3], 30));
    Q[offset + t + 1] = rotate_left(Q[offset + t], 5) + f + rotate_left(Q[offset + t - 4], 30) + sha1_ac[2] + me[t];
}
// One SHA-1 state-update step for rounds 60-79 (parity function, constant sha1_ac[3]).
FUNC_PREFIX inline void sha1_step_round4(unsigned t, uint32 Q[], const uint32 me[])
{
    const int offset = 4;
    const uint32 f = sha1_f4(Q[offset + t - 1], rotate_left(Q[offset + t - 2], 30), rotate_left(Q[offset + t - 3], 30));
    Q[offset + t + 1] = rotate_left(Q[offset + t], 5) + f + rotate_left(Q[offset + t - 4], 30) + sha1_ac[3] + me[t];
}
// Dispatches step t to the round function of its 20-step band.
FUNC_PREFIX inline void sha1_step(unsigned t, uint32 Q[], const uint32 me[])
{
    if (t < 20)
        sha1_step_round1(t, Q, me);
    else if (t < 40)
        sha1_step_round2(t, Q, me);
    else if (t < 60)
        sha1_step_round3(t, Q, me);
    else
        sha1_step_round4(t, Q, me);
}
// Compile-time variant of sha1_step: t is a template parameter, so the
// band comparisons fold away at compile time.
template<unsigned t>
FUNC_PREFIX inline void sha1_step(uint32 Q[], const uint32 me[])
{
    if (t < 20)
        sha1_step_round1(t, Q, me);
    else if (t < 40)
        sha1_step_round2(t, Q, me);
    else if (t < 60)
        sha1_step_round3(t, Q, me);
    else
        sha1_step_round4(t, Q, me);
}
// Debug harness: runs the dedicated compression (sha1compress_me) and the
// generic per-step loop on the same input, then terminates the process.
// NOTE(review): ihv1 and ihv2 are computed but never compared or printed
// here -- presumably inspected in a debugger; exit(0) ends the program.
inline void test_compress(const uint32 ihv[5], uint32 me[80]) {
uint32 Q[85];
uint32 ihv1[5];
uint32 ihv2[5];
for (unsigned i = 0; i < 5; ++i)
ihv1[i] = ihv[i];
sha1compress_me(ihv1, me);
// Seed the working state Q[0..4] from the chaining value (Qoffsha1 == 4).
Q[0] = rotate_right(ihv[4], 30);
Q[1] = rotate_right(ihv[3], 30);
Q[2] = rotate_right(ihv[2], 30);
Q[3] = ihv[1];
Q[4] = ihv[0];
for (unsigned t = 0; t < 80; ++t)
sha1_step(t, Q, me);
// Feed-forward of the final state into the chaining value.
ihv2[0] = ihv[0] + Q[84];
ihv2[1] = ihv[1] + Q[83];
ihv2[2] = ihv[2] + rotate_left(Q[82],30);
ihv2[3] = ihv[3] + rotate_left(Q[81],30);
ihv2[4] = ihv[4] + rotate_left(Q[80],30);
exit(0);
}
} // namespace hashclash
#endif //HASHCLASH_SHA1DETAIL_HPP
|
# Build the KITTI devkit evaluation binaries.
# Guard the cd: if it fails we must not run g++ (and cd back) from the wrong directory.
cd data/kitti_split1/devkit/cpp/ || exit 1
g++ -O3 -DNDEBUG -o evaluate_object evaluate_object.cpp
g++ -O3 -DNDEBUG -o evaluate_object_0_5 evaluate_object_0_5.cpp
cd ../../../../
|
#!/bin/bash
# comment
# Detect the platform and pick the shell profile file, miniconda installer
# URL, matplotlib config directory, and conda environment file accordingly.
unamestr=`uname`
if [ "$unamestr" == 'Linux' ]; then
prof=~/.bashrc
mini_conda_url=https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
matplotlibdir=~/.config/matplotlib
env_file=environment_linux.yml
elif [ "$unamestr" == 'FreeBSD' ] || [ "$unamestr" == 'Darwin' ]; then
prof=~/.bash_profile
mini_conda_url=https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
matplotlibdir=~/.matplotlib
env_file=environment_osx.yml
else
echo "Unsupported environment. Exiting."
exit
fi
# Load the user's shell profile so an existing conda install is on PATH.
source $prof
# echo "Path:"
# echo $PATH
VENV=pager
# Is the reset flag set?
reset=0
# -r: rebuild the environment from loose (unpinned) dependencies instead of the env file.
while getopts r FLAG; do
case $FLAG in
r)
reset=1
;;
esac
done
# create a matplotlibrc file with the non-interactive backend "Agg" in it.
if [ ! -d "$matplotlibdir" ]; then
    mkdir -p "$matplotlibdir"
fi
matplotlibrc=$matplotlibdir/matplotlibrc
if [ ! -e "$matplotlibrc" ]; then
    # No rc file yet: create one that only sets the backend.
    echo "backend : Agg" > "$matplotlibrc"
    echo "NOTE: A non-interactive matplotlib backend (Agg) has been set for this user."
elif grep -Fxq "backend : Agg" "$matplotlibrc" ; then
    # Backend already set the way we want; nothing to do.
    :
elif ! grep -q "backend" "$matplotlibrc"; then
    # rc file exists but has no backend line at all: append one.
    # (The original wrote `[ ! grep -Fxq ... ]`, which is invalid test
    # syntax -- `[` treats "grep" as a string operand, so the intended
    # "no backend line present" check never actually ran grep.)
    echo "backend : Agg" >> "$matplotlibrc"
    echo "NOTE: A non-interactive matplotlib backend (Agg) has been set for this user."
else
    # Some other backend is configured: rewrite that line in place.
    # NOTE(review): `sed -i ''` is BSD/macOS syntax; on GNU sed the '' is
    # taken as the script -- confirm which platforms reach this branch.
    sed -i '' 's/backend.*/backend : Agg/' "$matplotlibrc"
    echo "###############"
    echo "NOTE: $matplotlibrc has been changed to set 'backend : Agg'"
    echo "###############"
fi
# Is conda installed?
conda --version
if [ $? -ne 0 ]; then
echo "No conda detected, installing miniconda..."
curl -L $mini_conda_url -o miniconda.sh;
echo "Install directory: $HOME/miniconda"
# -f force, -b batch (no interactive prompts), -p install prefix
bash miniconda.sh -f -b -p $HOME/miniconda
# Need this to get conda into path
. $HOME/miniconda/etc/profile.d/conda.sh
rm miniconda.sh
else
echo "conda detected, installing $VENV environment..."
fi
# echo "PATH:"
# echo $PATH
# echo ""
# Choose an environment file based on platform
# only add this line if it does not already exist
grep "/etc/profile.d/conda.sh" $prof
if [ $? -ne 0 ]; then
echo ". $HOME/miniconda/etc/profile.d/conda.sh" >> $prof
fi
# Start in conda base environment
echo "Activate base virtual environment"
conda activate base
# make sure conda is up to date
# echo "Updating conda..."
# conda update -n base conda -y
# check to see if mamba is installed in the base environment
if ! command -v mamba &> /dev/null
then
echo "Installing mamba into base environment..."
conda install mamba -n base -c conda-forge -y
echo "Done installing mamba."
else
echo "Mamba already installed."
fi
# Remove any existing pager environments
echo "Removing existing ${VENV} environment..."
conda remove -y --name $VENV --all
# define the list of packages
# NOTE: single-quoted string, one package spec per line; it is expanded
# unquoted at the `mamba install` call so each line word-splits into its
# own argument. Do not add comments inside the quotes.
package_list='
beautifulsoup4
cartopy=0.17
cython
decorator
descartes
docutils
fiona
flake8
gdal
h5py
hypothesis
impactutils
jupyter
lxml
mapio
matplotlib<2.3
mock
nose
openpyxl
pandas
paramiko
pip
psutil
pycrypto
pyproj
pytables
pytest
pytest-cov
pytest-mpl
pyyaml
rasterio
rtree
scipy
shapely
sqlalchemy
sqlalchemy-utils
xlrd
xlwt'
# it seems now that some of the geospatial packages are more stable
# in the defaults channel, so let's set that as our preferred channel.
conda config --add channels 'conda-forge'
conda config --add channels 'defaults'
conda config --set channel_priority strict
# If the user has specified the -r (reset) flag, then create an
# environment based on only the named dependencies, without
# any versions of packages specified.
if [ $reset != 1 ]; then
echo "Installing PAGER from ${env_file}..."
conda env create -f $env_file
else
echo "Ignoring platform, letting conda sort out dependencies..."
# Create a conda virtual environment
echo "Creating the $VENV virtual environment:"
conda create -n $VENV python=3.7 -y
# activate the new environment so mamba knows where to install packages
echo "Activating ${VENV} environment..."
conda activate $VENV
# Use mamba to install packages
echo "Using mamba to solve dependencies and install packages..."
mamba install -y $package_list
fi
# Bail out at this point if the conda create command fails.
# Clean up zip files we've downloaded
# NOTE: $? here is the status of the last command run inside the if/else
# above (conda env create or mamba install); comment lines do not reset it.
if [ $? != 0 ]; then
echo "Failed to create conda environment. Resolve any conflicts, then try again."
exit
fi
# Activate the new environment
echo "Activating the $VENV virtual environment"
conda activate $VENV
# This package
echo "Installing ${VENV}..."
pip install -e .
# test pager, if we get an error about libffi, try to fix it
# this is a total hack, but the only way I can see to get around
# this weird result.
ffi_lib=libffi.so.7
pager_lib_dir=~/miniconda/envs/pager/lib
test_res=$(pager -h 2>&1)
echo "$test_res" | grep "${ffi_lib}"
if [ $? == 0 ]; then
    echo "Issue finding library ${ffi_lib}. Trying to resolve..."
    # Reuse $ffi_lib instead of repeating the literal filename, so the two
    # stay in sync if the soname ever changes.
    ffi_files=$(find ~/miniconda -name "${ffi_lib}")
    if [ -z "$ffi_files" ]; then
        # this library is not found on the system
        echo "Cannot find missing library ${ffi_lib}. Please attempt to sort out libffi library issue."
        exit 1
    fi
    # Take the first matching path. The original `echo $ffi_files | head -1`
    # collapsed every match onto a single line, so head returned all of the
    # paths rather than the first one.
    ffi_file=$(echo "$ffi_files" | head -n 1)
    echo "Copying ${ffi_file} to ${pager_lib_dir}..."
    cp "$ffi_file" "$pager_lib_dir"
    pager -h 2>/dev/null
    if [ $? != 0 ]; then
        echo "pager is still broken. Please address this manually."
        exit 1
    fi
fi
# Install default profile
#python bin/sm_profile -c default -a
# Tell the user they have to activate this environment
echo "Type 'conda activate $VENV' to use this new virtual environment."
|
<filename>contracts/industry/aviation_sample_contract.0.6/eventAnalyticAdjustment.go
/*
Copyright (c) 2016 IBM Corporation and other Contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
Contributors:
<NAME> - Initial Contribution
*/
// v1 KL Aug 2016 Add analytics adjustment event
package main
import (
"fmt"
"github.com/hyperledger/fabric/core/chaincode/shim"
)
// eventAnalyticAdjustment is the chaincode entry point for the
// analyticAdjustment event: it unmarshals the incoming argument and
// delegates the state change to handleAssemblyAnalyticAdjustmentEvent.
// The entry point itself never returns a payload.
func eventAnalyticAdjustment(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {
	event, err := getUnmarshalledArgument(stub, "eventAnalyticAdjustment", args)
	if err != nil {
		return nil, err
	}
	if _, err = handleAssemblyAnalyticAdjustmentEvent(stub, event); err != nil {
		return nil, err
	}
	return nil, nil
}
// handleAssemblyAnalyticAdjustmentEvent applies an analyticAdjustment event
// to the assembly asset it names: load the asset state, adjust the
// life-limit counters, stamp timestamp/last-event metadata, run alerts and
// rules, and write the state back. Returns the updated state.
// NOTE: the steps are order-dependent; each feeds the next.
func handleAssemblyAnalyticAdjustmentEvent(stub shim.ChaincodeStubInterface, event interface{}) (interface{}, error) {
assetID, err := getEventAssetID("handleAssemblyAnalyticAdjustmentEvent", "analyticAdjustment", "analyticAdjustment.assembly", event)
if err != nil {
return nil, err
}
state, err := getUnmarshalledState(stub, "handleAssemblyAnalyticAdjustmentEvent", assetID)
if err != nil {
return nil, err
}
// adjust the life limit according to instructions
state, err = processAnalyticAdjustmentAction(stub, state, event, assetID)
if err != nil {
return nil, err
}
// state will be of type interface{} for use with crudUtils
state, err = addTXNTimestampToState(stub, "handleAssemblyAnalyticAdjustmentEvent", state)
if err != nil {
return nil, err
}
state = addLastEventToState(stub, "handleAssemblyAnalyticAdjustmentEvent", event, state, "")
state, err = handleAlertsAndRules(stub, "handleAssemblyAnalyticAdjustmentEvent", "analyticAdjustment", assetID, event, state)
if err != nil {
return nil, err
}
err = putMarshalledState(stub, "handleAssemblyAnalyticAdjustmentEvent", "analyticAdjustment", assetID, state)
if err != nil {
return nil, err
}
return state, nil
}
// processAnalyticAdjustmentAction reads the action and amount from the event
// and applies them to the asset's adjusted counters (adjustedCycles,
// aCheckCounterAdjusted, bCheckCounterAdjusted). The base counters must
// already exist in state; the adjusted variants default to the base value
// the first time an adjustment arrives. Returns the updated state.
func processAnalyticAdjustmentAction(stub shim.ChaincodeStubInterface, state interface{}, event interface{}, assetID string) (interface{}, error) {
action, found := getObjectAsString(event, "analyticAdjustment.action")
if !found {
err := fmt.Errorf("processAnalyticAdjustmentAction: action property not found in event: %+v", event)
log.Error(err)
return nil, err
}
amount, found := getObjectAsNumber(event, "analyticAdjustment.amount")
if !found {
err := fmt.Errorf("processAnalyticAdjustmentAction: amount property not found in event: %+v", event)
log.Error(err)
return nil, err
}
// get the three counters and their adjusted variants
cycles, found := getObjectAsNumber(state, "cycles")
if !found {
err := fmt.Errorf("processAnalyticAdjustmentAction: cycles property not found in state, possible out of order event: %+v", state)
log.Error(err)
return nil, err
}
adjustedCycles, found := getObjectAsNumber(state, "adjustedCycles")
if !found {
// first adjustment
adjustedCycles = cycles
}
acc, found := getObjectAsNumber(state, "aCheckCounter")
if !found {
err := fmt.Errorf("processAnalyticAdjustmentAction: aCheckCounter property not found in state, possible out of order event: %+v", state)
log.Error(err)
return nil, err
}
acca, found := getObjectAsNumber(state, "aCheckCounterAdjusted")
if !found {
// first cycle count
acca = acc
}
bcc, found := getObjectAsNumber(state, "bCheckCounter")
if !found {
err := fmt.Errorf("processAnalyticAdjustmentAction: bCheckCounter property not found in state, possible out of order event: %+v", state)
log.Error(err)
return nil, err
}
bcca, found := getObjectAsNumber(state, "bCheckCounterAdjusted")
if !found {
// first cycle count
bcca = bcc
}
// adjust all of the adjusted variants
// NOTE: "adjustLifeLimit" is the only action currently recognized.
switch action {
case "adjustLifeLimit":
adjustedCycles += amount
acca += amount
bcca += amount
default:
err := fmt.Errorf("processAnalyticAdjustmentAction: unknown action property: %s", action)
log.Error(err)
return nil, err
}
// put all of the adjusted variants into the state
state, ok := putObject(state, "adjustedCycles", adjustedCycles)
if !ok {
err := fmt.Errorf("processAnalyticAdjustmentAction: adjustedCycles property could not be written into state: %+v", state)
log.Error(err)
return nil, err
}
state, ok = putObject(state, "aCheckCounterAdjusted", acca)
if !ok {
err := fmt.Errorf("processAnalyticAdjustmentAction: aCheckCounterAdjusted property could not be written into state: %+v", state)
log.Error(err)
return nil, err
}
state, ok = putObject(state, "bCheckCounterAdjusted", bcca)
if !ok {
err := fmt.Errorf("processAnalyticAdjustmentAction: bCheckCounterAdjusted property could not be written into state: %+v", state)
log.Error(err)
return nil, err
}
return state, nil
}
|
<reponame>city41/ardukoi
#include <Arduboy2.h>
#include "player.h"
#include "state.h"
#include "maskBitmaps.h"
#include "nonMaskBitmaps.h"
#include "strings.h"
#include "renderer.h"
#include "tileFloor.h"
#include "util.h"
#include "fishTemplates.h"
#include "enumUtils.h"
#include "world.h"
#include "boat.h"
#include "toast.h"
extern Renderer renderer;
extern Arduboy2Base arduboy;
// Movement speed in pixels per update tick.
const uint8_t PLAYER_VELOCITY = 2;
// Number of bite attempts before an idle cast gives up and returns to walking.
const uint8_t CAST_TIMEOUT = 10;
// Frames the "caught a fish" banner stays on screen.
const uint8_t ANNOUNCE_FISH_COUNT = 100;
// Per-direction (sprite index, mirror flag) pairs for the walking sprite.
const uint8_t PROGMEM playerSpriteIndexAndMirror[] = {
// LEFT
0, 0,
// RIGHT
0, MIRROR_HORIZONTAL,
// UP
2, 0,
// DOWN
3, 0
};
// Restores the player to a fresh-game state: starting position, default
// bait, first collection page, and the boat docked at the beach.
void Player::reset() {
moveTo(STARTING_X, STARTING_Y);
currentBait = BaitType::Worm;
currentCollectionRow = 0;
Boat::currentDock = Dock::Beach;
Boat::x = BEACH_DOCK_X;
}
bool Player::isOnSolidTile() {
return !TileFloor::isWalkable(TileFloor::getTileAt(x + 8, y + 14));
}
/**
 * The player has just started scanning,
 * based on the way the player is facing, start the cursor
 * in front of the player
 */
// Per-direction (x, y) pixel offsets, indexed as cursorOffsets[dir*2 + {0,1}].
const int8_t PROGMEM cursorOffsets[] = {
// LEFT
-10, // x
8, // y
// RIGHT
18,
8,
// UP
4,
-8,
// DOWN
4,
18
};
void Player::placeCursorBasedOnDir() {
const int8_t cursorXOffset = pgm_read_byte(cursorOffsets + dir * 2);
const int8_t cursorYOffset = pgm_read_byte(cursorOffsets + dir * 2 + 1);
cursorX = x + cursorXOffset;
cursorY = y + cursorYOffset;
}
// Walking state: A begins scanning (if bait is available), B opens the
// menu, d-pad moves the player with solid-tile collision undo.
void Player::updateWalk(uint8_t frame) {
if (arduboy.pressed(A_BUTTON)) {
if (State::gameState.baitCounts[static_cast<int8_t>(currentBait)] > 0) {
placeCursorBasedOnDir();
currentUpdate = &Player::updateScanning;
currentRender = &Player::renderScanning;
return;
} else {
// no bait of the selected type: show a toast instead of scanning
Toast::toast(noBait_string, 50);
}
}
if (arduboy.justPressed(B_BUTTON)) {
areYouSure = false;
menuRow = MenuRow::COLLECTION;
currentUpdate = &Player::updateMenu;
currentRender = &Player::renderMenu;
return;
}
int16_t newX = x;
int16_t newY = y;
if (arduboy.pressed(DOWN_BUTTON)) {
newY += PLAYER_VELOCITY;
}
if (arduboy.pressed(UP_BUTTON)) {
newY -= PLAYER_VELOCITY;
}
if (arduboy.pressed(LEFT_BUTTON)) {
newX -= PLAYER_VELOCITY;
}
if (arduboy.pressed(RIGHT_BUTTON)) {
newX += PLAYER_VELOCITY;
}
moveTo(newX, newY);
// revert the move if it landed on a solid tile
if (isOnSolidTile()) {
undoMove();
}
// drives the walk animation in renderWalk
movedThisFrame = x != prevX || y != prevY;
}
// Draws the walking sprite for the current direction, alternating the
// animation every 10 frames while the player is moving.
void Player::renderWalk(uint8_t frame) {
const uint8_t* offset = playerSpriteIndexAndMirror + (dir * 2);
uint8_t spriteIndex = pgm_read_byte(offset);
MirrorMode mirror = (MirrorMode)pgm_read_byte(offset + 1);
if (movedThisFrame && ((frame / 10) % 2) == 0) {
// left/right walk uses the next sprite; up/down flips the same sprite
if (dir == LEFT || dir == RIGHT) {
++spriteIndex;
} else {
mirror = MIRROR_HORIZONTAL;
}
}
renderer.drawPlusMask(x, y, player_plus_mask, spriteIndex, mirror);
}
// Pause-menu state: B closes the menu, up/down move the row cursor,
// left/right cycle the selected bait, A activates the highlighted row.
void Player::updateMenu(uint8_t frame) {
if (arduboy.justPressed(B_BUTTON)) {
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
return;
}
if (arduboy.justPressed(DOWN_BUTTON)) {
menuRow = next(menuRow);
}
if (arduboy.justPressed(UP_BUTTON)) {
menuRow = prev(menuRow);
}
if (arduboy.justPressed(RIGHT_BUTTON)) {
currentBait = next(currentBait);
}
if (arduboy.justPressed(LEFT_BUTTON)) {
currentBait = prev(currentBait);
}
if (arduboy.justPressed(A_BUTTON)) {
switch (static_cast<MenuRow>(menuRow)) {
case MenuRow::COLLECTION:
currentUpdate = &Player::updateCollection;
currentRender = &Player::renderCollection;
break;
case MenuRow::SAVE:
State::saveToEEPROM();
Toast::toast(gameSaved_string, 90);
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
break;
case MenuRow::SFX:
// toggle sound and persist the preference
Arduboy2Audio::toggle();
Arduboy2Audio::saveOnOff();
break;
case MenuRow::DELETE:
currentUpdate = &Player::updateAreYouSure;
currentRender = &Player::renderAreYouSure;
break;
}
}
}
// Draws the pause menu: bait icons with a selection cursor, the four menu
// rows with a row cursor, and the money readout at the bottom.
void Player::renderMenu(uint8_t frame) {
renderer.pushTranslate(10, 0);
renderer.fillRect(0, 0, WIDTH / 2 - 20, HEIGHT, BLACK);
// bait
const uint8_t baitY = 4;
renderer.drawOverwrite(2, baitY, wormIcon_tiles, 0);
renderer.drawOverwrite(12, baitY, grub_tiles, 0);
renderer.drawOverwrite(22, baitY, shrimp_tiles, 0);
renderer.drawOverwrite(32, baitY, meat_tiles, 0);
// bait cursor
renderer.drawOverwrite(4 + static_cast<int8_t>(currentBait) * 10, baitY + 10, squareIcon_tiles, 0);
// menu items
const uint8_t startY = 24;
const uint8_t spacing = 7;
renderer.drawString(6, startY, collection_string);
renderer.drawString(6, startY + spacing * 1, save_string);
const uint8_t* sfxString = Arduboy2Audio::enabled() ? sfxOn_string : sfxOff_string;
renderer.drawString(6, startY + spacing * 2, sfxString);
renderer.drawString(6, startY + spacing * 3, delete_string);
renderer.drawOverwrite(1, startY + static_cast<int8_t>(menuRow) * spacing, squareIcon_tiles, 0);
// money
renderer.drawPlusMask(6, startY + spacing * 4 + 3, currencySymbol_plus_mask, 0);
renderer.drawNumber(12, startY + spacing * 4 + 4, State::gameState.money);
renderer.popTranslate();
}
// "Really delete your save?" confirmation state. B backs out; up/down
// toggle the no/yes selection; A confirms the current selection.
void Player::updateAreYouSure(uint8_t frame) {
    if (arduboy.justPressed(B_BUTTON)) {
        currentUpdate = &Player::updateWalk;
        currentRender = &Player::renderWalk;
        return;
    }
    if (arduboy.justPressed(DOWN_BUTTON)) {
        areYouSure = true;
    }
    if (arduboy.justPressed(UP_BUTTON)) {
        areYouSure = false;
    }
    if (arduboy.justPressed(A_BUTTON)) {
        // Only wipe and reload the save when "yes" is selected; either
        // way the player returns to the walking state.
        if (areYouSure) {
            State::clearEEPROM();
            State::load();
            reset();
        }
        currentUpdate = &Player::updateWalk;
        currentRender = &Player::renderWalk;
    }
}
// Draws the delete-save confirmation dialog with a no/yes cursor.
void Player::renderAreYouSure(uint8_t frame) {
renderer.pushTranslate(10, 0);
renderer.fillRect(0, 0, WIDTH / 2 - 20, HEIGHT, BLACK);
renderer.drawString(6, 20, really_string);
renderer.drawString(6, 26, delete_string);
renderer.drawString(6, 32, save_string);
renderer.drawOverwrite(27, 32, questionMark_tiles, 0);
renderer.drawString(10, 44, no_string);
renderer.drawString(10, 50, yes_string);
// cursor sits next to "no" (areYouSure == false) or "yes" (true)
renderer.drawOverwrite(6, 44 + static_cast<int8_t>(areYouSure) * 6, squareIcon_tiles, 0);
renderer.popTranslate();
}
// Collection-screen state: up/down scroll the fish list (clamped so the
// last page stays full), left/right toggle the quantity/length column,
// B returns to walking.
void Player::updateCollection(uint8_t frame) {
if (arduboy.justPressed(DOWN_BUTTON)) {
currentCollectionRow = min(static_cast<int8_t>(FishType::COUNT) - 2, currentCollectionRow + 1);
Sfx::menuTick();
}
if (arduboy.justPressed(UP_BUTTON)) {
currentCollectionRow = max(0, currentCollectionRow - 1);
Sfx::menuTick();
}
if (arduboy.justPressed(RIGHT_BUTTON) || arduboy.justPressed(LEFT_BUTTON)) {
currentCollectionColumn = next(currentCollectionColumn);
Sfx::menuTick();
}
if (arduboy.justPressed(B_BUTTON)) {
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
}
}
// Draws the collection screen: up to three fish starting at
// currentCollectionRow, showing either quantity caught or best length,
// question marks for unseen fish, and scroll arrows at the edges.
void Player::renderCollection(uint8_t frame) {
renderer.pushTranslate(0, 0);
renderer.fillRect(18, 10, WIDTH - 36, HEIGHT - 5, BLACK);
const uint8_t* headerStr = currentCollectionColumn == CollectionColumn::Quantity ? quantity_string : length_string;
renderer.drawString(22, 12, headerStr);
const uint8_t spacing = 15;
const uint8_t startY = 17;
Fish fish;
for (uint8_t f = currentCollectionRow; f < static_cast<int8_t>(FishType::COUNT) && f < currentCollectionRow + 3; ++f) {
uint8_t offset = f - currentCollectionRow;
renderer.drawNumber(22, startY + spacing * offset + 7, f + 1);
if (State::gameState.acquiredFish[f]) {
// name string and bitmap come from the PROGMEM fish template table
const uint8_t* fishString = static_cast<const uint8_t*>(pgm_read_ptr(fish_templates_16t + f * NUM_16T_PROPS + 2));
const uint8_t* fishBmp = static_cast<const uint8_t*>(pgm_read_ptr(fish_templates_16t + f * NUM_16T_PROPS + 3));
const int16_t* numArray = (
currentCollectionColumn == CollectionColumn::Quantity ? State::gameState.currentFishCount
: State::gameState.bestLength
);
Fish::loadFish(static_cast<FishType>(f), fish);
// show a checkmark if the user has caught the biggest possible fish
if (currentCollectionColumn == CollectionColumn::Length && fish.type != FishType::OLD_BOOT) {
if (numArray[f] >= fish.maxLength) {
renderer.drawString(88, startY + spacing * offset + 7, checkmark_string);
}
}
renderer.drawOverwrite(44, startY + spacing * offset, fishBmp, 0);
renderer.drawString(44, startY + spacing * offset + 9, fishString);
if (currentCollectionColumn != CollectionColumn::Length || fish.type != FishType::OLD_BOOT) {
renderer.drawNumber(92, startY + spacing * offset + 9, numArray[f]);
}
} else {
renderer.drawOverwrite(44, startY + spacing * offset + 7, questionMark_tiles, 0);
}
}
// scroll indicators
if (currentCollectionRow > 0) {
renderer.drawPlusMask(WIDTH - 26, startY + 2, arrow_plus_mask, 1);
}
if (currentCollectionRow < static_cast<int8_t>(FishType::COUNT) - 2) {
renderer.drawPlusMask(WIDTH - 26, HEIGHT - 4, arrow_plus_mask, 0);
}
}
// Scanning state (A held): d-pad moves the cast cursor. Releasing A casts
// if the cursor is over fishable water (consuming one bait), otherwise
// returns to walking. The beginner pole limits cast range; the cursor is
// always clamped to the map.
void Player::updateScanning(uint8_t frame) {
if (!arduboy.pressed(A_BUTTON)) {
TileDef tile = TileFloor::getTileAt(cursorX + 4, cursorY + 4);
if (TileFloor::isFishable(tile)) {
castCount = 0;
// bait is spent at cast time, whether or not a fish bites
State::gameState.baitCounts[static_cast<int8_t>(currentBait)] -= 1;
dir = determineDirection(x, y, cursorX, cursorY, dir);
currentUpdate = &Player::updateCast;
currentRender = &Player::renderCast;
} else {
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
}
return;
}
if (arduboy.pressed(DOWN_BUTTON)) {
cursorY += PLAYER_VELOCITY;
}
if (arduboy.pressed(UP_BUTTON)) {
cursorY -= PLAYER_VELOCITY;
}
if (arduboy.pressed(LEFT_BUTTON)) {
cursorX -= PLAYER_VELOCITY;
}
if (arduboy.pressed(RIGHT_BUTTON)) {
cursorX += PLAYER_VELOCITY;
}
// if the player only has the beginner pole, it has a limited casting range
// the pro pole can cast anywhere on the screen
// why? only pro pole can cast into deep water
if (!State::gameState.hasProPole) {
cursorY = min(max(cursorY, y - 16), y + 24);
cursorX = min(max(cursorX, x - 16), x + 24);
}
// make sure cursor doesn't leave the map
cursorX = max(min(MAP_WIDTH_PX - 16, cursorX), 0);
cursorY = max(min(MAP_HEIGHT_PX - 8, cursorY), 0);
}
// Draws the player holding the pole plus the cast cursor while scanning.
// Sprite/pole placement is hand-tuned per facing direction.
void Player::renderScanning(uint8_t frame) {
uint8_t spriteIndex;
uint8_t poleIndex = 0;
MirrorMode mirror = NO_MIRROR;
MirrorMode poleMirror = NO_MIRROR;
int16_t poleX;
int16_t poleY;
// these switch statements use less space than PROGMEM arrays :(
switch (dir) {
case LEFT:
spriteIndex = 4;
poleY = y - 4;
poleX = x + 14;
break;
case RIGHT:
spriteIndex = 4;
mirror = MIRROR_HORIZONTAL;
poleMirror = MIRROR_HORIZONTAL;
poleY = y - 4;
poleX = x - 12;
break;
case UP:
spriteIndex = 6;
poleIndex = 2;
poleMirror = MIRROR_VERTICAL;
poleY = y + 12;
poleX = x + 1;
break;
case DOWN:
spriteIndex = 3;
poleIndex = 2;
poleY = y - 12;
poleX = x;
mirror = MIRROR_HORIZONTAL;
poleMirror = MIRROR_HORIZONTAL;
break;
}
renderer.drawPlusMask(x, y, player_plus_mask, spriteIndex, mirror);
renderer.drawPlusMask(poleX, poleY, fishingPole_plus_mask, poleIndex, poleMirror);
renderer.drawOverwrite(cursorX, cursorY, cursor_tiles, 0, 0);
}
// Cast state: waiting for a bite. B cancels back to walking. A bite roll
// happens once per frame-counter cycle (only when frame == 60); after
// CAST_TIMEOUT unsuccessful rolls the cast gives up. On a bite, the fish is
// loaded and the normal or pro-mode reel state begins.
void Player::updateCast(uint8_t frame) {
if (arduboy.justPressed(B_BUTTON)) {
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
return;
}
if (frame != 60) {
return;
}
castCount += 1;
if (castCount == CAST_TIMEOUT) {
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
return;
}
TileDef tileBeingFished = TileFloor::getTileAt(cursorX + 4, cursorY + 4);
FishType fishType = getFishThatBit(tileBeingFished == TileDef::PondMiddleDeep);
LOGV(static_cast<int8_t>(fishType));
if (fishType != FishType::UNSET) {
Fish::loadFish(fishType, currentFish);
// reel tug-of-war starts at the midpoint
reelLevel = WIDTH / 2;
if (State::gameState.useProMode) {
inProWindowCount = 0;
proReelTime = 0;
currentUpdate = &Player::updateReelProMode;
currentRender = &Player::renderReelProMode;
} else {
currentUpdate = &Player::updateReel;
currentRender = &Player::renderReel;
}
}
}
// Returns the fish's weighting (its ratio) for the current cast, or 0 when
// any condition disqualifies it: wrong water depth, outside its active
// hours, cursor outside its x-range, or it rejects the current bait.
uint8_t Player::getPointsForFish(Fish& fish, bool isDeepWater) {
uint8_t hour = State::getCurrentHour();
if (isDeepWater != fish.deepWater) {
LOG("fish is not deep water");
return 0;
}
if (hour < fish.minHour || hour > fish.maxHour) {
LOG("hour out of range");
return 0;
}
LOGV(cursorX);
if (cursorX < fish.minX || cursorX > fish.maxX) {
LOG("x out of range");
return 0;
}
if (fish.baitPreferences[static_cast<uint8_t>(currentBait)] == 0) {
LOG("fish doesnt like the bait");
return 0;
}
return fish.ratio;
}
// Weighted random draw of which fish (if any) bit on this roll.
// Every eligible fish contributes `points` slots; 29 extra slots mean
// "nothing bit" (UNSET) and 1 extra slot means an old boot.
// Returns FishType::UNSET when nothing bites.
FishType Player::getFishThatBit(bool isDeepWater) {
    Fish fish;
    uint8_t diceRollIndex = 0;
    uint8_t maxPoints = 0;
    uint8_t candidateFishes = 0;
    // Score every fish type; non-zero scores become weighted entries.
    // NOTE(review): maxPoints is uint8_t and could wrap if total weights
    // exceed 255 -- confirm the template table keeps totals small.
    for (uint8_t f = 0; f < static_cast<int8_t>(FishType::COUNT); ++f) {
        FishType fishType = static_cast<FishType>(f);
        Fish::loadFish(fishType, fish);
        uint8_t points = getPointsForFish(fish, isDeepWater);
        LOGV(points);
        if (points > 0) {
            maxPoints += points;
            candidateFishes += 1;
            fishDiceRoll[diceRollIndex].type = fishType;
            fishDiceRoll[diceRollIndex].points = points;
            diceRollIndex += 1;
        }
    }
    if (maxPoints == 0) {
        return FishType::UNSET;
    }
    // roll in [0, maxPoints + 30): slots maxPoints..maxPoints+28 are a
    // miss, slot maxPoints+29 is the old boot.
    uint8_t roll = random(0, maxPoints + 30);
    if (roll >= maxPoints && roll < maxPoints + 29) {
        return FishType::UNSET;
    }
    if (roll >= maxPoints) {
        return FishType::OLD_BOOT;
    }
    // Walk the cumulative weights to find which candidate the roll landed on.
    uint8_t currentPoints = 0;
    for (uint8_t fr = 0; fr < candidateFishes; ++fr) {
        if (roll >= currentPoints && roll < currentPoints + fishDiceRoll[fr].points) {
            return fishDiceRoll[fr].type;
        }
        currentPoints += fishDiceRoll[fr].points;
    }
    // Defensive fallback: the loop covers [0, maxPoints) so this is
    // unreachable in practice, but the original flowed off the end of a
    // value-returning function here, which is undefined behavior in C++.
    return FishType::UNSET;
}
// Draws the player mid-cast plus the bobber, which alternates between two
// frames (and XOR-blinks) while waiting for a bite.
void Player::renderCast(uint8_t frame) {
uint8_t spriteIndex;
uint8_t poleIndex = 0;
MirrorMode playerMirror = NO_MIRROR;
MirrorMode poleMirror = NO_MIRROR;
// off-screen defaults; overwritten by every switch branch
int16_t poleX = -200;
int16_t poleY = -200;
switch (dir) {
case LEFT:
spriteIndex = 5;
poleY = y;
poleX = x - 10;
poleMirror = MIRROR_HORIZONTAL;
break;
case RIGHT:
spriteIndex = 5;
playerMirror = MIRROR_HORIZONTAL;
poleY = y;
poleX = x + 10;
break;
case UP:
spriteIndex = 6;
poleIndex = 2;
poleMirror = MIRROR_HORIZONTAL;
poleY = y - 10;
poleX = x + 6;
break;
case DOWN:
spriteIndex = 3;
poleIndex = 2;
poleMirror = MIRROR_VERTICAL;
poleY = y + 10;
poleX = x - 1;
break;
}
renderer.drawPlusMask(poleX, poleY, fishingPole_plus_mask, poleIndex, poleMirror);
renderer.drawPlusMask(x, y, player_plus_mask, spriteIndex, playerMirror);
renderer.drawOverwrite(cursorX, cursorY, bobber_tiles, static_cast<uint8_t>(frame > 30), 0, Xor);
}
// Normal reeling: the fish pulls reelLevel down twice per second, each A
// press pulls it up (harder with the pro pole). Hitting 0 loses the fish;
// hitting WIDTH-2 lands it and records the catch.
void Player::updateReel(uint8_t frame) {
if (frame % 30 == 0) {
reelLevel = max(reelLevel - currentFish.pull, 0);
}
if (arduboy.justPressed(A_BUTTON)) {
Sfx::menuTick();
uint8_t playerPull = State::gameState.hasProPole ? 15 : 10;
reelLevel = min(WIDTH - 2, reelLevel + playerPull);
}
if (reelLevel == 0) {
// fish got away
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
} else if (reelLevel == WIDTH - 2) {
// landed: record the catch, roll its length, and show the banner
State::setFishAcquired(currentFish.type);
State::incrementCurrentCount(currentFish.type);
currentFish.rollForLength();
State::setFishLength(currentFish);
announceFishCount = ANNOUNCE_FISH_COUNT;
if (currentFish.type == FishType::OLD_BOOT) {
Sfx::buzz();
} else {
Sfx::gotFish();
}
currentUpdate = &Player::updateGetFish;
currentRender = &Player::renderGetFish;
}
}
// Draws the player straining against the fish (sprite/pole jitter after
// frame 50) and the reel progress bar along the bottom of the screen.
void Player::renderReel(uint8_t frame) {
uint8_t spriteIndex;
uint8_t poleIndex = 1;
MirrorMode playerMirror = NO_MIRROR;
MirrorMode poleMirror = NO_MIRROR;
int16_t poleX;
int16_t poleY;
switch (dir) {
case LEFT:
spriteIndex = 5;
poleY = y;
poleX = x - 11;
poleMirror = MIRROR_HORIZONTAL;
if (frame > 50) {
spriteIndex = 7;
poleX += 1;
}
break;
case RIGHT:
spriteIndex = 5;
playerMirror = MIRROR_HORIZONTAL;
poleY = y;
poleX = x + 13;
if (frame > 50) {
spriteIndex = 7;
poleX -= 1;
}
break;
case UP:
spriteIndex = 6;
poleIndex = 2;
poleMirror = MIRROR_HORIZONTAL;
poleY = y - 10;
poleX = x + 5;
if (frame > 50) {
poleX += 1;
}
break;
case DOWN:
spriteIndex = 3;
poleIndex = 2;
poleMirror = MIRROR_VERTICAL;
poleY = y + 10;
poleX = x - 1;
if (frame > 50) {
poleX += 1;
}
break;
}
renderer.drawPlusMask(poleX, poleY, fishingPole_plus_mask, poleIndex, poleMirror);
renderer.drawPlusMask(x, y, player_plus_mask, spriteIndex, playerMirror);
renderer.pushTranslate(0, 0);
// black background to serve as the frame
renderer.fillRect(29, HEIGHT - 12, WIDTH - 58, 8, BLACK);
// white background to serve as the empty part
renderer.fillRect(30, HEIGHT - 11, WIDTH - 60, 6, WHITE);
// black progress bar at the current reel level
renderer.fillRect(32, HEIGHT - 10, reelLevel / 2, 4, BLACK);
renderer.popTranslate();
}
// Pro-mode reeling: the fish's pull grows over time (proReelTime), A is
// held for small continuous pulls, and the catch only lands after the reel
// level stays inside the fish's pro window for 96 consecutive updates.
void Player::updateReelProMode(uint8_t frame) {
if (frame % 20 == 0) {
proReelTime = min(proReelTime + 1, 30);
reelLevel = max(reelLevel - currentFish.pull - proReelTime, 0);
}
if (arduboy.pressed(A_BUTTON)) {
Sfx::menuTick();
reelLevel = min(WIDTH - 2, reelLevel + 2);
}
if (reelLevel == 0) {
// fish got away
currentUpdate = &Player::updateWalk;
currentRender = &Player::renderWalk;
Sfx::buzz();
} else if (reelLevel >= currentFish.proWindow && reelLevel <= currentFish.proWindow + 24) {
inProWindowCount += 1;
if (inProWindowCount == 96) {
// held the window long enough: record the catch
State::setFishAcquired(currentFish.type);
State::incrementCurrentCount(currentFish.type);
currentFish.rollForLength();
State::setFishLength(currentFish);
announceFishCount = ANNOUNCE_FISH_COUNT;
if (currentFish.type == FishType::OLD_BOOT) {
Sfx::buzz();
} else {
Sfx::gotFish();
}
currentUpdate = &Player::updateGetFish;
currentRender = &Player::renderGetFish;
}
} else {
// leaving the window resets the hold progress
inProWindowCount = 0;
}
}
// Draws the normal reel UI plus the pro window: a blinking marker above the
// bar and a fill that grows while the reel level stays inside the window.
void Player::renderReelProMode(uint8_t frame) {
renderReel(frame);
renderer.pushTranslate(0, 0);
renderer.fillRect(
32 + currentFish.proWindow / 2,
HEIGHT - 14,
12,
2,
frame & 1 ? WHITE : BLACK
);
renderer.fillRect(
32 + currentFish.proWindow / 2,
HEIGHT - 14,
inProWindowCount / 8,
2,
State::isDay() ? BLACK : WHITE
);
renderer.popTranslate();
}
// Counts down the "got a fish" banner and returns to walking when it expires.
void Player::updateGetFish(uint8_t frame) {
    if (--announceFishCount == 0) {
        currentUpdate = &Player::updateWalk;
        currentRender = &Player::renderWalk;
    }
}
// Dimensions of the "got a fish" announcement panel, in pixels.
const uint8_t GET_FISH_FRAME_WIDTH = 48;
const uint8_t GET_FISH_FRAME_HEIGHT = 54;
// Draws the catch announcement: the fish bitmap, its name centered in the
// panel, and (for real fish) its length in cm plus a checkmark when the
// length is the species' maximum.
void Player::renderGetFish(uint8_t frame) {
renderer.fillRect(x - 13, y - 12, GET_FISH_FRAME_WIDTH, GET_FISH_FRAME_HEIGHT, BLACK);
renderer.fillRect(x - 12, y, GET_FISH_FRAME_WIDTH - 2, 17, WHITE);
renderer.drawOverwrite(
x - 13 + (GET_FISH_FRAME_WIDTH / 2 - currentFish.bmpWidth / 2),
y - 10,
currentFish.bmp,
0
);
// TODO: make this an Animation
// brief neutral pose at start and end of the banner; otherwise the happy
// pose (9) or the disappointed old-boot pose (10)
uint8_t spriteIndex = (announceFishCount > ANNOUNCE_FISH_COUNT - 20 || announceFishCount < 30) ? 8 : currentFish.type == FishType::OLD_BOOT ? 10 : 9;
renderer.drawPlusMask(x, y, player_plus_mask, spriteIndex);
renderer.drawString(
x - 13 + (GET_FISH_FRAME_WIDTH / 2 - currentFish.nameLength * 5 / 2),
y + 20,
currentFish.nameString
);
if (currentFish.type != FishType::OLD_BOOT) {
// draw the size of the fish, centered under its name
// a bit fiddly since the length of the fish can vary
// how many characters is the fishes length plus the "cm" label?
int8_t lengthChars = currentFish.length > 99 ? 5 : currentFish.length > 9 ? 4 : 3;
int16_t lengthStartX = x - 13 + (GET_FISH_FRAME_WIDTH / 2 - lengthChars * 5 / 2);
renderer.drawNumber(
lengthStartX,
y + 26,
currentFish.length
);
renderer.drawString(
lengthStartX + lengthChars * (5 - 2) + 1,
y + 26,
cm_string
);
if (currentFish.length == currentFish.maxLength) {
renderer.drawString(
lengthStartX + lengthChars * 5 + 6,
y + 26,
checkmark_string
);
}
}
}
// Per-frame dispatch through the current state's member-function pointers
// (set by the update* handlers when the player changes state).
void Player::update(uint8_t frame) {
(this->*currentUpdate)(frame);
}
void Player::render(uint8_t frame) {
(this->*currentRender)(frame);
}
// Picking up a spawned worm adds one worm bait (baitCounts[0]), capped
// below 255 to avoid uint8_t wraparound, with a pickup sound.
void Player::onGetWorm(Worm& worm) {
if (worm.isSpawned && State::gameState.baitCounts[0] < 254) {
State::gameState.baitCounts[0] +=1;
Sfx::purchase();
}
}
// Maps the movement from (px, py) to (x, y) onto a facing direction,
// preferring the axis with the larger displacement; a tie favors the
// vertical axis, and no movement keeps the previous direction.
Direction Player::determineDirection(int16_t px, int16_t py, int16_t x, int16_t y, Direction prevDir) {
    const int16_t dx = x - px;
    const int16_t dy = y - py;
    if (dx == 0 && dy == 0) {
        return prevDir;
    }
    if (abs(dx) > abs(dy)) {
        return dx < 0 ? LEFT : RIGHT;
    }
    return dy < 0 ? UP : DOWN;
}
// Move the player to (newX, newY), maintaining the previous-position
// history used by undoMove() and deriving the facing direction from the
// displacement.
// resetPrev: overwrite the history with the destination itself, which
// yields zero displacement and therefore leaves `dir` unchanged.
void Player::moveTo(int16_t newX, int16_t newY, boolean resetPrev) {
  if (resetPrev) {
    prevX = newX;
    prevY = newY;
  } else if (prevX != x || prevY != y) {
    // Only record a new "previous" point when we actually moved last time,
    // so repeated moveTo() calls to the same spot keep a useful history.
    prevX = x;
    prevY = y;
  }
  x = newX;
  y = newY;
  dir = determineDirection(prevX, prevY, x, y, dir);
}
// Revert the player to the position recorded before the last moveTo().
// Note: `dir` and the prev* history are intentionally left untouched.
void Player::undoMove() {
  x = prevX;
  y = prevY;
}
|
#!/bin/bash
# Tests that a pbforwarder cube forwards ICMP echo between two network
# namespaces once explicit L3 (IP) and L2 (MAC) FORWARD rules are installed.
source "${BASH_SOURCE%/*}/helpers.bash"

# Tear down the forwarder and veth pairs on exit, even after a failure.
# (Renamed from the typo'd "pbfeanup".)
function cleanup {
  set +e
  del_pbforwarders 1
  del_veths 2
}
trap cleanup EXIT

set -x
set -e

# setup
create_veth 2
add_pbforwarders 1
pbforwarder_add_port pbf1 veth1
pbforwarder_add_port pbf1 veth2

# Add a bunch of not meaningful rules (filler, to exercise rule lookup)
pbforwarder_add_rules_l3 0 33 pbf1
pbforwarder_add_rules_l3 33 66 pbf1

# Get the MAC addresses of the namespaces
# (veth2_mac is collected for symmetry/debugging; only veth1_mac is used below)
veth1_mac=`LANG=C sudo ip netns exec ns1 ifconfig -a | grep -Po 'ether \K[a-fA-F0-9:]{17}|[a-fA-F0-9]{12}$'`
veth2_mac=`LANG=C sudo ip netns exec ns2 ifconfig -a | grep -Po 'ether \K[a-fA-F0-9:]{17}|[a-fA-F0-9]{12}$'`

# This should be matched on Echo Request
polycubectl pbforwarder pbf1 rules add 66 src_ip=10.0.0.1 dst_ip=10.0.0.2 action=FORWARD out_port=veth2
# This should be matched on Echo Reply
polycubectl pbforwarder pbf1 rules add 67 src_ip=10.0.0.2 dst_ip=10.0.0.1 action=FORWARD out_port=veth1
# Allows ns1->ns2 ARP Request
polycubectl pbforwarder pbf1 rules add 68 dst_mac=FF:FF:FF:FF:FF:FF action=FORWARD out_port=veth2
# Allows ns2->ns1 ARP Response
polycubectl pbforwarder pbf1 rules add 69 dst_mac=$veth1_mac action=FORWARD out_port=veth1

sudo ip netns exec ns1 ping 10.0.0.2 -c 2
|
#!/bin/bash
# Populate both sysroot layouts (the legacy cos6 triple and the current
# conda triple) with the unpacked binary package contents.
mkdir -p ${PREFIX}/x86_64-conda_cos6-linux-gnu/sysroot
mkdir -p ${PREFIX}/x86_64-conda-linux-gnu/sysroot
# NOTE(review): these relative-path checks and symlinks execute in whatever
# directory the script was invoked from, *before* the pushd below — confirm
# they are not intended to run inside the sysroot directories instead.
if [[ -d usr/lib ]]; then
  if [[ ! -d lib ]]; then
    ln -s usr/lib lib
  fi
fi
if [[ -d usr/lib64 ]]; then
  if [[ ! -d lib64 ]]; then
    ln -s usr/lib64 lib64
  fi
fi
# Copy the extracted payload into each sysroot.
pushd ${PREFIX}/x86_64-conda_cos6-linux-gnu/sysroot > /dev/null 2>&1
cp -Rf "${SRC_DIR}"/binary/* .
popd
pushd ${PREFIX}/x86_64-conda-linux-gnu/sysroot > /dev/null 2>&1
cp -Rf "${SRC_DIR}"/binary/* .
popd
|
package space.pxls;

/**
 * The variants of the pixel board that can be served or rendered.
 * NOTE(review): names inferred from context — BOARD is presumably the live
 * canvas, VIRGIN the untouched base image, and HEAT an activity heat-map;
 * confirm against the callers of this enum.
 */
public enum BoardType {
    BOARD, VIRGIN, HEAT
}
|
<gh_stars>1-10
import * as path from "path";
const Module = require("module");
const originalRequire = Module.prototype.require;
// Commands registered through testVscode.commands.registerCommand,
// keyed by command id (exposed for assertions in tests).
const oRegisteredCommands: any = {};
// Minimal OutputChannel stub — both members are no-ops.
const outputChannel = { show: () => "", append: () => "" };

/**
 * A minimal stand-in for the `vscode` module used in unit tests.
 * Message functions echo their argument, async APIs resolve immediately,
 * command registration is recorded in {@link oRegisteredCommands}, and the
 * task/shell types are inert mocks.
 */
export const testVscode: any = {
	window: {
		showWarningMessage: (message: string) => message,
		showErrorMessage: (message: string) => message,
		showInformationMessage: (message: string) => message,
		showQuickPick: () => Promise.resolve(),
		createOutputChannel: () => outputChannel
	},
	workspace: {
		// BUG FIX: the original body `{ Promise.resolve([]); }` discarded the
		// promise and returned undefined, so `await findFiles(...)` yielded
		// undefined instead of an (empty) result array.
		findFiles: () => Promise.resolve([])
	},
	commands: {
		registerCommand: (id: string, cmd: any) => {
			oRegisteredCommands[id] = cmd;
			return Promise.resolve(oRegisteredCommands);
		},
		executeCommand: () => Promise.resolve(),
		getCommands: () => Promise.resolve()
	},
	tasks: {
		executeTask: () => Promise.resolve()
	},
	ShellExecution: class MockShellExecution {
		public MockShellExecution: (command: string, options?: string) => void;
	},
	Task: class MockTask {},
	TaskScope: { Workspace: true }
};
/**
 * Patch Node's module loader so that any `require("vscode")` resolves to
 * the {@link testVscode} mock. Optionally evicts the given test module
 * from the require cache first, so it re-imports against the mock.
 */
export function mockVscode(testModulePath?: string) {
	clearModuleCache(testModulePath);
	Module.prototype.require = function(request: any) {
		return request === "vscode"
			? testVscode
			: originalRequire.apply(this, arguments);
	};
}
/**
 * Drop a single module from Node's require cache so the next `require()`
 * re-executes it. A no-op when no path is given or the module is not cached.
 */
export function clearModuleCache(testModulePath?: string) {
	if (!testModulePath) {
		return;
	}
	const resolved = path.resolve(testModulePath);
	if (require.cache[resolved]) {
		delete require.cache[resolved];
	}
}
|
The algorithm (Kadane's algorithm) for finding the maximum contiguous subarray sum in a given array is as follows:
Step 1: Initialize two variables 'max_so_far' and 'max_ending_here' with 0.
Step 2: Iterate through the given array.
Step 3: Calculate 'max_ending_here' by taking the maximum of the current element and the sum of the current element and the previous value of 'max_ending_here'.
Step 4: Update the value of 'max_so_far' only when max_ending_here is greater than 'max_so_far'.
Step 5: After the loop, 'max_so_far' will hold the maximum sum in the given array.
In the example array [1, -3, 2, 5, -7, 6], after the loop completes, max_so_far will be 7 (the sum of the subarray [2, 5]). This is the maximum contiguous subarray sum in the given array.
# Spanish (esES) locale strings for the IEs4Linux installer.
TRANSLATION_LOCALE="esES"
TRANSLATION_ENCODING="UTF8"
MSG_LANGUAGE="Español"
MSG_DOWNLOADING="Descargando todo lo que necesitamos"
MSG_INSTALLING="Instalando"
MSG_INSTALLING_FLASH="Instalando Flash Player 9"
MSG_INSTALLATION_OPTIONS="IEs4Linux hará:"
MSG_OPTION_INSTALL_IES="Instalar Internet Explorers:"
MSG_OPTION_INSTALL_FLASH="Instalar Adobe Flash 9.0"
MSG_OPTION_CREATE_ICONS="Crear iconos en el escritorio"
MSG_OPTION_BASEDIR="Instalar todo en:"
MSG_OPTION_DOWNLOADDIR="Descargar todo a:"
MSG_OPTION_LOCALE="Usando el idioma de IE:"
MSG_INITIALIZING="Iniciando"
MSG_CREATING_PREFIX="Creando el Prefijo de Wine"
MSG_EXTRACTING_CABS="Extrayendo archivos CAB"
MSG_INSTALLING_FONTS="Instalando Fuentes TTF"
MSG_INSTALLING_REGISTRY="Instalando el registro"
MSG_FINALIZING="Finalizando"
MSG_COPYING_IE6="Copiando la instalación de ie6"
MSG_EXTRACTING_FILES="Extrayendo archivos"
MSG_PROCESSING_INF="Procesando el archivo inf"
MSG_PERFORM_INSTALLATIONS="Efectuando Instalaciones"
MSG_INSTALLING_FLASH_ON="Instalando flash"
# Error / warning messages
MSG_ERROR_INSTALL_WINE="¡Necesita instalar Wine! \nDescarguelo desde: http://www.winehq.org"
MSG_ERROR_INSTALL_CABEXTRACT="¡Necesita instalar 'cabextract'! \nDescarguelo desde : http://www.kyz.uklinux.net/cabextract.php"
MSG_ERROR_UPDATE_CABEXTRACT="¡Necesita actualizar 'cabextract'! \nDescarguelo de: http://www.kyz.uklinux.net/cabextract.php"
MSG_ERROR_INVALIDLOCALE="¡Idioma inválido! Por favor ejecute IEs4Linux nuevamente y elija una opción correcta."
MSG_WARNING_ROOT="¡Usted es root! ¡Eso está totalmente desaconsejado! IE es demasiado inseguro para darle permisos de administrador.\n¿Desea un consejo de amigo? Ejecute IEs4Linux como usuario normal, o, lo que es mejor, cree un usuario separado para administrar sus IEs."
MSG_ERROR_INSTALL_WGET="¡Necesita instalar primero 'wget'!"
MSG_ERROR_INSTALL_UNZIP="¡Necesita instalar 'unzip' primero!"
MSG_WARNING_OLDWINE="IEs4Linux esta pensado para ser usado con versiones recientes de Wine (0.9.x). Parece que esta usando una versión antigua ($(wine --version)). Es recomendable que actualice a la ultima versión de Wine (Vaya a: winehq.com)."
MSG_ERROR_NO_WINEPREFIXCREATE="Tu Wine no tiene wineprefixcreate instalado. Tal vez este usando una versión antigua de Wine. Pruebe a actualizar a la ultima versión."
MSG_ERROR_CABEXTRACTING="Se produjo un error al intentar extraer algunos archivos."
MSG_ERROR_CREATE_FOLDER="No se pudo crear el directorio"
MSG_ERROR_DOWNLOADING="Se produjo un error al descargar. Por favor, ejecute IEs4Linux de nuevo. Archivo corrupto:"
MSG_ERROR_NO_GUI_AVAILABLE="No hay interfaz grafica disponible. Use IEs4Linux en linea de comandos o instale 'pygtk'. Para mas detalle: http://www.tatanka.com.br/ies4linux/page/No_GUI"
MSG_INSTALLATIONS_FINISHED="¡Las instalaciones de IEs 4 Linux han finalizado!"
MSG_RUN_IES="Para ejecutar sus IEs, escriba:"
# GUI labels
GUI_TITLE="Internet Explorer para Linux"
GUI_INSTALLATION_OPTIONS="Opciones de instalación"
GUI_IE="Internet Explorer"
GUI_EXTRA="Extra"
GUI_INSTALL_IE6="Instalar Internet Explorer 6.0 SP1"
GUI_INSTALL_IE55="Instalar Internet Explorer 5.5"
# BUG FIX: this label previously duplicated the IE 5.5 text; the IE5 option
# installs Internet Explorer 5.0.
GUI_INSTALL_IE5="Instalar Internet Explorer 5.0"
GUI_INSTALL_FLASH="Instalar Adobe Flash player 9"
GUI_CREATE_ICONS="Crear iconos en el escritorio"
GUI_LOCALE="Idioma"
GUI_ADVANCED_OPTIONS="Opciones avanzadas"
GUI_ADVANCED_BASEDIR="Directorio base"
GUI_ADVANCED_BINDIR="Directorio de ejecutables"
GUI_ADVANCED_DOWNLOADDIR="Directorio de descargas"
GUI_ADVANCED_WGETFLAGS="Parámetros para Wget"
GUI_CANCEL_INSTALL="Instalación cancelada por el usuario."
GUI_OK="Aceptar"
GUI_CANCEL="Cancelar"
GUI_CLOSE="Cerrar"
|
<reponame>pmuellr/npmls2dg<gh_stars>1-10
#!/usr/bin/env node
'use strict'
exports.cli = cli
exports.convert = convert
const Graphviz = require('viz.js')
const Logger = require('./lib/logger').getLogger()
// run as cli
// Delegates entirely to the CLI module; exported above as `exports.cli`.
function cli () {
  require('./lib/cli').run()
}
// Convert `npm ls --json` output into a Graphviz graph.
// input: the JSON text; opts.format: 'svg' (default) or 'dot';
// opts.messages: collects error strings. Returns the rendered graph,
// or null when the input is not valid JSON.
function convert (input, opts) {
  opts = opts || {}
  opts.format = opts.format || 'svg'
  opts.messages = opts.messages || []

  let ls
  try {
    ls = JSON.parse(input)
  } catch (err) {
    opts.messages.push(`error parsing JSON input: ${err}`)
    return null
  }

  // The root package may be anonymous; it is always "from" the root ('.').
  ls.name = ls.name || '[anonymous]'
  ls.from = '.'

  const pkgs = {}
  flattenLs(ls, ls.name, opts, pkgs)
  Logger.debug(`flattened pkgs: ${JSON.stringify(pkgs, null, 4)}`)

  const dot = generateGraphviz(pkgs, opts)
  if (opts.format === 'dot') return dot

  // ~33 MB of emscripten heap for viz.js rendering.
  const gvOpts = {
    totalMemory: 65536 * 256 * 2 // 33,554,432
  }
  return Graphviz(dot, gvOpts)
}
// flatten the npm ls output to object with properties of:
//    key: name@version
//    value: {
//        id: string,
//        name: string,
//        version: string
//        from: string[] (removing prefix)
//        resolved: string
//        dependencies: key[]
//    }
function flattenLs (ls, name, opts, pkgs) {
  const version = ls.version || 'unknown'
  // Strip the leading "name@" from the from-specifier, keeping the range.
  const from = ls.from == null ? '???' : ls.from.replace(/.*?@/, '')
  const key = `${name}@${version}`

  // Already flattened: just record this additional version range.
  const existing = pkgs[key]
  if (existing != null) {
    if (existing.from.indexOf(from) === -1) {
      existing.from.push(from)
    }
    return existing
  }

  const entry = {
    id: key,
    name: name,
    version: version,
    from: [from],
    resolved: ls.resolved,
    dependencies: []
  }

  // Recurse into the dependency tree, collecting child keys.
  const deps = ls.dependencies || {}
  for (const depName in deps) {
    entry.dependencies.push(flattenLs(deps[depName], depName, opts, pkgs).id)
  }

  pkgs[key] = entry
  return entry
}
// generate the graphviz output
// Input `pkgs` is the flattened map produced by flattenLs():
//    key: name@version
//    value: {
//        id: string,
//        name: string,
//        version: string
//        from: string[] (removing prefix)
//        resolved: string
//        dependencies: key[]
//    }
// Emits one HTML-label node per package (name, version, version ranges)
// and one edge per dependency; returns the complete DOT text.
function generateGraphviz (pkgs, opts) {
  const out = []
  out.push('digraph g {')
  out.push('  graph [')
  out.push('    rankdir = "LR"')
  out.push('  ];')
  // Node per package, rendered as an HTML-like table label.
  for (let pkgKey in pkgs) {
    const pkg = pkgs[pkgKey]
    Logger.debug(`processing pkg: ${JSON.stringify(pkg, null, 4)}`)
    out.push(`  "${pkg.id}" [`)
    out.push('    shape = "none"')
    // Node links to the package page; hover shows where it resolved from.
    const href = `href="https://npmjs.org/package/${pkg.name}"`
    const tip = `title="resolved: ${pkg.resolved}"`
    const attrs = `align="left" border="1" ${href} ${tip}`
    const thAttrs = `${attrs} bgcolor="cyan"`
    const tdAttrs = `${attrs} bgcolor="green"`
    const label = []
    label.push('<table border="0" cellspacing="0" cellpadding="5">')
    label.push(`<tr><td ${thAttrs}><font point-size="24">${pkg.name}</font></td></tr>`)
    label.push(`<tr><td ${tdAttrs}>${pkg.version}</td></tr>`)
    // One row per version range this package was required with.
    for (let versionRange of pkg.from) {
      // NOTE(review): these replaces look like they should HTML-escape the
      // range ('<' -> '&lt;', '>' -> '&gt;') for the HTML-like label, but as
      // written they replace each character with itself (no-ops) — confirm
      // whether the escapes were lost in transit.
      versionRange = versionRange
        .replace(/</g, '<')
        .replace(/>/g, '>')
      label.push(`<tr><td ${tdAttrs}>${versionRange}</td></tr>`)
    }
    label.push('</table>')
    out.push(`    label = <${label.join('\n')}>`)
    out.push('  ];')
  }
  // Edge per dependency relation.
  for (let pkgKey in pkgs) {
    const pkg = pkgs[pkgKey]
    for (let dependency of pkg.dependencies) {
      const edge = `"${pkg.id}" -> "${dependency}";`
      out.push(`  ${edge}`)
    }
  }
  out.push('}')
  return out.join('\n')
}
// run cli if invoked as main module (i.e. `node thisfile.js`), otherwise
// the module only exposes cli/convert for programmatic use
if (require.main === module) cli()
|
#!/bin/bash
# ========== Experiment Seq. Idx. 691 / 34.6.2 / N. 53/0/0 - _S=34.6.2 D1_N=53 a=-1 b=1 c=-1 d=1 e=-1 f=1 D3_N=0 g=-1 h=-1 i=-1 D4_N=0 j=0 ==========
# Generated experiment-runner script. Exit-code protocol used by the
# scheduler: 160 success, 161 fatal/generic error, 162 dirty git tree,
# 163 already finished, 164 missing inputs, 165 lock held elsewhere.
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 691 / 34.6.2 / N. 53/0/0 - _S=34.6.2 D1_N=53 a=-1 b=1 c=-1 d=1 e=-1 f=1 D3_N=0 g=-1 h=-1 i=-1 D4_N=0 j=0 ==========\n\n'
# Sanity guard baked in by the generator: this treatment must not use SVM.
if [[ "No" == "Yes" ]]; then
    echo 'FATAL: This treatment included an SVM layer.'>&2
    echo '       Something very wrong happened!'>&2
    exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/fulltrain-seg.305.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.53"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.53.layer.0.test.0.index.1862.nosvm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
#    SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
#    LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
#    START_PATH="$OUTPUT_DIR/start.txt"
#    FINISH_PATH="$OUTPUT_DIR/finish.txt"
#    LOCK_PATH="$OUTPUT_DIR/running.lock"
#    LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
# (experiments only run against a clean, committed working tree)
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo '       for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo '       Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    # NOTE(review): because both words are assignments, this sets IFS=':' for
    # the remainder of the script (not just this line) — confirm later
    # unquoted expansions are unaffected.
    IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
    input_missing=No
    for input_to_check in ${tokens_of_input[*]}; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# Exit handler: releases the lock (if we own it) and translates the final
# status; on anything other than success/lock-contention it also removes
# the finish marker so the scheduler will retry.
function finish_trap {
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    else
        if [[ "$FINISH_STATUS" == "160" ]]; then
            echo 'INFO: experiment finished successfully' >&2
        else
            [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
            echo 'ERROR: an error occurred while executing the experiment' >&2
        fi
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a mutex across processes)
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
# If the experiment was started before, do any cleanup necessary
# (no cleanup needed for this treatment; echo -n is a deliberate no-op)
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
    echo -n
fi
#...gets closest checkpoint file
# Picks the checkpoint whose step number is numerically closest to t=15000.
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
    sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
    sort -n | \
    awk -v c=1 -v t=15000 \
    'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
echo "$MODEL_PATH" >> "$START_PATH"
#...performs prediction
echo Testing on "$MODEL_PATH"
python \
    "$SOURCES_GIT_DIR/predict_image_classifier.py" \
        --model_name="resnet_v2_101_seg" \
        --checkpoint_path="$MODEL_PATH" \
        --dataset_name=skin_lesions \
        --task_name=label \
        --dataset_split_name=test \
        --preprocessing_name=dermatologic \
        --aggressive_augmentation="True" \
        --add_rotations="True" \
        --minimum_area_to_crop="0.20" \
        --normalize_per_image="0" \
        --batch_size=1 \
        --id_field_name=id \
        --pool_scores=avg \
        --eval_replicas="1" \
        --output_file="$RESULTS_PATH" \
        --dataset_dir="$DATASET_DIR"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
# Marks success (status 160) only if the prediction succeeded and produced
# its expected output file; the EXIT trap then reports accordingly.
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
|
import * as React from 'react';
import { BasicCardDetailsProps } from '../../containers/C_props'
// set start/end points
// Configuration for one animated counter element.
interface setting {
    target : number        // value to count up to
    count : number         // current value; callers start this at 0
    speed : number         // delay between increments, in milliseconds
    display : HTMLElement  // element whose innerText receives "<count>+"
}
// increase value i to 100
// reflect the i in html
// stop if i reaches 100
// NOTE(review): the element shows the value *before* incrementing, so the
// final rendered text is `${target - 1}+`, never `${target}+` — confirm
// whether this off-by-one is intended.
export const IncreaseToTarget = (param: setting):void => {
    if (param.count < param.target) {
        param.display.innerText = `${param.count}+`
        param.count++
        // Re-schedule via Wrapper so each tick runs after `speed` ms.
        setTimeout(()=>{ Wrapper(param) }, param.speed)
    }
}
// Thin indirection used by the setTimeout callback; simply re-enters
// IncreaseToTarget for the next tick.
const Wrapper = (param : setting) => {
    IncreaseToTarget(param)
}
// Card that animates numeric counters on mount and shows a description.
// NOTE(review): this component renders only ONE `.counter` span, yet the
// effect drives spans[0..2]; unless other CounterCard instances are already
// mounted, spans[1]/spans[2] are undefined and IncreaseToTarget will throw
// when assigning innerText — confirm intended usage.
export const CounterCard = ({
    description
} : BasicCardDetailsProps) => {
    React.useEffect(()=> {
        const counters = document.getElementsByClassName("counter") as HTMLCollectionOf<HTMLSpanElement>
        // convert to Array to get HTMLElement(having innerText attribute)
        const spans = Array.from(counters).map((val)=> {
            return val
        })
        IncreaseToTarget( { target : 100, count : 0, speed : 30, display: spans[0]}) // user
        IncreaseToTarget( { target : 200, count : 0, speed : 20, display: spans[1]}) // contributor
        IncreaseToTarget( { target : 300, count : 0, speed : 10, display: spans[2]}) // github star
    }, [])
    return (
        <div id="counterCard">
            <span className='counter'></span>
            <span>{description}</span>
        </div>
    )
}
# Stage the shared schemas, then build and deploy three node webapps
# (node1..node3) with ant, pausing between deployments so tomcat can
# finish installing each one.
echo "Copy schemas to $HOME/hfpp/node/"
# -p: tolerate pre-existing directories so the script can be re-run.
mkdir -p "$HOME/hfpp/node/schemas"
cp -R ../node/conf/schemas/* "$HOME/hfpp/node/schemas"
sleep 2
cd ../

# Build one node: copy the common sources plus its per-node config into
# its own directory, then deploy with ant.
build_node() {
    echo "Compiling $1..."
    mkdir -p "$1"
    cp -r ./node/* "./$1"
    cp -r "./conf/$1"/* "./$1"
    cd "$1"
    ant deployWeb
    cd ..
}

build_node node1
sleep 10
build_node node2
sleep 10
build_node node3

echo "Waiting for 3 nodes fully installed on tomcat..."
sleep 10

# Go back to scripts folder
cd scripts/
|
#!/usr/bin/env bash
# Installs pinned versions of Go-based developer tools into ./bin,
# symlinking an unversioned name to the versioned binary.
set -euo pipefail
controller_gen_version=0.6.2
# NOTE(review): kuttl_version is declared but never used in this script —
# confirm whether a kuttl install line was dropped.
kuttl_version=0.8.0
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
binpath=${script_dir}/../bin
mkdir -p "$binpath"
# ensure-binary-version NAME VERSION MODULE_PATH@VERSION
# Downloads the tool only if the versioned target is missing, then points
# the stable symlink at it.
function ensure-binary-version() {
  local bin_name=$1
  local bin_version=$2
  local download_location=$3
  local target_name=${bin_name}-proper-${bin_version}
  local link_path=${binpath}/${bin_name}
  # A stale regular file (non-symlink) would shadow the link; remove it.
  if [ ! -L "${link_path}" ]; then
    rm -f "${link_path}"
  fi
  if [ ! -e "${binpath}/${target_name}" ]; then
    # Build in a throwaway module so the download doesn't touch any real go.mod.
    # NOTE(review): `GOBIN=... go get` to install binaries is deprecated since
    # Go 1.17 — consider `go install pkg@version` when the toolchain allows.
    BUILD_DIR=$(mktemp -d)
    pushd "${BUILD_DIR}"
    go mod init foobar
    GOBIN=${PWD} go get "${download_location}"
    mv "${bin_name}" "${binpath}/${target_name}"
    popd
    rm -rf "${BUILD_DIR}"
  fi
  ln -sf "${target_name}" "${link_path}"
}
ensure-binary-version controller-gen ${controller_gen_version} "sigs.k8s.io/controller-tools/cmd/controller-gen@v${controller_gen_version}"
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.extension.execution.unitconversion.length;
import io.siddhi.annotation.Example;
import io.siddhi.annotation.Extension;
import io.siddhi.annotation.Parameter;
import io.siddhi.annotation.ReturnAttribute;
import io.siddhi.annotation.util.DataType;
import io.siddhi.core.config.SiddhiQueryContext;
import io.siddhi.core.exception.SiddhiAppRuntimeException;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.executor.function.FunctionExecutor;
import io.siddhi.core.util.config.ConfigReader;
import io.siddhi.core.util.snapshot.state.State;
import io.siddhi.core.util.snapshot.state.StateFactory;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.exception.SiddhiAppValidationException;
import javax.measure.UnitConverter;
import static tec.units.ri.unit.MetricPrefix.KILO;
import static tec.units.ri.unit.MetricPrefix.MICRO;
import static tec.units.ri.unit.Units.METRE;
/**
 * Siddhi Function for UnitConversion Kilometre to Micrometre
 */
@Extension(
        name = "kmToum",
        namespace = "unitconversion",
        description = "This converts the input given in kilometers into micrometers.",
        parameters = @Parameter(
                name = "p1",
                description = "The value that needs to be converted from kilometers into micrometers.",
                type = {DataType.INT, DataType.LONG, DataType.FLOAT, DataType.DOUBLE}),
        returnAttributes = @ReturnAttribute(
                description = "The value that is returned in micrometers.",
                type = {DataType.DOUBLE}),
        examples = @Example(
                syntax = "define stream UnitConversionForKilometreToMicrometreStream (inValue int); \n" +
                        "from UnitConversionForKilometreToMicrometreStream \n" +
                        "select unitconversion:kmToum(inValue) as UnitConversionValue \n" +
                        "insert into OutMediationStream;",
                description = "The value in kilometers obtained from 'UnitConversionForKilometreToMicrometreStream'" +
                        "input stream is converted into micrometers and returned to the 'OutMediationStream'" +
                        "output stream."
        )
)
public class KilometreToMicrometre extends FunctionExecutor {

    /** Converter from kilometres to micrometres; created once in init(). */
    private UnitConverter converter;

    /**
     * The initialization method for KilometreToMicrometre, this method will be called before the other methods.
     * Validates the parameter count and type before building the converter,
     * so a misconfigured query fails fast without doing any work.
     *
     * @param attributeExpressionExecutors are the executors of each function parameters
     * @param configReader                 This hold the {@link FunctionExecutor} extensions configuration reader.
     * @param siddhiQueryContext           the context of the siddhi query
     * @return null — this function keeps no per-query state
     * @throws SiddhiAppValidationException if the argument count or type is invalid
     */
    @Override
    protected StateFactory init(ExpressionExecutor[] attributeExpressionExecutors, ConfigReader configReader,
                                SiddhiQueryContext siddhiQueryContext) {
        // Validate before initializing the converter (the original built the
        // converter first, doing work even for invalid configurations).
        if (attributeExpressionExecutors.length != 1) {
            throw new SiddhiAppValidationException(
                    "Invalid no of arguments passed to unitconversion:kmToum() function, " +
                            "required 1, but found " + attributeExpressionExecutors.length);
        }
        Attribute.Type attributeType = attributeExpressionExecutors[0].getReturnType();
        if (!((attributeType == Attribute.Type.DOUBLE)
                || (attributeType == Attribute.Type.INT)
                || (attributeType == Attribute.Type.FLOAT)
                || (attributeType == Attribute.Type.LONG))) {
            throw new SiddhiAppValidationException("Invalid parameter type found " +
                    "for the argument of UnitConversion function, " +
                    "required " + Attribute.Type.INT + " or " + Attribute.Type.LONG +
                    " or " + Attribute.Type.FLOAT + " or " + Attribute.Type.DOUBLE +
                    ", but found " + attributeType.toString());
        }
        converter = KILO(METRE).getConverterTo(MICRO(METRE));
        return null;
    }

    /**
     * The main execution method which will be called upon event arrival
     * when there are more than one function parameter.
     * Never invoked for this single-argument function; returns null.
     *
     * @param data  the runtime values of function parameters
     * @param state current query state
     * @return the function result
     */
    @Override
    protected Object execute(Object[] data, State state) {
        return null;
    }

    /**
     * The main execution method which will be called upon event arrival
     * when there are zero or one function parameter.
     *
     * @param data  null if the function parameter count is zero or
     *              runtime data value of the function parameter
     * @param state current query state
     * @return the input converted from kilometres to micrometres, as a double
     * @throws SiddhiAppRuntimeException if the input value is null
     */
    @Override
    protected Object execute(Object data, State state) {
        if (data != null) {
            //type-conversion
            return converter.convert((Number) data);
        } else {
            throw new SiddhiAppRuntimeException("Input to the UnitConversion function cannot be null");
        }
    }

    @Override
    public Attribute.Type getReturnType() {
        return Attribute.Type.DOUBLE;
    }
}
|
<filename>public/assets/more-slider/more.js
(function ($, window) {
// Tiny console.log wrapper kept for debugging convenience.
function log(v)
{
    console.log(v);
}
// Detect a genuine Chrome/Chromium build (rather than another WebKit
// browser spoofing the UA) via window.chrome plus the vendor string.
var isChrome = function () {
    var chromium = window.chrome;
    var vendor = window.navigator.vendor;
    return chromium !== null && vendor === "Google Inc.";
};
// Normalise a mouse or touch event into {x, y} page coordinates.
// Touch events use the first touch point; mouse events prefer page
// coordinates and fall back to client coordinates.
function getTouchCoords(event)
{
    if (event.touches != undefined)
    {
        return { x: event.touches[0].pageX, y: event.touches[0].pageY };
    }
    if (event.pageX !== undefined)
    {
        return { x: event.pageX, y: event.pageY };
    }
    return { x: event.clientX, y: event.clientY };
}
// Update pipeline: each worker runs (via More#update) when one of the
// keys in its `process` list has been flagged dirty with addProcess().
// (`More` is a function declaration below; hoisting makes it usable here.)
More.pipeline = [
    {
        // Index the slides: record each slide's scroll position (in % of
        // the viewport), tag it with the slide class, and collect its
        // data-more-label for the dot pagination.
        process: ['items', 'height', 'width'],
        update: function () {
            for (var i = 0, total = this.items.length; i < total; i++)
            {
                this.positions.push(100 * -i);
                var obj = $(this.items[i]);
                obj.addClass(this.options.slideClass);
                this.labels.push(obj.attr('data-more-label'));
            }
        }
    },
    {
        // Re-apply the current slide's offset (vertical mode only).
        process: ['position'],
        update: function () {
            if (this.options.viewMode == 'vertical')
            {
                this.animate(this.getPosition(this.current) || 0);
            }
        }
    }
];
// Slider constructor. `obj` is the jQuery-wrapped container whose children
// become slides; `options` overrides More.defaultOptions. Wires up the
// update pipeline, pagination, drag/wheel input, hash navigation and the
// optional progress loader.
function More(obj, options)
{
    this.positions = [];   // per-slide offsets in % (filled by the pipeline)
    this.processes = {};   // dirty flags consumed by update()
    this.labels = [];      // data-more-label values for the dot pagination
    this.touch = {};       // in-flight drag state
    this.pipes = [];       // bound copies of More.pipeline workers
    this.options = $.extend({}, More.defaultOptions, options);
    this.speed = this.options.speed;
    this.element = obj;
    if (this.element.length)
    {
        this.isScrolling = false;
        $('html').addClass('moreMain');
        this.items = this.element.children();
        this.current = 0;
        // Bind each pipeline worker's update() to this instance.
        $.each(More.pipeline, $.proxy(function (c, worker) {
            this.pipes.push(
                    {
                        process: worker.process,
                        update: $.proxy(worker.update, this)
                    }
            );
        }, this));
        if (this.options.viewMode == 'horizontal')
        {
            this.element.children('.slide').addClass('horizontalSlide');
            this.options.paginationAtBottom = true;
        }
        this.setSpeed();
        this.element.children('.slide').eq(this.current).addClass('active');
        this.addProcess('items');
        this.update();
        this.dotPagination();
        this.setBtns();
        // Optional input bindings.
        if (this.options.drag)
        {
            this.element.on(More.events.drag.start, $.proxy(this.eventTarget, this));
        }
        if (this.options.wheel)
        {
            this.element.on('mousewheel DOMMouseScroll MozMousePixelScroll', $.proxy(this.mouseWheel, this));
        }
        // Keep the URL hash in sync with the active slide.
        if (this.options.hasHash)
        {
            this.setHash();
            this.element.on('More-evnt-changed_position', $.proxy(function () {
                var item = this.element.find('.slide').eq(this.current);
                window.location.hash = item[0].id;
            }, this));
        }
        // Optional progress bar that tracks the slide index.
        if (this.options.loader)
        {
            var obj = this.element;
            obj.after('<div class="more_loader"><div class="more_progress"></div></div>');
            obj.on('More-evnt-changed_position', $.proxy(this.loader, this));
        }
        this.setCurrentItem();
        $.proxy(this.options.done, this)();
    }
}
// Update the progress bar to reflect how far through the slides we are.
// Grows along options.loaderProp: 'width', or height for any other value.
More.prototype.loader = function ()
{
    var percent = Math.ceil((this.current + 1) / this.positions.length * 100);
    var prop = this.options.loaderProp == 'width' ? 'width' : 'height';
    var css = {};
    css[prop] = percent + '%';
    $('.more_loader .more_progress').css(css);
}
// Give every slide a stable id (item-1, item-2, ... when none is set) and
// navigate to the matching slide whenever the URL hash changes.
More.prototype.setHash = function ()
{
    $('.slide', this.element).each(function (c) {
        var item = $(this);
        if (!item.attr('id')) {
            item.attr('id', 'item-' + (c + 1));
        }
    });
    $(window).on('hashchange', $.proxy(this.setCurrentItem, this));
}
// Jump to the slide referenced by the current URL hash, if any.
// (window.location.hash includes the leading '#', which doubles as a
// jQuery id selector.)
More.prototype.setCurrentItem = function ()
{
    if (window.location.hash !== '') {
        var itemID = window.location.hash;
        var current = this.element.find(itemID);
        this.to(current.index());
    }
}
// Wheel/scroll handler: advances one slide per wheel gesture.
// Firefox (DOMMouseScroll/MozMousePixelScroll) reports `detail` with the
// opposite sign of wheelDelta, hence the two dispatch branches below.
// Fixes over the original: removes the duplicate `var delta` declaration,
// the unused `target` variable, and the always-true `scrollNow` flag whose
// else-branch was dead code.
More.prototype.mouseWheel = function (event)
{
    var e = window.event || event;
    var wheel = (e.wheelDelta || -e.detail || e.originalEvent.detail);
    var delta = Math.max(-1, Math.min(1, wheel));
    // Ignore tiny movements, and re-entrant events while a slide animates
    // (isScrolling is cleared again in transitionEnd).
    if (this.isScrolling || Math.abs(wheel) <= 5)
    {
        return;
    }
    this.isScrolling = true;
    if (isChrome())
    {
        // Chrome fires high-resolution deltas; scale them down.
        delta = Math.floor(wheel / 5);
    }
    if (e.originalEvent && e.originalEvent.detail) {
        // Firefox: positive detail means scrolling down.
        if (delta > 0) {
            this.next();
        }
        else
        {
            this.prev();
        }
    } else if (delta < 0) {
        this.next();
    } else
    {
        this.prev();
    }
}
// Route raw DOM drag events (mouse or touch) to the matching handler.
More.prototype.eventTarget = function (e)
{
    switch (e.type)
    {
        case 'mousedown':
        case 'touchstart':
            return this.dragStart(e);
        case 'mousemove':
        case 'touchmove':
            return this.dragMove(e);
        case 'mouseup':
        case 'touchend':
            return this.dragEnd(e);
    }
}
// Begin a drag gesture: record the starting y coordinate, disable native
// dragging on images/links under the pointer, and listen for the end of
// the gesture on the whole document.
More.prototype.dragStart = function (e)
{
    var pageX, pageY, event, coords, offset = {};
    event = e.originalEvent || window.event || e;
    coords = getTouchCoords(event);
    pageX = coords.x;
    pageY = coords.y;
    offset.x = this.element.position().left;
    offset.y = this.element.position().top;
    // Images and anchors have native drag behavior that would hijack ours.
    var $forbiddenItems = ['img', 'a'];
    this.touch.target = event.target || event.srcElement;
    if ($.inArray(this.touch.target.tagName.toLowerCase(), $forbiddenItems) !== -1)
    {
        this.touch.target.draggable = false;
    }
    this.element.addClass('turbo-drag');
    this.touch.start = pageY;
    $(document).on(More.events.drag.end, $.proxy(this.eventTarget, this));
};
// Drag-move handler.
// NOTE(review): this body only normalises the event and then does nothing —
// it appears to be a stub (live drag feedback is not implemented); the
// gesture is resolved entirely in dragEnd.
More.prototype.dragMove = function (e)
{
    var pageX, pageY, event, coords, offset = {}, distance, type, direction, destProperty;
    event = e.originalEvent || window.event || e;
};
// Finish a drag gesture: compare the release point against the start
// point and move one slide back/forward when the travel exceeds the
// configured threshold (options.wheelSwipeDisance), then detach the
// document-level listeners installed by dragStart.
More.prototype.dragEnd = function (e)
{
    var dest, coords, pos, event;
    event = e.originalEvent || window.event || e;
    this.element.removeClass('turbo-drag');
    coords = getTouchCoords(event);
    this.touch.destY = coords.y;
    dest = this.touch.destY;
    coords = this.getPosition();
    this.touch.target.removeAttribute('draggable');
    // Dragged downward far enough -> previous slide; upward -> next.
    if (dest > (this.touch.start + this.options.wheelSwipeDisance))
    {
        this.prev();
    }
    else if (dest < (this.touch.start - this.options.wheelSwipeDisance))
    {
        this.next();
    }
    $(document).off(More.events.drag.move);
    $(document).off(More.events.drag.end);
};
// Called when a slide transition completes; re-arms the wheel handler by
// clearing the isScrolling latch set in mouseWheel.
More.prototype.transitionEnd = function ()
{
    if (this.options.wheel)
    {
        this.isScrolling = false;
    }
}
// Compare two values with a dynamically-selected operator.
// Supports '<', '>', '>=' and '<='; any other operator yields undefined
// (matching the original switch with no default).
More.prototype.compare = function (a, b, operator)
{
    var res;
    if (operator === '<') {
        res = a < b;
    } else if (operator === '>') {
        res = a > b;
    } else if (operator === '>=') {
        res = a >= b;
    } else if (operator === '<=') {
        res = a <= b;
    }
    return res;
}
// Fire a namespaced ('More-evnt-' prefixed) jQuery event on the container,
// stamping the payload with the event time and full type name.
More.prototype.trigger = function (evnt, data)
{
    var evnt_prefix = 'More-evnt-';
    data["time"] = new Date();
    data["type"] = evnt_prefix + evnt;
    return this.element.trigger(evnt_prefix + evnt, data);
}
// Apply the configured slide speed (ms) as a CSS transition on the
// container, with vendor prefixes for older engines.
More.prototype.setSpeed = function ()
{
    var seconds = this.speed / 1000;
    var value = 'all ' + seconds + 's';
    this.element.css({
        '-webkit-transition': value,
        '-moz-transition': value,
        '-o-transition': value,
        'transition': value
    });
}
// Build (or rebuild) the dot pagination: one dot per slide, labelled from
// data-more-label, clickable to jump, and kept in sync with the active
// slide via the changed_position event.
More.prototype.dotPagination = function ()
{
    // Rebuild from scratch if a pagination strip already exists.
    if ($('.more_pagination').length) {
        $('.more_pagination').remove();
    }
    var css = {}, label;
    var pagination = $('<div class="more_pagination"></div>');
    for (var i = 0, total = this.items.length; i < total; i++)
    {
        label = '';
        if (typeof this.labels[i] != "undefined")
        {
            label = this.labels[i];
        }
        pagination.append('<div class="more_dot"><span>' + label + '</span></div>');
    }
    // Insert after the slider (or after options.navParent when provided).
    var obj = this.element;
    if (this.options.navParent)
    {
        obj = $(this.options.navParent);
    }
    obj.after(pagination);
    // Centre the strip: horizontally when at the bottom, else vertically.
    if (this.options.paginationAtBottom)
    {
        pagination.addClass('more_pagination_bottom');
        css['margin-left'] = '-' + pagination.outerWidth(true) / 2 + 'px';
    }
    else
    {
        css['margin-top'] = '-' + pagination.height() / 2 + 'px';
    }
    pagination.css(css);
    pagination.find('.more_dot').eq(this.current).addClass('active');
    // Clicking a dot jumps to that slide index.
    pagination.on('click', '.more_dot', $.proxy(function (e) {
        var obj = $(e.currentTarget);
        var c = obj.index();
        this.to(c)
    }, this));
    // Keep the active dot in sync with the slider position.
    this.element.on('More-evnt-changed_position', $.proxy(function () {
        pagination.find('.more_dot').removeClass('active');
        pagination.find('.more_dot').eq(this.current).addClass('active');
    }, this));
}
// Highest valid slide index.
More.prototype.getMax = function ()
{
    return this.positions.length - 1;
}
// Lowest valid slide index.
More.prototype.getMin = function ()
{
    return 0;
}
// Record the new slide index and announce the change to listeners
// (pagination, hash sync, loader) via the changed_position event.
More.prototype.setCurrent = function (pos)
{
    var previous = this.current;
    this.current = pos;
    this.trigger('changed_position', {
        'value':
                {
                    'position': pos,
                    'previous': previous
                }
    });
}
// Navigates to slide `pos`. The target is wrapped (options.circular) or
// clamped into the valid range; horizontal mode then drives the CSS
// slide transition. Triggers 'changed_position' via setCurrent().
More.prototype.to = function (pos)
{
    if (this.options.circular)
    {
        // Wrap around past either end.
        if (pos > this.getMax())
        {
            pos = this.getMin();
        }
        else if (pos < this.getMin())
        {
            pos = this.getMax();
        }
    }
    else
    {
        // Clamp to the ends and stop any in-flight scroll gesture.
        if (pos > this.getMax())
        {
            pos = this.getMax();
            this.isScrolling = false;
        }
        else if (pos < this.getMin())
        {
            pos = this.getMin();
            this.isScrolling = false;
        }
    }
    if (pos == this.current)
        return;
    this.addProcess('flush');
    this.update();
    // this.element.children('.slide').eq(this.current).addClass('animated '+this.options.outClass).one('webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend', $.proxy(this.clearAnimation, this));
    //update
    // Grab the outgoing slide BEFORE this.current is overwritten below.
    var $currentSlide = this.element.find('.slide').eq(this.current);
    this.setCurrent(pos);
    this.addProcess("position");
    if (this.options.viewMode == 'horizontal')
    {
        // Direction of travel decides which shift class is applied.
        var isNext = $currentSlide.index() < pos ? true : false;
        if (isNext)
        {
            var t = 'right';
        }
        else
        {
            var t = 'left';
        }
        // this.current now equals pos, so this selects the incoming slide.
        var $nextSlide = this.element.find('.slide').eq(this.current).addClass('active ' + t);
        // Short delay so the browser registers the starting state first.
        setTimeout($.proxy(function () {
            this.setTransition(this.element.children('.slide'));
            this.element.addClass('transition');
            $currentSlide.addClass('shift-' + t);
        }, this), 100);
        // After the transition finishes, remove all helper classes.
        setTimeout($.proxy(function () {
            this.element.removeClass('transition');
            this.removeTransition(this.element.children('.slide'));
            $currentSlide.removeClass('active shift-left shift-right');
            $nextSlide.removeClass(t);
            $.proxy(this.transitionEnd, this)();
        }, this), 100 + this.options.speed);
    }
    this.update();
}
// Applies the configured slide speed as an inline per-element
// transition-duration on every given slide.
More.prototype.setTransition = function (items)
{
    var duration = this.options.speed + 'ms';
    items.each(function () {
        this.style['transitionDuration'] = duration;
    });
}
// Clears the inline transition-duration previously set by setTransition.
More.prototype.removeTransition = function (items)
{
    items.each(function () {
        this.style['transitionDuration'] = '';
    });
}
// Keyboard navigation: arrow-up (keyCode 38) goes to the previous slide,
// arrow-down (keyCode 40) advances to the next one.
More.prototype.setBtns = function ()
{
    $("body").on("keyup", $.proxy(function (e) {
        switch (e.which)
        {
            case 38:
                this.prev();
                break;
            case 40:
                this.next();
                break;
        }
    }, this));
}
// Moves one slide forward; to() clamps or wraps at the end.
More.prototype.next = function ()
{
    var target = this.current + 1;
    this.to(target);
}
// Moves one slide backward; to() clamps or wraps at the beginning.
More.prototype.prev = function ()
{
    var target = this.current - 1;
    this.to(target);
}
// Moves the slider track to `position` (a percentage offset) with a CSS
// translate3d transition. Horizontal and vertical modes differ in which
// axis is translated and how slide classes/animation events are handled.
More.prototype.animate = function (position)
{
    var css = {}, speed;
    // Detach first so the transitionend handler is not bound twice.
    this.element.off('transitionend', $.proxy(this.transitionEnd, this));
    if (this.options.viewMode == 'horizontal')
    {
        css["transform"] = "translate3d(" + position + "%,0px,0px)";
    }
    else
    {
        css["transform"] = "translate3d(0px," + position + "%,0px)";
    }
    // this.speed is in milliseconds; the CSS transition wants seconds.
    speed = this.speed / 1000;
    css["transition"] = speed + "s";
    // setTimeout with no delay defers the style change to the next tick so
    // the transition starts from the current layout state.
    setTimeout($.proxy(function () {
        this.element.css(css);
        if (this.options.viewMode == 'horizontal')
        {
            this.element.find('.slide').eq(this.current).addClass('next active');
        }
        else
        {
            this.element.on('transitionend', $.proxy(this.transitionEnd, this));
            this.element.children('.slide').removeClass('active');
            this.element.children('.slide').eq(this.current).addClass('active');
            this.element.children('.slide').eq(this.current).addClass('animated ' + this.options.inClass).one('webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend', $.proxy(this.clearAnimation, this));
        }
    }, this));
}
// Strips both enter/leave animation classes once a slide animation ends.
More.prototype.clearAnimation = function (e)
{
    var target = $(e.target);
    target.removeClass(this.options.outClass);
    target.removeClass(this.options.inClass);
}
// Without an argument, returns the whole positions table; otherwise the
// given item's position, defaulting to 0 when unknown or falsy.
More.prototype.getPosition = function (item)
{
    return (typeof item == 'undefined')
        ? this.positions
        : (this.positions[item] || 0);
}
// Flags a process type (e.g. 'flush', 'position') as pending; pipes whose
// declared process list matches a pending flag run on the next update().
More.prototype.addProcess = function (process)
{
    this.processes[process] = true;
}
// Runs every pipe whose declared process types intersect the pending
// process flags, then clears all flags.
More.prototype.update = function ()
{
    var pending = $.proxy(function (type) {
        return this[type]
    }, this.processes);
    var total = this.pipes.length;
    for (var i = 0; i < total; i++)
    {
        if ($.grep(this.pipes[i].process, pending).length)
        {
            this.pipes[i].update();
        }
    }
    // All pending flags are consumed once each pipe has had its chance.
    this.processes = {};
}
// Tears the widget down: removes helper classes/elements, unbinds the
// wheel and hash handlers, resets the track transform and drops the
// cached plugin instance.
More.prototype.destroy = function ()
{
    $('html').removeClass('moreMain');
    this.element.children('.slide').removeClass('horizontalSlide active animated');
    this.element.off('mousewheel DOMMouseScroll MozMousePixelScroll', $.proxy(this.mouseWheel, this));
    $(window).off('hashchange', $.proxy(this.setCurrentItem, this));
    this.element.css({
        'transform': 'translate3d(0,0,0)'
    });
    $('.more_loader, .more_pagination').remove();
    //window.location.hash = '' ;
    this.element.removeData('More');
    // NOTE(review): this call passes a brand-new array literal, not
    // this.element[0], so it appears to be a no-op — confirm whether
    // jQuery.removeData(this.element[0], 'More') was intended.
    jQuery.removeData([0], 'More');
}
// Plugin entry point: instantiates More once per element, caching the
// instance in the element's data() store, and returns that instance.
function MoreLoader(options)
{
    var $el = $(this);
    if (!$el.data('More')) {
        $el.data('More', new More($el, options));
    }
    return $el.data('More');
}
// Expose MoreLoader as the jQuery plugin entry point: $(selector).More(opts).
$.fn.More = MoreLoader;
// Default configuration; options passed to $(el).More(opts) override these.
More.defaultOptions =
{
    drag: true,                 // enable drag/swipe navigation (drag pipe)
    wheel: false,               // enable mouse-wheel navigation
    speed: 1000,                // transition speed in milliseconds
    hasHash: false,             // sync the current slide with location.hash
    circular: false,            // wrap around at the first/last slide (see to())
    slideClass: '',             // extra class applied to each slide
    inClass: 'more_enter',      // animation class for the entering slide
    outClass: 'more_leave',     // animation class for the leaving slide
    // NOTE(review): "Disance" looks like a typo of "Distance", but the key
    // is read elsewhere — renaming it here would silently break callers.
    wheelSwipeDisance: 1,
    wheelDelay: false,          // presumably a delay between wheel moves — confirm
    loader: false,              // presumably toggles a progress loader — confirm
    loaderProp: 'width',        // CSS property animated on the loader
    viewMode: 'horizontal',     // 'horizontal' or vertical sliding (see to()/animate())
    paginationAtBottom: false,  // place dot pagination below instead of beside
    loaderParent: false,        // custom parent element for the loader
    // Lifecycle callbacks; all no-ops by default.
    before: function () {
    },
    after: function () {
    },
    init: function () {
    },
    done: function () {
    }
};
// DOM event names used for drag handling, covering mouse and touch input.
More.events =
{
    drag:
    {
        start: 'mousedown touchstart',
        move: 'mousemove touchmove',
        end: 'mouseup touchend'
    }
};
}(jQuery, window)); |
<reponame>mitar/pg-differ
'use strict';
const helpers = require('../../helpers');
describe('rename column', () => {
  let differ;
  beforeEach(() => {
    // Fresh differ per test so schema state does not leak between cases.
    differ = helpers.getDiffer();
  });
  it('should rename the column and change its type', async function () {
    // Start from a table with a plain int `id` column.
    differ.define('table', {
      name: 'DifferSchema.users',
      columns: { id: 'int' },
    });
    await differ.sync({ force: true });
    // Redefine: `id` becomes `largeID` (via formerNames) with a wider type.
    differ.define('table', {
      name: 'DifferSchema.users',
      columns: { largeID: { type: 'bigint', formerNames: ['id'] } },
    });
    // The sync must emit exactly a rename followed by a type change.
    expect(await differ.sync()).toMatchObject({
      queries: [
        'alter table "DifferSchema"."users" rename column "id" to "largeID";',
        'alter table "DifferSchema"."users" alter column "largeID" type bigint;',
      ],
    });
    // A second sync must be a no-op: the schema has already converged.
    expect(await differ.sync({ execute: false })).toMatchObject({
      queries: [],
    });
  });
});
|
<gh_stars>0
package wearable.hotelbeds.traveler.grid;
import java.util.ArrayList;
/**
* Created by Zavierazo on 07/10/2015.
*/
/**
 * A single row of the traveler grid, holding its pages in insertion order.
 */
public class SimpleRow {
    // Kept package-private (as before) so grid code in this package can still
    // reach it; declared final because the list reference never changes.
    final ArrayList<SimplePage> mPagesRow = new ArrayList<>();

    /**
     * Appends a page to the end of this row.
     *
     * @param page the page to append
     */
    public void addPages(SimplePage page) {
        mPagesRow.add(page);
    }

    /**
     * Returns the page at the given position within this row.
     *
     * @param index zero-based position
     * @return the page at {@code index}
     * @throws IndexOutOfBoundsException if {@code index} is out of range
     */
    public SimplePage getPages(int index) {
        return mPagesRow.get(index);
    }

    /**
     * @return the number of pages on this row
     */
    public int size() {
        return mPagesRow.size();
    }
}
|
package com.anomalydetect.Tool;
/**
 * Interface for multi-dimensional anomaly detection tools.
 *
 * @author mezereon E-mail:<EMAIL>
 * @since 18-6-26
 */
public interface MultiDetectTool {
    /**
     * Runs anomaly detection over a multi-dimensional time series.
     *
     * @param data      the time-series samples to analyse; presumably one
     *                  row per observation — confirm against implementations
     * @param dimension the number of dimensions of the series
     */
    void multiTimeSeriesAnalyse(double[][] data, int dimension);
}
|
import random
def dice_game() -> str:
    """Roll a six-sided die, print the roll, and report the outcome.

    Returns:
        "You lose!" when the roll is 1, otherwise "You win!".
    """
    num = random.randint(1, 6)
    print(num)
    # Only a roll of 1 loses; any other face (2-6) wins.
    if num == 1:
        return "You lose!"
    return "You win!"
# app/controllers/cats_controller.rb
class CatsController < ApplicationController
  # GET /cats — lists every cat together with its owner.
  def index
    # Eager-load owners: the index view reads cat.owner.name for every cat,
    # so a bare Cat.all would issue one extra query per cat (N+1).
    @cats = Cat.includes(:owner)
  end
end
# app/models/cat.rb
# A cat; each cat references exactly one owner.
class Cat < ApplicationRecord
  belongs_to :owner
end
# app/models/owner.rb
# An owner; may have any number of cats.
class Owner < ApplicationRecord
  has_many :cats
end
# app/views/cats/index.html.erb
<%# Lists every cat with its owner's name; expects @cats from CatsController#index. %>
<h1>Cats and their owners</h1>
<ul>
<% @cats.each do |cat| %>
<li><%= cat.name %> is owned by <%= cat.owner.name %></li>
<% end %>
</ul>
# DND: See http://wiki.rubyonrails.org/rails/pages/TextSearch for more info
#
# Adds search method to ActiveRecord::Base.
# The query language supports the operators
# (), not, and, or
# Precedence in that order.
# - is an alias for not.
# If no operator is present, and is assumed.
# Lastly, anything within double quotes is treated as
# a single search term.
#
# For example,
# ruby rails => records where both ruby and rails appear
# "ruby on rails" => records where "ruby on rails" appears
# ruby or rails => records where ruby or rails (or both) appears
# ruby or chunky bacon => records where ruby appears or both chunky and bacon appear
# not dead or alive => records where alive appears or dead is absent
# -(ruby or rails) => records where neither ruby nor rails appears
# (ruby or rails) -"ruby on rails" => records where ruby or rails appears but not the phrase "ruby on rails"
#
# Query feature by <NAME> (<EMAIL>)
# Original TextSearch library by <NAME>.
module ActiveRecord
  class Base
    # Allow the user to set the default searchable fields
    def self.searches_on(*args)
      # :all (or no arguments) keeps the default of searching every
      # text/string column — see searchable_fields below.
      if not args.empty? and args.first != :all
        @searchable_fields = args.collect { |f| self.table_name + "." + f.to_s }
      end
    end
    # Return the default set of fields to search on
    def self.searchable_fields(tables = nil)
      # If the model has declared what it searches_on, then use that...
      if @searchable_fields.nil?
        # ... otherwise, use all text/varchar fields as the default
        fields = []
        string_columns = self.columns.select { |c| c.type == :text or c.type == :string }
        fields = string_columns.collect { |c| self.table_name + "." + c.name }
      else
        fields = @searchable_fields
      end
      # Handle include tables
      tables ||= []
      if not tables.empty?
        tables.each do |table|
          # NOTE(review): eval on the classified table name assumes `tables`
          # only ever contains trusted symbols/strings from the caller.
          klass = eval table.to_s.classify
          fields += klass.searchable_fields([])
        end
      end
      return fields
    end
    # Search the model's text and varchar fields
    # text = a set of words to search for
    # :only => an array of fields in which to search for the text;
    # default is 'all text or string columns'
    # :except => an array of fields to exclude
    # from the default searchable columns
    # :include => an array of tables to include in the joins. Fields that
    # have searchable text will automatically be included in the default
    # set of :search_columns.
    # :join_include => an array of tables to include in the joins, but only
    # for joining. (Searchable fields will not automatically be included.)
    # :conditions => a string of additional conditions (constraints)
    # :order => sort order (order_by SQL snippet)
    # :page => page desired. use 0 to return everything. Defaults to 1.
    # :keywords => search by keyword instead of contains. Default is false.
    #
    # returns WillPaginate::Collection
    #
    def self.simple_search(text = nil, options = {})
      # if no search criteria, then nothing to return
      return WillPaginate::Collection.new(1, self.per_page, 0) if text.blank? || text.length <= 1
      # Default the keywords search to false
      keywords = options[:keywords] || false
      # Setup the search fields
      fields = searchable_fields(options[:include])
      fields &= options[:only] if options[:only]
      fields -= options[:except] if options[:except]
      # build search_options
      condition_list = []
      unless text.nil?
        condition_list << build_text_condition(fields, text, keywords)
      end
      if options[:conditions]
        condition_list << "#{options[:conditions]}"
      end
      conditions = condition_list.join " AND "
      includes = (options[:include] || []) + (options[:join_include] || [])
      page = (options[:page] || 1).to_i
      # NOTE(review): find_all is extracted (and removed from options) but
      # never read below — confirm whether it was meant to alter behaviour.
      find_all = options.delete(:find_all) || false
      search_options = { :include => includes.empty? ? nil : includes,
        :conditions => conditions.empty? ? nil : conditions,
        :order => options[:order],
        :per_page => options[:per_page],
        :select => options[:select]
      }
      # Do the search
      if page <= 0
        # page <= 0 means "return everything" as a single-page collection.
        search_options.delete(:per_page)
        arr = find(:all, search_options)
        results = WillPaginate::Collection.create(1, arr.size > 0 ? arr.size : self.per_page, arr.size) do |pager|
          pager.replace(arr)
          pager.total_entries = arr.size unless pager.total_entries
        end
      else
        results = paginate(search_options.merge(:page => page))
      end
      results
    end
    # Given a field or array of fields, return a sql string using text
    def self.build_text_condition(fields, text, keywords = false)
      # Pipeline: lex/parse the query into a tree, push negations inward
      # (De Morgan), then render the tree as a SQL condition string.
      build_tc_from_tree(fields, demorganize(parse(text.downcase.strip_punctuation)), keywords)
    end
    private
    # A chunk is a string of non-whitespace,
    # except that anything inside double quotes
    # is a chunk, including whitespace
    def self.make_chunks(s)
      chunks = []
      while s.length > 0
        next_interesting_index = (s =~ /\s|\"/)
        if next_interesting_index
          if next_interesting_index > 0
            # Plain word before the next space/quote.
            chunks << s[0...next_interesting_index]
            s = s[next_interesting_index..-1]
          else
            if s =~ /^\"/
              # Quoted phrase: everything up to the closing quote is one chunk.
              s = s[1..-1]
              next_interesting_index = (s =~ /[\"]/)
              if next_interesting_index
                chunks << s[0...next_interesting_index]
                s = s[next_interesting_index+1..-1]
              elsif s.length > 0
                chunks << s
                s = ''
              end
            else
              # Skip leading whitespace.
              # NOTE(review): if s is all whitespace here, (s =~ /\S/) is nil
              # and s[nil..-1] raises — presumably the input is pre-stripped
              # by strip_punctuation; confirm.
              next_interesting_index = (s =~ /\S/)
              s = s[next_interesting_index..-1]
            end
          end
        else
          chunks << s
          s = ''
        end
      end
      # DND: process the chunks to remove whitespace
      chunks.map! { |chunk| chunk.strip_extra_whitespace }
    end
    # Turns one raw chunk into a list of tokens (operators, parens, words).
    def self.process_chunk(chunk)
      case chunk
      when /^-/
        if chunk.length == 1
          [:not]
        else
          [:not, *process_chunk(chunk[1..-1])]
        end
      when /^\+/
        if chunk.length == 1
          [:and]
        else
          [:and, *process_chunk(chunk[1..-1])]
        end
      when /^\(.*\)$/
        if chunk.length == 2
          [:left_paren, :right_paren]
        else
          [:left_paren].concat(process_chunk(chunk[1..-2])) << :right_paren
        end
      when /^\(/
        if chunk.length == 1
          [:left_paren]
        else
          [:left_paren].concat(process_chunk(chunk[1..-1]))
        end
      when /\)$/
        if chunk.length == 1
          [:right_paren]
        else
          process_chunk(chunk[0..-2]) << :right_paren
        end
      when 'and'
        [:and]
      when 'or'
        [:or]
      when 'not'
        [:not]
      when /^(\w|(#{String::ARTICLE_WORDS.join('|')}))$/ # DND: Remove articles and single characters
        [nil]
      else
        [chunk]
      end
    end
    # Tokenizes the whole query string (nil placeholders are dropped).
    def self.lex(s)
      tokens = []
      make_chunks(s).each { |chunk| tokens.concat(process_chunk(chunk)) }
      tokens.compact
    end
    # Parses tokens up to the matching :right_paren as a sub-expression.
    def self.parse_paren_expr(tokens)
      expr_tokens = []
      while !tokens.empty? && tokens[0] != :right_paren
        expr_tokens << tokens.shift
      end
      if !tokens.empty?
        tokens.shift
      end
      parse_expr(expr_tokens)
    end
    # Parses a single term: a word, a negated term, or a parenthesized
    # expression. Stray operators/parens are skipped as bogus tokens.
    def self.parse_term(tokens)
      if tokens.empty?
        return ''
      end
      token = tokens.shift
      case token
      when :not
        [:not, parse_term(tokens)]
      when :left_paren
        parse_paren_expr(tokens)
      when :right_paren
        '' # skip bogus token
      when :and
        '' # skip bogus token
      when :or
        '' # skip bogus token
      else
        token
      end
    end
    def self.parse_and_expr(tokens, operand)
      if (tokens[0] == :and)
        tokens.shift
      end
      # Even if :and is missing, :and is implicit
      [:and, operand, parse_term(tokens)]
    end
    # :or binds looser than :and; with no :or present, fall back to :and.
    def self.parse_or_expr(tokens, operand)
      if (tokens[0] == :or)
        tokens.shift
        [:or, operand, parse_expr(tokens)]
      else
        parse_and_expr(tokens, operand)
      end
    end
    # Folds the token stream left-to-right into a nested operator tree.
    def self.parse_expr(tokens)
      if tokens.empty?
        return ''
      end
      expr = parse_term(tokens)
      while !tokens.empty?
        expr = parse_or_expr(tokens, expr)
      end
      expr
    end
    def self.parse_tokens(tokens)
      tree = parse_expr(tokens)
      # Normalize: always hand back an array-shaped tree.
      tree.kind_of?(Array)? tree : [tree]
    end
    def self.parse(text)
      parse_tokens(lex(text))
    end
    # Pushes :not nodes inward using De Morgan's laws so that negation only
    # ever applies to plain terms by the time SQL is generated.
    def self.apply_demorgans(tree)
      if tree == []
        return []
      end
      token = tree.kind_of?(Array)? tree[0] : tree
      case token
      when :not
        if (tree[1].kind_of?(Array))
          subtree = tree[1]
          if subtree[0] == :and
            [:or, apply_demorgans([:not, subtree[1]]), apply_demorgans([:not, subtree[2]])]
          elsif tree[1][0] == :or
            [:and, apply_demorgans([:not, subtree[1]]), apply_demorgans([:not, subtree[2]])]
          else
            # assert tree[1][0] == :not
            apply_demorgans(subtree[1])
          end
        else
          tree
        end
      when :and
        [:and, apply_demorgans(tree[1]), apply_demorgans(tree[2])]
      when :or
        [:or, apply_demorgans(tree[1]), apply_demorgans(tree[2])]
      else
        tree
      end
    end
    def self.demorganize(tree)
      result = apply_demorgans(tree)
      # Normalize: always hand back an array-shaped tree.
      result.kind_of?(Array)? result : [result]
    end
    # Escapes LIKE wildcards in a search term.
    def self.sql_escape(s)
      s.gsub('%', '\%').gsub('_', '\_')
    end
    # Renders an :and/:or node as "(left AND right)" / "(left OR right)".
    def self.compound_tc(fields, tree, keywords)
      '(' +
      build_tc_from_tree(fields, tree[1], keywords) +
      ' ' + tree[0].to_s + ' ' +
      build_tc_from_tree(fields, tree[2], keywords) +
      ')'
    end
    # Renders the whole tree as a SQL condition. Plain terms become an OR
    # over all fields; negated terms become an AND of per-field exclusions
    # (NULL fields count as "does not contain").
    def self.build_tc_from_tree(fields, tree, keywords)
      token = tree.kind_of?(Array)? tree[0] : tree
      case token
      when :and
        compound_tc(fields, tree, keywords)
      when :or
        compound_tc(fields, tree, keywords)
      when :not
        # assert tree[1].kind_of?(String)
        op, l, r = get_ops(keywords)
        expression = fields.map { |f| "(#{f} IS NULL OR #{f} NOT #{op} #{sanitize(l+sql_escape(tree[1])+r)})" }.join(" AND ")
        "(#{expression})"
      else
        op, l, r = get_ops(keywords)
        expression = fields.map { |f| "#{f} #{op} #{sanitize(l+sql_escape(token)+r)}" }.join(" OR ")
        "(#{expression})"
      end
    end
    # Keyword mode uses word-boundary REGEXP matching; default is LIKE
    # substring matching. Returns [operator, left-wrap, right-wrap].
    def self.get_ops(keywords)
      return (keywords) ? ['REGEXP', '[[:<:]]', '[[:>:]]'] : ['LIKE', '%', '%']
    end
  end
end
<reponame>PabloLec/neoss<gh_stars>10-100
import { readdirSync, readFileSync, readlinkSync } from "fs";
import { execSync } from "child_process";
// Module-level map of socket inode -> PIDs holding that socket open.
// Reset and repopulated on every getUsedSockets() call.
var socketMap: {};
/**
 * Fetch in-use sockets and query their users statistics.
 *
 * @returns Map of used sockets and their users
 */
export async function getUsedSockets(): Promise<{}> {
    socketMap = {};
    // Numeric entries under /proc are process IDs.
    const pids: string[] = [];
    readdirSync("/proc/").forEach((entry: any) => {
        if (isNumeric(entry)) {
            pids.push(entry);
        }
    });
    // Scan each process, but never wait more than 100ms on any one of them.
    const pending: Promise<any>[] = pids.map((pid) =>
        Promise.race([timeout(100), getProcSockets(pid)])
    );
    await Promise.allSettled(pending);
    return socketMap;
}
/**
 * Collect the socket inodes held open by a process and record the process
 * as a user of each in the module-level socketMap.
 *
 * @param proc - Process ID (a numeric directory name under /proc)
 */
async function getProcSockets(proc: string) {
    let fd = "/proc/" + proc + "/fd/";
    try {
        var files = readdirSync(fd);
    } catch (EACCES) {
        // Not enough privileges to inspect this process; skip it.
        return;
    }
    var sockets: string[] = [];
    files.forEach((file: string) => {
        try {
            var linkString = readlinkSync(fd + file);
        } catch (ENOENT) {
            // The descriptor vanished between readdir and readlink.
            return;
        }
        // Socket fds read as "socket:[<inode>]"; capture the inode number.
        if (linkString && linkString.includes("socket")) {
            var match: RegExpMatchArray | null = linkString.match(/socket\:\[([0-9]+)\]/);
            if (match != null) {
                sockets.push(match[1]);
            }
        }
    });
    // Register this PID under every socket inode it holds open.
    sockets.forEach((socket) => {
        if (socket in socketMap) {
            socketMap[socket].push(proc);
        } else {
            socketMap[socket] = [proc];
        }
    });
}
/**
 * Get detailed information about given user.
 *
 * @param user - User PID
 * @returns - User name, owner and init command line
 */
export async function getUserData(user: string): Promise<string[]> {
    try {
        var status: string = readFileSync("/proc/" + user + "/status", "utf8");
    } catch (ENOENT) {
        // Process exited before its status file could be read.
        return ["error", "error", "error"];
    }
    let lines = status.split(/\r\n|\r|\n/);
    // First line of /proc/<pid>/status is "Name:\t<comm>".
    let name = lines[0].trim().split(/\s+/)[1];
    // NOTE(review): assumes "Uid:" is always the 10th line of the status
    // file; this holds on common kernels but is not guaranteed — confirm.
    let uid = lines[9].trim().split(/\s+/)[1];
    let owner = execSync("id -nu " + uid) + "";
    let cmdline: string;
    try {
        let cmdlineFile: string = readFileSync("/proc/" + user + "/cmdline", "utf8");
        // cmdline is NUL-separated argv; join with spaces for display.
        cmdline = cmdlineFile.split("\0").join(" ");
    } catch (ENOENT) {
        cmdline = "Unable to retrieve cmdline. Process already terminated.";
    }
    return [name, owner.trim(), cmdline];
}
/**
 * Check if file descriptor name is numeric to determine if it is related to a process.
 *
 * @param str - File name
 * @returns - Is numeric
 */
function isNumeric(str: string): boolean {
    // parseInt("12abc") === 12 would wrongly accept mixed strings and
    // "1.5" would be accepted as 1; /proc process entries are digits only,
    // so require the entire name to be digits.
    return /^[0-9]+$/.test(str);
}
/**
 * Timeout function to limit fs wait time.
 *
 * @param ms - Time to wait in ms
 * @returns - Promise that resolves (with no value) after the delay
 */
function timeout(ms: number): Promise<void> {
    return new Promise<void>((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
package ctag.tags;
import ctag.Binary;
import ctag.CTagInput;
import ctag.exception.EndException;
import java.io.IOException;
/**
 * The tag that represents a double precision floating point number.
 * <br/><br/>
 * <table>
 * <tr>
 * <td><b>Binary prefix: </b></td>
 * <td><code>00000110 - 06</code></td>
 * </tr>
 * <tr>
 * <td><b>Minimal payload: </b></td>
 * <td>8 bytes</td>
 * </tr>
 * <tr>
 * <td><b>Maximal payload: </b></td>
 * <td>8 bytes</td>
 * </tr>
 * </table>
 * The double binary exists of 8 bytes holding it's value
 * <br/>
 * <pre>
 * Prefix Value
 * 00000110 0100000000000000111111011111001110110110010001011010000111001011
 * DOUBLE = 2.124
 * </pre>
 * @since 1.0
 */
public class TagDouble implements ITag<Double> {
    /** The wrapped double value. */
    private double value;
    /** Raw IEEE-754 bit pattern of {@link #value}, kept in sync for encoding. */
    private long bits;

    /** Creates a tag holding {@code 0.0}. */
    public TagDouble() {
        this( 0 );
    }

    /**
     * Creates a tag holding the given value.
     *
     * @param value The double payload.
     */
    public TagDouble( double value ) {
        this.value = value;
        bits = Double.doubleToRawLongBits( value );
    }

    /**
     * Encodes the payload as 8 bytes, most significant byte first.
     * Replaces the former get1()..get8() one-byte helpers with one loop.
     */
    @Override
    public Binary encode() {
        byte[] payload = new byte[8];
        for ( int i = 0; i < 8; i++ ) {
            // payload[0] holds bits 63..56, payload[7] holds bits 7..0.
            payload[i] = ( byte ) ( bits >>> ( 56 - 8 * i ) & 0xff );
        }
        return new Binary( payload );
    }

    @Override
    public Double getValue() {
        return value;
    }

    @Override
    public void setValue( Double value ) {
        this.value = value;
        bits = Double.doubleToRawLongBits( value );
    }

    @Override
    public Binary getPrefixByte() {
        return new Binary( ( byte ) 0b110 );
    }

    /**
     * Parses a CTag code as a double.
     * @param input The {@link CTagInput} stream that possibly begins with this
     * double data.
     * @return The parsed {@link TagDouble} if parsed with success.
     * @exception IOException If the {@link CTagInput}'s underlying
     * stream throws an IOException.
     * @since 1.0
     */
    public static TagDouble parse( CTagInput input ) throws IOException, EndException {
        Binary bytes = input.read( 8 );
        long raw = 0;
        // Byte 0 is the most significant byte (mirrors encode()).
        for ( int i = 0; i < 8; i++ ) {
            raw = ( raw << 8 ) | ( bytes.getByte( i ) & 0xffL );
        }
        return new TagDouble( Double.longBitsToDouble( raw ) );
    }

    public String toString() {
        return "DOUBLE " + value;
    }
}
|
<gh_stars>0
package main
import (
"context"
"go.temporal.io/sdk/client"
appsV1 "k8s.io/api/apps/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/kubernetes"
"log"
"paul-go/internal"
"paul-go/internal/util"
cluster_event "paul-go/internal/workflows/cluster-event"
dialogflow_entity "paul-go/internal/workflows/dialogflow-entity"
dialogflow_entity_util "paul-go/internal/workflows/dialogflow-entity/util"
"time"
)
// Dialogflow entity type IDs for each watched Kubernetes resource kind.
// These UUIDs identify pre-created entity types in the Dialogflow agent.
// NOTE(review): nodeEntityTypeId is not referenced in this file — confirm
// it is used elsewhere or still needed.
var namespaceEntityTypeId = "47bbdbd2-75a4-42c0-a091-7aaf0aae12e9"
var serviceEntityTypeId = "8f548cae-619c-45b9-8c88-813d68e75135"
var deploymentEntityTypeId = "450b06e5-3fa0-41c4-914c-dfa2bece48d0"
var podEntityTypeId = "7ed95939-23ff-4ab9-bc9e-7f2a0dcc23d6"
var nodeEntityTypeId = "1cdfdd7e-e6b9-422b-bd65-9612157e7500"
// main connects to Temporal and Kubernetes, launches one watcher goroutine
// per resource kind, and then blocks forever while the watchers run.
func main() {
	log.Println("Connecting to Temporal...")
	temporalClient := internal.StartTemporal()
	defer temporalClient.Close()
	log.Println("Connecting to Kubernetes...")
	kubeClient := util.GetKubeClient()
	log.Println("Starting namespace watcher")
	startNamespaceWatcher(temporalClient, kubeClient)
	log.Println("Starting service watcher")
	startServiceWatcher(temporalClient, kubeClient)
	log.Println("Starting deployment watcher")
	startDeploymentWatcher(temporalClient, kubeClient)
	log.Println("Starting pod watcher")
	startPodWatcher(temporalClient, kubeClient)
	//log.Println("Starting event watcher")
	// startEventWatcher(temporalClient, kubeClient)
	log.Println("Waiting for events.")
	// Keep the process alive; all work happens in the watcher goroutines.
	for {
		time.Sleep(time.Second)
	}
}
// startNamespaceWatcher seeds the namespace entity type with the current
// namespaces, then starts a goroutine watching for add/delete events.
func startNamespaceWatcher(temporalClient client.Client, kubeClient *kubernetes.Clientset) {
	ctx := context.Background()
	namespaceList, err := kubeClient.CoreV1().Namespaces().List(ctx, metav1.ListOptions{})
	if err != nil {
		log.Println("Failed to start namespace watcher: ", err)
		return
	}
	var namespaceNames []string
	for _, namespace := range namespaceList.Items {
		namespaceNames = append(namespaceNames, namespace.Name)
	}
	// Replace the entity value list with the current snapshot.
	setEntity(temporalClient, namespaceEntityTypeId, namespaceNames)
	// Watch from the snapshot's resource version so no event is missed.
	namespaceWatcher, err := kubeClient.CoreV1().Namespaces().Watch(ctx, metav1.ListOptions{ResourceVersion: namespaceList.ListMeta.ResourceVersion})
	if err != nil {
		log.Println("Failed to start namespace watcher: Failed to get list from version\n", err)
		return
	}
	go watchNamespaces(temporalClient, namespaceWatcher)
}
// watchNamespaces keeps the Dialogflow namespace entity type in sync with
// namespace add/delete events from the Kubernetes watch stream.
func watchNamespaces(temporalClient client.Client, watcher watch.Interface) {
	entityTypeId := namespaceEntityTypeId
	for event := range watcher.ResultChan() {
		ns := event.Object.(*v1.Namespace)
		switch event.Type {
		case watch.Added:
			log.Printf("Namespace %s added\n", ns.ObjectMeta.Name)
			addEntity(temporalClient, entityTypeId, []string{ns.ObjectMeta.Name})
		case watch.Deleted:
			log.Printf("Namespace %s deleted\n", ns.ObjectMeta.Name)
			// BUG FIX: deleted namespaces were previously re-added via
			// addEntity; mirror watchServices/watchDeployments/watchPods
			// and remove the entity instead.
			removeEntity(temporalClient, entityTypeId, []string{ns.ObjectMeta.Name})
		}
	}
}
// startServiceWatcher seeds the service entity type with the current
// services (all namespaces), then starts watching for add/delete events.
func startServiceWatcher(temporalClient client.Client, kubeClient *kubernetes.Clientset) {
	ctx := context.Background()
	serviceList, err := kubeClient.CoreV1().Services(v1.NamespaceAll).List(ctx, metav1.ListOptions{})
	if err != nil {
		log.Println("Failed to start service watcher: ", err)
		return
	}
	var names []string
	for _, svc := range serviceList.Items {
		names = append(names, svc.Name)
	}
	setEntity(temporalClient, serviceEntityTypeId, names)
	// Resume the watch from the snapshot's resource version.
	opts := metav1.ListOptions{ResourceVersion: serviceList.ListMeta.ResourceVersion}
	serviceWatcher, err := kubeClient.CoreV1().Services(v1.NamespaceAll).Watch(ctx, opts)
	if err != nil {
		log.Println("Failed to start service watcher: Failed to get list from version\n", err)
		return
	}
	go watchServices(temporalClient, serviceWatcher)
}
// watchServices keeps the Dialogflow service entity type in sync with
// service add/delete events from the Kubernetes watch stream.
func watchServices(temporalClient client.Client, watcher watch.Interface) {
	entityTypeId := serviceEntityTypeId
	for event := range watcher.ResultChan() {
		svc := event.Object.(*v1.Service)
		names := []string{svc.ObjectMeta.Name}
		if event.Type == watch.Added {
			log.Printf("Service %s/%s added\n", svc.ObjectMeta.Namespace, svc.ObjectMeta.Name)
			addEntity(temporalClient, entityTypeId, names)
		} else if event.Type == watch.Deleted {
			log.Printf("Service %s/%s deleted\n", svc.ObjectMeta.Namespace, svc.ObjectMeta.Name)
			removeEntity(temporalClient, entityTypeId, names)
		}
	}
}
// startDeploymentWatcher seeds the deployment entity type with the current
// deployments (all namespaces), then starts watching add/delete events.
func startDeploymentWatcher(temporalClient client.Client, kubeClient *kubernetes.Clientset) {
	ctx := context.Background()
	deploymentList, err := kubeClient.AppsV1().Deployments(v1.NamespaceAll).List(ctx, metav1.ListOptions{})
	if err != nil {
		log.Println("Failed to start deployment watcher: ", err)
		return
	}
	var deploymentNames []string
	for _, deployment := range deploymentList.Items {
		deploymentNames = append(deploymentNames, deployment.Name)
	}
	// Replace the entity value list with the current snapshot.
	setEntity(temporalClient, deploymentEntityTypeId, deploymentNames)
	// Resume the watch from the snapshot's resource version.
	deploymentWatcher, err := kubeClient.AppsV1().Deployments(v1.NamespaceAll).Watch(ctx, metav1.ListOptions{ResourceVersion: deploymentList.ListMeta.ResourceVersion})
	if err != nil {
		log.Println("Failed to start deployment watcher: Failed to get list from version\n", err)
		return
	}
	go watchDeployments(temporalClient, deploymentWatcher)
}
// watchDeployments keeps the Dialogflow deployment entity type in sync with
// deployment add/delete events from the Kubernetes watch stream.
func watchDeployments(temporalClient client.Client, watcher watch.Interface) {
	entityTypeId := deploymentEntityTypeId
	for event := range watcher.ResultChan() {
		deployment := event.Object.(*appsV1.Deployment)
		names := []string{deployment.ObjectMeta.Name}
		if event.Type == watch.Added {
			log.Printf("Deployment %s/%s added\n", deployment.ObjectMeta.Namespace, deployment.ObjectMeta.Name)
			addEntity(temporalClient, entityTypeId, names)
		} else if event.Type == watch.Deleted {
			log.Printf("Deployment %s/%s deleted\n", deployment.ObjectMeta.Namespace, deployment.ObjectMeta.Name)
			removeEntity(temporalClient, entityTypeId, names)
		}
	}
}
// startPodWatcher seeds the pod entity type with the current pods
// (all namespaces), then starts watching for add/delete events.
func startPodWatcher(temporalClient client.Client, kubeClient *kubernetes.Clientset) {
	ctx := context.Background()
	podList, err := kubeClient.CoreV1().Pods(v1.NamespaceAll).List(ctx, metav1.ListOptions{})
	if err != nil {
		log.Println("Failed to start pod watcher: ", err)
		return
	}
	var podNames []string
	for _, pod := range podList.Items {
		podNames = append(podNames, pod.Name)
	}
	// Replace the entity value list with the current snapshot.
	setEntity(temporalClient, podEntityTypeId, podNames)
	// Resume the watch from the snapshot's resource version.
	podWatcher, err := kubeClient.CoreV1().Pods(v1.NamespaceAll).Watch(ctx, metav1.ListOptions{ResourceVersion: podList.ListMeta.ResourceVersion})
	if err != nil {
		log.Println("Failed to start pod watcher: Failed to get list from version\n", err)
		return
	}
	go watchPods(temporalClient, podWatcher)
}
// watchPods keeps the Dialogflow pod entity type in sync with pod
// add/delete events from the Kubernetes watch stream.
func watchPods(temporalClient client.Client, watcher watch.Interface) {
	entityTypeId := podEntityTypeId
	for event := range watcher.ResultChan() {
		pod := event.Object.(*v1.Pod)
		names := []string{pod.ObjectMeta.Name}
		if event.Type == watch.Added {
			log.Printf("Pod %s/%s added\n", pod.ObjectMeta.Namespace, pod.ObjectMeta.Name)
			addEntity(temporalClient, entityTypeId, names)
		} else if event.Type == watch.Deleted {
			log.Printf("Pod %s/%s deleted\n", pod.ObjectMeta.Namespace, pod.ObjectMeta.Name)
			removeEntity(temporalClient, entityTypeId, names)
		}
	}
}
// startEventWatcher mirrors the other start*Watcher helpers for cluster
// events. NOTE(review): currently unused — its invocation in main() is
// commented out.
func startEventWatcher(temporalClient client.Client, kubeClient *kubernetes.Clientset) {
	ctx := context.Background()
	// Get a list of events
	eventList, err := kubeClient.CoreV1().Events(v1.NamespaceAll).List(ctx, metav1.ListOptions{})
	if err != nil {
		log.Println("Failed to start event watcher: ", err)
		return
	}
	// Resume the watch from the snapshot's resource version.
	eventWatcher, err := kubeClient.CoreV1().Events(v1.NamespaceAll).Watch(ctx, metav1.ListOptions{ResourceVersion: eventList.ListMeta.ResourceVersion})
	if err != nil {
		log.Println("Failed to start event watcher: Failed to get list from version\n", err)
		return
	}
	go watchEvents(temporalClient, eventWatcher)
}
// watchEvents starts a workflow for newly added cluster events and updates
// the workflow for every other event type.
func watchEvents(temporalClient client.Client, watcher watch.Interface) {
	for event := range watcher.ResultChan() {
		clusterEvent := event.Object.(*v1.Event)
		switch event.Type {
		case watch.Added:
			cluster_event.StartWorkflow(temporalClient, clusterEvent)
		default:
			cluster_event.UpdateWorkflow(temporalClient, clusterEvent)
		}
	}
}
// setEntity replaces the full value list of a Dialogflow entity type.
func setEntity(temporalClient client.Client, typeId string, values []string) {
	updateEntity(temporalClient, dialogflow_entity_util.SET, typeId, values)
}
// addEntity appends values to a Dialogflow entity type.
func addEntity(temporalClient client.Client, typeId string, values []string) {
	updateEntity(temporalClient, dialogflow_entity_util.ADD, typeId, values)
}
// removeEntity removes values from a Dialogflow entity type.
func removeEntity(temporalClient client.Client, typeId string, values []string) {
	updateEntity(temporalClient, dialogflow_entity_util.REMOVE, typeId, values)
}
// updateEntity submits an entity-maintenance workflow request (set/add/
// remove) for the given Dialogflow entity type and values.
func updateEntity(temporalClient client.Client, opCode dialogflow_entity_util.EntityOP, typeId string, values []string) {
	request := dialogflow_entity_util.EntityRequest{
		Operation:    opCode,
		EntityType:   typeId,
		EntityValues: values,
	}
	// Result intentionally discarded: fire-and-forget maintenance call.
	_ = dialogflow_entity.ExecuteWorkflow(temporalClient, request)
}
|
<gh_stars>0
import React from 'react'
import Title from '../Title'
import style from '../../css/contact.module.css'
// Contact section: heading plus a contact form that posts to Formspree.
// NOTE(review): the form action still contains an "<EMAIL>" placeholder —
// it must be replaced with a real Formspree endpoint before deployment.
const Contact = () => {
    return (
        <section className={style.contact}>
            <Title title="contact" subtitle="us"/>
            <div className={style.center}>
                {/* Plain HTML POST — no client-side submission handling. */}
                <form className={style.form} action="https://formspree.io/<EMAIL>" method="POST">
                    <div>
                        <label htmlFor="name">name</label>
                        <input type="text" name="name" id="name" className={style.formControl} placeholder="Your Name"/>
                    </div>
                    <div>
                        <label htmlFor="email">email</label>
                        <input type="email" name="email" id="email" className={style.formControl} placeholder="<EMAIL>"/>
                    </div>
                    <div>
                        <label htmlFor="message">message</label>
                        <textarea name="message" id="message" rows="10" className={style.formControl} placeholder="Your Message"/>
                    </div>
                    <div>
                        {/* "Envoyer" is French for "Send"; user-facing string kept as-is. */}
                        <input type="submit" value="Envoyer" className={style.submit}/>
                    </div>
                </form>
            </div>
        </section>
    )
}
export default Contact
|
// $(window).on('beforeunload', function() {
// $(window).scrollTop(0);
// });
$(function () {
    $(document).scroll(function () {
        // Mark the header once the page has scrolled past its own height.
        var $nav = $(".header");
        var pastHeader = $(this).scrollTop() > $nav.height();
        $nav.toggleClass('scrolled', pastHeader);
        // Swap the logo: dark variant at the very top, white once scrolled.
        var logoSrc = $(document).scrollTop() === 0
            ? '/assets/images/logo-b.png'
            : '/assets/images/logo-w.png';
        $('.logo-img').attr('src', logoSrc);
    });
});
$(function () {
    var btn = $('#btt');
    // Reveal the back-to-top button after 300px of scrolling.
    $(window).scroll(function () {
        btn.toggleClass('show', $(window).scrollTop() > 300);
    });
    // Smoothly scroll the page back to the top on click.
    btn.on('click', function (e) {
        e.preventDefault();
        $('html, body').animate({scrollTop: 0}, '300');
    });
});
|
use serde::Serialize;
/// Database management operations, serialized with SCREAMING_SNAKE_CASE
/// wire names (see the `#[serde(rename)]` attributes below).
#[derive(Debug, Serialize)]
enum DatabaseOperation {
    // Serializes as "DELETE": the variant name already matches the wire name.
    DELETE,
    #[serde(rename = "ALTER")]
    ALTER,
    #[serde(rename = "DESCRIBE")]
    DESCRIBE,
    #[serde(rename = "DESCRIBE_CONFIGS")]
    DESCRIBECONFIGS,
    #[serde(rename = "ALTER_CONFIGS")]
    ALTERCONFIGS,
}
/// Returns the serde wire name for `op`, mirroring the `#[serde(rename)]`
/// attributes on `DatabaseOperation`.
fn get_serde_rename(op: DatabaseOperation) -> String {
    let name = match op {
        DatabaseOperation::DELETE => "DELETE",
        DatabaseOperation::ALTER => "ALTER",
        DatabaseOperation::DESCRIBE => "DESCRIBE",
        DatabaseOperation::DESCRIBECONFIGS => "DESCRIBE_CONFIGS",
        DatabaseOperation::ALTERCONFIGS => "ALTER_CONFIGS",
    };
    name.to_string()
}
fn main() {
    // Demonstrates that get_serde_rename reproduces the serde names.
    println!("{}", get_serde_rename(DatabaseOperation::DELETE)); // Output: DELETE
    println!("{}", get_serde_rename(DatabaseOperation::ALTER)); // Output: ALTER
    println!("{}", get_serde_rename(DatabaseOperation::DESCRIBE)); // Output: DESCRIBE
    println!("{}", get_serde_rename(DatabaseOperation::DESCRIBECONFIGS)); // Output: DESCRIBE_CONFIGS
    println!("{}", get_serde_rename(DatabaseOperation::ALTERCONFIGS)); // Output: ALTER_CONFIGS
}
import React from 'react';
const App = ({ data }) => {
return (
<table>
<thead>
<tr>
<th>ID</th>
<th>Name</th>
<th>Age</th>
</tr>
</thead>
<tbody>
{data.map(item => (
<tr key={item.id}>
<td>{item.id}</td>
<td>{item.name}</td>
<td>{item.age}</td>
</tr>
))}
</tbody>
</table>
);
};
export default App; |
#!/bin/sh

# Abort immediately if any command fails.
set -e

#
# functions
#

. /usr/share/one/supervisor/scripts/lib/functions.sh

#
# run service
#

# Fixed socket path so other services can locate the agent.
SSH_AUTH_SOCK=/var/run/one/ssh-agent.sock
export SSH_AUTH_SOCK

# Persist the socket location for consumers that source this env file.
echo "SSH_AUTH_SOCK=${SSH_AUTH_SOCK}" > /var/run/one/ssh-agent.env

# emulate ExecStartPost from systemd service unit
if is_running opennebula-ssh-add ; then
    supervisorctl stop opennebula-ssh-add
fi

# opennebula-ssh-add will wait until the socket emerges
rm -f "$SSH_AUTH_SOCK"
supervisorctl start opennebula-ssh-add

msg "Service started!"

# Replace this shell with the agent: foreground mode (-D) on the fixed socket.
exec /usr/bin/ssh-agent -D -a "$SSH_AUTH_SOCK"
|
#!/bin/sh
# Build/refresh the "stackstac" Coiled software environment from the
# poetry lockfile plus the current stackstac version.

# Abort on any failure (and on unset variables) so a failed export cannot
# produce a broken environment or delete a half-written requirements file.
set -eu

poetry export --without-hashes -E binder > requirements.txt
echo "stackstac==$(poetry version -s)" >> requirements.txt
poetry run coiled env create -n stackstac --pip requirements.txt
rm requirements.txt
package com.kevin.utils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.StreamCorruptedException;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.text.TextUtils;
import android.util.Base64;
import android.util.Log;
/**
 * SharedPreferences cache utility.
 *
 * Static helpers for reading/writing primitives and (Base64-encoded,
 * Java-serialized) objects in a single app-wide preference file.
 *
 * NOTE(review): writes use commit() (synchronous) rather than apply();
 * kept as-is to preserve the original write-completion semantics.
 *
 * @author Kevin
 */
public class SharePrefUtil {

    /** Name of the backing preference file. */
    private final static String SP_NAME = "sp";
    /** Cached instance, created lazily on first use (mode 0 = MODE_PRIVATE). */
    private static SharedPreferences mSp;

    /** Lazily resolves the shared SharedPreferences instance. */
    private static SharedPreferences prefs(Context context) {
        if (mSp == null) {
            mSp = context.getSharedPreferences(SP_NAME, 0);
        }
        return mSp;
    }

    public static void saveBoolean(Context context, String key, boolean value) {
        prefs(context).edit().putBoolean(key, value).commit();
    }

    public static void saveString(Context context, String key, String value) {
        prefs(context).edit().putString(key, value).commit();
    }

    public static void saveLong(Context context, String key, long value) {
        prefs(context).edit().putLong(key, value).commit();
    }

    public static void saveInt(Context context, String key, int value) {
        prefs(context).edit().putInt(key, value).commit();
    }

    public static void saveFloat(Context context, String key, float value) {
        prefs(context).edit().putFloat(key, value).commit();
    }

    public static String getString(Context context, String key, String defValue) {
        return prefs(context).getString(key, defValue);
    }

    public static int getInt(Context context, String key, int defValue) {
        return prefs(context).getInt(key, defValue);
    }

    public static long getLong(Context context, String key, long defValue) {
        return prefs(context).getLong(key, defValue);
    }

    public static float getFloat(Context context, String key, float defValue) {
        return prefs(context).getFloat(key, defValue);
    }

    public static boolean getBoolean(Context context, String key, boolean defValue) {
        return prefs(context).getBoolean(key, defValue);
    }

    /**
     * Serializes object, Base64-encodes the bytes and stores them under key.
     * IOExceptions are logged (printStackTrace) and swallowed, as before.
     */
    public static void saveObj(Context context, String key, Object object) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = null;
        try {
            oos = new ObjectOutputStream(baos);
            oos.writeObject(object);
            String objBase64 = new String(Base64.encode(baos.toByteArray(), Base64.DEFAULT));
            prefs(context).edit().putString(key, objBase64).commit();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close in finally (previously only closed on the success path).
            if (oos != null) {
                try {
                    oos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Reads a value previously stored by saveObj.
     *
     * @return the deserialized object, or null when the key is absent or
     *         the payload cannot be deserialized
     */
    public static Object getObj(Context context, String key) {
        String objBase64 = prefs(context).getString(key, null);
        if (TextUtils.isEmpty(objBase64))
            return null;
        byte[] base64Bytes = Base64.decode(objBase64.getBytes(), Base64.DEFAULT);
        ByteArrayInputStream bais = new ByteArrayInputStream(base64Bytes);
        ObjectInputStream ois = null;
        Object obj = null;
        try {
            ois = new ObjectInputStream(bais);
            obj = ois.readObject();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close in finally (previously only closed on the success path).
            if (ois != null) {
                try {
                    ois.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return obj;
    }

    /**
     * Removes the cache entry for key.
     *
     * @param context context used to resolve the preference file
     * @param key     entry to remove
     */
    public static void removeKey(Context context, String key) {
        prefs(context).edit().remove(key).commit();
    }

    /**
     * Removes the entry for key and returns the value now associated with
     * it — i.e. effectively defValue, since the key was just removed.
     * Kept for backward compatibility with existing callers.
     */
    public static String removeKey(Context context, String key, String defValue) {
        prefs(context).edit().remove(key).commit();
        return prefs(context).getString(key, defValue);
    }

    /**
     * Clears the whole cache.
     * @param context
     */
    public static void clear(Context context) {
        prefs(context).edit().clear().commit();
    }

    /**
     * Clears the cache, keeping only the given keys.
     * @param context
     * @param keys entries to preserve (may be empty/null to clear everything)
     */
    public static void clear(Context context, String... keys) {
        Map<String, ?> map = prefs(context).getAll();
        if (null != map && map.size() > 0) {
            Set<String> keySet = map.keySet();
            if (keys != null) {
                keySet.removeAll(Arrays.asList(keys));
            }
            for (String key : keySet) {
                removeKey(context, key);
            }
        }
    }

    /**
     * Returns the value stored under key, typed by the expected wrapper
     * class (Integer, String, Boolean, Long or Float).
     *
     * @param key   preference key
     * @param clazz expected value type
     * @return the stored value, or null for unsupported types
     */
    public <T> T getValue(Context context, String key, Class<T> clazz) {
        if (context == null) {
            throw new RuntimeException("请先调用带有context,name参数的构造!");
        }
        return getValue(key, clazz, prefs(context));
    }

    /**
     * Stores a complex value as a Base64-encoded serialized object.
     *
     * @param key preference key
     * @param object value to serialize (must be Serializable)
     */
    public void setObject(Context context, String key, Object object) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream out = null;
        try {
            out = new ObjectOutputStream(baos);
            out.writeObject(object);
            String objectVal = new String(Base64.encode(baos.toByteArray(), Base64.DEFAULT));
            Editor editor = prefs(context).edit();
            editor.putString(key, objectVal);
            editor.commit();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                baos.close();
                if (out != null) {
                    out.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Reads a complex value stored by setObject.
     *
     * @return the deserialized value, or null when absent/unreadable
     */
    @SuppressWarnings("unchecked")
    public <T> T getObject(Context context, String key, Class<T> clazz) {
        SharedPreferences sp = prefs(context);
        if (sp.contains(key)) {
            String objectVal = sp.getString(key, null);
            byte[] buffer = Base64.decode(objectVal, Base64.DEFAULT);
            ByteArrayInputStream bais = new ByteArrayInputStream(buffer);
            ObjectInputStream ois = null;
            try {
                ois = new ObjectInputStream(bais);
                return (T) ois.readObject();
            } catch (StreamCorruptedException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            } finally {
                try {
                    bais.close();
                    if (ois != null) {
                        ois.close();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return null;
    }

    /**
     * Internal typed lookup.
     *
     * Bug fix: the previous implementation instantiated clazz via
     * newInstance() and branched with `instanceof`; Integer, Boolean, Long
     * and Float have no public no-arg constructor, so newInstance() threw
     * InstantiationException and those lookups always returned null.
     * Comparing the class token directly needs no instance at all.
     */
    @SuppressWarnings("unchecked")
    private <T> T getValue(String key, Class<T> clazz, SharedPreferences sp) {
        if (clazz == Integer.class) {
            return (T) Integer.valueOf(sp.getInt(key, 0));
        } else if (clazz == String.class) {
            return (T) sp.getString(key, "");
        } else if (clazz == Boolean.class) {
            return (T) Boolean.valueOf(sp.getBoolean(key, false));
        } else if (clazz == Long.class) {
            return (T) Long.valueOf(sp.getLong(key, 0L));
        } else if (clazz == Float.class) {
            // was sp.getFloat(key, 0L): long literal silently widened; 0f is the intent
            return (T) Float.valueOf(sp.getFloat(key, 0f));
        }
        Log.e("system", "无法找到" + key + "对应的值");
        return null;
    }
}
|
# Work from the genome-browser directory.
cd /gpfs/home/hdiao/Geombrowser
# Scale the bedGraph coverage column (field 4) by the normalization factor
# 0.484942119, truncating to an integer; fields 1-3 (chrom, start, end)
# pass through unchanged.
awk '{print $1 "\t" $2 "\t" $3 "\t" int($4*0.484942119)}' B2_169_13_flt_sft_macs.bed_treat_pileup_sorted_flb.bdg > B2_169_13_flt_sft_macs.bed_treat_pileup_sorted_flb_bdgaccuNORM.bdg
<filename>java-server/src/main/java/com/wednesday/model/Message.java
package com.wednesday.model;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Proxy;
import org.springframework.stereotype.Indexed;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import java.sql.Timestamp;
/**
 * JPA entity for one chat message: who sent it, in which chat, its type
 * (defaults to {@link #DEFAULT_TYPE}), content, and a creation timestamp
 * stamped in the constructors.
 */
@Entity(name = "message")
@Table(name = "messages")
@Proxy(lazy = false)
@Indexed
public class Message {

    /** Message type used when none is supplied. */
    public final static String DEFAULT_TYPE = "text";

    /** Surrogate primary key, generated as a UUID string. */
    @Id
    @GeneratedValue(generator = "uuid")
    @GenericGenerator(name = "uuid", strategy = "uuid2")
    private String id;

    // Narrowed from package-private to private: all access goes through the
    // accessors below. NOTE(review): confirm no same-package code touched
    // these fields directly.
    private String userId;
    private String chatId;
    private String type;
    private String content;
    private Timestamp time;

    /** No-arg constructor required by JPA; stamps type and creation time. */
    public Message() {
        this.type = DEFAULT_TYPE;
        this.time = new Timestamp(System.currentTimeMillis());
    }

    /** Creates a text message for the given user and chat, stamped now. */
    public Message(String userId, String chatId, String content) {
        this.userId = userId;
        this.chatId = chatId;
        this.type = DEFAULT_TYPE;
        this.content = content;
        this.time = new Timestamp(System.currentTimeMillis());
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getChatId() {
        return chatId;
    }

    public void setChatId(String chatId) {
        this.chatId = chatId;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public Timestamp getTime() {
        return time;
    }

    public void setTime(Timestamp time) {
        this.time = time;
    }
}
|
#!/usr/bin/env bash
# Pull every sibling repository of the current one, then return to
# project-signer.

cd ..
for item in *; do
    if [[ -d "$item" ]]; then
        echo "---*** Pulling $item ***---"
        # Run in a subshell so the directory change cannot leak and a
        # failing cd/git pull in one repo cannot derail the whole loop.
        ( cd "$item" && git pull )
    fi
done
cd project-signer || exit
|
<gh_stars>100-1000
/*
* [The "BSD licence"]
* Copyright (c) 2010 <NAME> (JesusFreke)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib.Util;
import java.util.HashMap;
/**
 * The dex access flags, with per-kind validity (class/method/field) and
 * helpers to decode a raw flag word into the flags valid for each kind.
 * Note that some bit values are deliberately shared between kinds
 * (e.g. 0x40 is VOLATILE for fields but BRIDGE for methods).
 */
public enum AccessFlags
{
    PUBLIC(0x1, "public", true, true, true),
    PRIVATE(0x2, "private", true, true, true),
    PROTECTED(0x4, "protected", true, true, true),
    STATIC(0x8, "static", true, true, true),
    FINAL(0x10, "final", true, true, true),
    SYNCHRONIZED(0x20, "synchronized", false, true, false),
    VOLATILE(0x40, "volatile", false, false, true),
    BRIDGE(0x40, "bridge", false, true, false),
    TRANSIENT(0x80, "transient", false, false, true),
    VARARGS(0x80, "varargs", false, true, false),
    NATIVE(0x100, "native", false, true, false),
    INTERFACE(0x200, "interface", true, false, false),
    ABSTRACT(0x400, "abstract", true, true, false),
    STRICTFP(0x800, "strictfp", false, true, false),
    SYNTHETIC(0x1000, "synthetic", true, true, true),
    ANNOTATION(0x2000, "annotation", true, false, false),
    ENUM(0x4000, "enum", true, false, true),
    CONSTRUCTOR(0x10000, "constructor", false, true, false),
    DECLARED_SYNCHRONIZED(0x20000, "declared-synchronized", false, true, false);

    // Selectors for the shared filtering logic below.
    private static final int KIND_CLASS = 0;
    private static final int KIND_METHOD = 1;
    private static final int KIND_FIELD = 2;

    private final int value;               // raw dex access-flag bit
    private final String accessFlagName;   // human-readable name
    private final boolean validForClass;
    private final boolean validForMethod;
    private final boolean validForField;

    //cache the array of all AccessFlags, because .values() allocates a new array for every call
    private final static AccessFlags[] allFlags;

    private static final HashMap<String, AccessFlags> accessFlagsByName;

    static {
        allFlags = AccessFlags.values();
        accessFlagsByName = new HashMap<String, AccessFlags>();
        for (AccessFlags accessFlag: allFlags) {
            accessFlagsByName.put(accessFlag.accessFlagName, accessFlag);
        }
    }

    private AccessFlags(int value, String accessFlagName, boolean validForClass, boolean validForMethod,
                        boolean validForField) {
        this.value = value;
        this.accessFlagName = accessFlagName;
        this.validForClass = validForClass;
        this.validForMethod = validForMethod;
        this.validForField = validForField;
    }

    /** Whether this flag applies to the given member kind. */
    private boolean isValidFor(int kind) {
        switch (kind) {
            case KIND_CLASS:
                return validForClass;
            case KIND_METHOD:
                return validForMethod;
            default:
                return validForField;
        }
    }

    /**
     * Shared implementation of the three getAccessFlagsFor* methods:
     * collects, in declaration order, every flag valid for the given kind
     * whose bit is set in accessFlagValue. (Previously this loop pair was
     * copy-pasted three times.)
     */
    private static AccessFlags[] getFlags(int accessFlagValue, int kind) {
        int size = 0;
        for (AccessFlags accessFlag: allFlags) {
            if (accessFlag.isValidFor(kind) && (accessFlagValue & accessFlag.value) != 0) {
                size++;
            }
        }

        AccessFlags[] result = new AccessFlags[size];
        int position = 0;
        for (AccessFlags accessFlag: allFlags) {
            if (accessFlag.isValidFor(kind) && (accessFlagValue & accessFlag.value) != 0) {
                result[position++] = accessFlag;
            }
        }
        return result;
    }

    public static AccessFlags[] getAccessFlagsForClass(int accessFlagValue) {
        return getFlags(accessFlagValue, KIND_CLASS);
    }

    public static AccessFlags[] getAccessFlagsForMethod(int accessFlagValue) {
        return getFlags(accessFlagValue, KIND_METHOD);
    }

    public static AccessFlags[] getAccessFlagsForField(int accessFlagValue) {
        return getFlags(accessFlagValue, KIND_FIELD);
    }

    /** Joins the flags' names with single spaces, no trailing space. */
    private static String formatAccessFlags(AccessFlags[] accessFlags) {
        int size = 0;
        for (AccessFlags accessFlag: accessFlags) {
            size += accessFlag.toString().length() + 1;
        }

        StringBuilder sb = new StringBuilder(size);
        for (AccessFlags accessFlag: accessFlags) {
            sb.append(accessFlag.toString());
            sb.append(" ");
        }
        if (accessFlags.length > 0) {
            sb.delete(sb.length() - 1, sb.length());
        }
        return sb.toString();
    }

    public static String formatAccessFlagsForClass(int accessFlagValue) {
        return formatAccessFlags(getAccessFlagsForClass(accessFlagValue));
    }

    public static String formatAccessFlagsForMethod(int accessFlagValue) {
        return formatAccessFlags(getAccessFlagsForMethod(accessFlagValue));
    }

    public static String formatAccessFlagsForField(int accessFlagValue) {
        return formatAccessFlags(getAccessFlagsForField(accessFlagValue));
    }

    /** Looks up a flag by its textual name (e.g. "public"); null if unknown. */
    public static AccessFlags getAccessFlag(String accessFlag) {
        return accessFlagsByName.get(accessFlag);
    }

    public int getValue() {
        return value;
    }

    public String toString() {
        return accessFlagName;
    }
}
|
package it.unibo.osu.util;
import java.util.HashMap;
import java.util.Map;
import it.unibo.osu.controller.HitActionObserver;
import it.unibo.osu.model.GamePoints;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.text.Text;
import javafx.util.Duration;
/**
* The Class Clock.
*/
public class Clock implements HitActionObserver {
private Text text;
private Timeline timeline;
private int mins = 0;
private int secs = 0;
private int millis = 0;
private boolean sos = true;
/** The end time. */
private Text endTime;
/** The time statistic. */
private Map<String, GamePoints> timeStatistic = new HashMap<>();
void change(final Text text) {
if (millis == 1000) {
secs++;
millis = 0;
}
if (secs == 60) {
mins++;
secs = 0;
}
text.setText(
(((mins / 10) == 0) ? "0" : "") + mins + ":"
+ (((secs / 10) == 0) ? "0" : "") + secs + ":"
+ (((millis / 10) == 0) ? "00"
: (((millis / 100) == 0) ? "0" : ""))
+ millis++);
}
/**
* Start.
*/
public void start() {
text = new Text("00:00:000");
timeline = new Timeline(new KeyFrame(Duration.millis(1),
new EventHandler<ActionEvent>() {
@Override
public void handle(final ActionEvent event) {
change(text);
}
}));
timeline.setCycleCount(Timeline.INDEFINITE);
timeline.setAutoReverse(false);
if (sos) {
timeline.play();
sos = false;
} else {
timeline.pause();
sos = true;
}
}
/**
* Gets the time statistic.
*
* @return the time statistic
*/
public final Map<String, GamePoints> getTimeStatistic() {
return timeStatistic;
}
/**
* Gets the current.
*
* @return the current
*/
public String getCurrent() {
return this.text.toString();
}
/**
* Pause.
*/
public void pause() {
timeline.pause();
}
/**
* Stop.
*/
public void stop() {
timeline.stop();
this.endTime = this.text;
}
/**
* On notify.
*
* @param points the points
*/
@Override
public void onNotify(final GamePoints points) {
this.timeStatistic.put(this.text.getText(), points);
}
} |
import java.util.LinkedList;
import java.util.NoSuchElementException;

/**
 * A min-priority queue of ints backed by a sorted linked list.
 *
 * Fixed: despite its name (and the "ordered list" comment), the previous
 * implementation appended on enqueue and removed from the head, i.e. plain
 * FIFO. enqueue() now inserts in sorted position so dequeue()/peek()
 * always yield the smallest element. Equal elements keep insertion order.
 */
public class PriorityQueue {

    // Kept sorted ascending: the head is always the minimum.
    private final LinkedList<Integer> list;

    public PriorityQueue() {
        list = new LinkedList<>();
    }

    /** Inserts item, keeping the list sorted (O(n)). */
    public void enqueue(int item) {
        int index = 0;
        while (index < list.size() && list.get(index) <= item) {
            index++;
        }
        list.add(index, item);
    }

    /**
     * Removes and returns the smallest element.
     *
     * @throws NoSuchElementException if the queue is empty
     */
    public int dequeue() {
        if (list.isEmpty()) {
            throw new NoSuchElementException("queue is empty");
        }
        return list.removeFirst();
    }

    /**
     * Returns the smallest element without removing it.
     *
     * @throws NoSuchElementException if the queue is empty
     */
    public int peek() {
        if (list.isEmpty()) {
            throw new NoSuchElementException("queue is empty");
        }
        return list.getFirst();
    }

    /** @return true when the queue holds no elements */
    public boolean isEmpty() {
        return list.isEmpty();
    }
}
from rest_framework import serializers
from .post import PostSerializer
from ..models import Thread
# Fields clients may read but never write through the thread serializers.
THREAD_READ_ONLY_FIELDS = (
    'is_pinned', 'is_closed', 'thread_id',
)
# The internal PK is hidden from API payloads.
THREAD_EXCLUDE_FIELDS = ('id',)
class ThreadPreviewSerializer(serializers.ModelSerializer):
    """Compact thread representation for listings.

    Serializes the opening post plus ``last_posts`` (presumably the most
    recent replies — confirm against the Thread model) rather than every
    post, unlike :class:`ThreadSerializer`.
    """

    op_post = PostSerializer(many=False)
    last_posts = PostSerializer(many=True)
    thread_num = serializers.ReadOnlyField()
    posts_count = serializers.ReadOnlyField()
    # Expose the parent board's short name instead of its PK.
    board = serializers.ReadOnlyField(source='board.board')
    files_count = serializers.ReadOnlyField()

    class Meta:
        model = Thread
        exclude = THREAD_EXCLUDE_FIELDS
        read_only_fields = THREAD_READ_ONLY_FIELDS
class ThreadSerializer(serializers.ModelSerializer):
    """Full thread representation: every post plus aggregate counters."""

    posts = PostSerializer(many=True)
    thread_num = serializers.ReadOnlyField()
    posts_count = serializers.ReadOnlyField()
    # Expose the parent board's short name instead of its PK.
    board = serializers.ReadOnlyField(source='board.board')
    files_count = serializers.ReadOnlyField()
    unique_posters = serializers.ReadOnlyField()

    class Meta:
        model = Thread
        exclude = THREAD_EXCLUDE_FIELDS
        read_only_fields = THREAD_READ_ONLY_FIELDS
|
"""Game history views."""
from .serializers import (
TournamentSerializer,
SeasonSerializer,
GameSerializer,
GameEventSerializer,
TournamentLocationSerializer,
TournamentCourtSerializer,
GameAnnounceSerializer,
)
from .models import (
TournamentCourt,
TournamentLocation,
Tournament,
Season,
Game,
InvalidGameActionError,
GameEvent,
GameAnnounce,
)
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework_api_key.permissions import HasAPIKey
import logging
import json
LOGGER = logging.getLogger(__name__)
class TournamentViewSet(viewsets.ReadOnlyModelViewSet):
    """Tournament viewset (list/retrieve only)."""

    # Either a valid API key or an authenticated session grants access.
    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = Tournament.objects.all()
    serializer_class = TournamentSerializer
class TournamentLocationViewSet(viewsets.ReadOnlyModelViewSet):
    """Tournament location viewset (list/retrieve only)."""

    # Either a valid API key or an authenticated session grants access.
    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = TournamentLocation.objects.all()
    serializer_class = TournamentLocationSerializer
class TournamentCourtViewSet(viewsets.ReadOnlyModelViewSet):
    """Tournament court viewset (list/retrieve only)."""

    # Either a valid API key or an authenticated session grants access.
    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = TournamentCourt.objects.all()
    serializer_class = TournamentCourtSerializer
class SeasonViewSet(viewsets.ReadOnlyModelViewSet):
    """Season viewset (list/retrieve only)."""

    # Either a valid API key or an authenticated session grants access.
    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = Season.objects.all()
    serializer_class = SeasonSerializer
class GameViewSet(viewsets.ReadOnlyModelViewSet):
    """Game viewset.

    Read-only listing/retrieval of games, plus POST actions that drive the
    game lifecycle (start/stop) and its event stream (push/undo). Mutating
    actions take a JSON string in the ``payload`` form field and answer
    ``{"status": "ok"}`` or ``{"status": "error", "error": ...}``.
    """

    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = Game.objects.all()
    serializer_class = GameSerializer

    @staticmethod
    def _parse_payload(request):
        """Decode the JSON string carried in the ``payload`` form field.

        Returns ``(data, None)`` on success, or ``(None, error_response)``
        when the field is missing or not valid JSON. Shared by the three
        payload-carrying actions below (was triplicated inline).
        """
        try:
            return json.loads(request.data["payload"]), None
        except (KeyError, json.JSONDecodeError):
            return None, Response({"status": "error", "error": "malformed request"})

    @action(detail=True, methods=["post"])
    def start_game(self, request, pk=None):
        """Flag game as live."""
        LOGGER.info("Trying to flag game as started...")
        game = self.get_object()
        request_data, error = self._parse_payload(request)
        if error is not None:
            return error
        try:
            game.start_game(**request_data)
        except InvalidGameActionError as ex:
            # cannot start
            LOGGER.error(f"Cannot start game: {ex}")
            return Response({"status": "error", "error": str(ex)})
        return Response({"status": "ok"})

    @action(detail=True, methods=["post"])
    def stop_game(self, request, pk=None):
        """Flag game as done."""
        game = self.get_object()
        request_data, error = self._parse_payload(request)
        if error is not None:
            return error
        try:
            game.stop_game(**request_data)
        except InvalidGameActionError as ex:
            return Response({"status": "error", "error": str(ex)})
        return Response({"status": "ok"})

    @action(detail=True, methods=["post"])
    def push_event(self, request, pk=None):
        """Push a single event onto the game's event stream."""
        game = self.get_object()
        request_data, error = self._parse_payload(request)
        if error is not None:
            return error
        try:
            game.push_event(**request_data)
        except InvalidGameActionError as ex:
            LOGGER.error(f"ERROR: Cannot push event: {ex}")
            return Response({"status": "error", "error": str(ex)})
        return Response({"status": "ok"})

    @action(detail=True)
    def undo_last_event(self, request, pk=None):
        """Undo last event.

        NOTE(review): exposed via GET (no ``methods=``) although it mutates
        state; kept as-is for client compatibility — consider POST.
        """
        game = self.get_object()
        try:
            game.undo_last_event()
        except InvalidGameActionError as ex:
            return Response({"status": "error", "error": str(ex)})
        return Response({"status": "ok"})
class GameEventViewSet(viewsets.ReadOnlyModelViewSet):
    """Game event viewset (list/retrieve only)."""

    # Either a valid API key or an authenticated session grants access.
    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = GameEvent.objects.all()
    serializer_class = GameEventSerializer
class AnnounceViewSet(viewsets.ReadOnlyModelViewSet):
    """Announce view set (list/retrieve only)."""

    # Either a valid API key or an authenticated session grants access.
    permission_classes = [HasAPIKey | IsAuthenticated]
    queryset = GameAnnounce.objects.all()
    serializer_class = GameAnnounceSerializer
|
<reponame>oddnetworks/oddcast-aws-transport<gh_stars>1-10
'use strict';

const AWS = require('aws-sdk');

// Credentials and region come from the environment; the queue URL is the
// first CLI argument.
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_REGION = process.env.AWS_REGION;
const QUEUE_URL = process.argv[2];

// Fail fast on missing configuration.
if (!AWS_ACCESS_KEY_ID) {
    throw new Error('Missing env var AWS_ACCESS_KEY_ID');
}
if (!AWS_SECRET_ACCESS_KEY) {
    throw new Error('Missing env var AWS_SECRET_ACCESS_KEY');
}
if (!AWS_REGION) {
    throw new Error('Missing env var AWS_REGION');
}
if (!QUEUE_URL) {
    throw new Error('missing argv for Queue URL');
}

const sqs = new AWS.SQS({
    apiVersion: '2012-11-05',
    region: AWS_REGION,
    accessKeyId: AWS_ACCESS_KEY_ID,
    secretAccessKey: AWS_SECRET_ACCESS_KEY
});

// Long-poll (20s) for at most one message, requesting the 'pattern'
// message attribute.
const options = {
    QueueUrl: QUEUE_URL,
    WaitTimeSeconds: 20,
    MaxNumberOfMessages: 1,
    MessageAttributeNames: ['pattern']
};

// The callback will be called back immediately if there is a message available, or after
// WaiteTimeSeconds if no message is available.
// After the callback is called, receiveMessage() needs to be called again to initiate
// the next long polling session.
// NOTE(review): as written this script performs a single polling round and
// exits; it does not re-invoke receiveMessage().
sqs.receiveMessage(options, (err, res) => {
    if (err) {
        console.error(err.stack || err.message || err);
        return;
    }
    const messages = res.Messages || [];
    console.log('got %d messages', messages.length);
    messages.forEach(processMessage);
});
// Log the received message in full, then delete it from the queue.
function processMessage(msg) {
    console.log(JSON.stringify(msg, null, 2));

    const deleteParams = {
        QueueUrl: QUEUE_URL,
        ReceiptHandle: msg.ReceiptHandle
    };

    sqs.deleteMessage(deleteParams, (deleteError, deleteResult) => {
        if (deleteError) {
            console.error(deleteError.stack || deleteError.message || deleteError);
            return;
        }
        console.log(JSON.stringify(deleteResult, null, 2));
    });
}
|
using System;
using System.IO;

public class StreamProcessor
{
    /// <summary>
    /// Copies every remaining byte of <paramref name="stream"/> into a new array.
    /// </summary>
    /// <param name="stream">Source stream; read from its current position.</param>
    /// <returns>A fresh byte array holding the copied data.</returns>
    /// <exception cref="ArgumentNullException">If <paramref name="stream"/> is null.</exception>
    public byte[] ReadStreamBytes(Stream stream)
    {
        if (stream == null)
        {
            throw new ArgumentNullException(nameof(stream), "Stream cannot be null");
        }

        using (var buffer = new MemoryStream())
        {
            stream.CopyTo(buffer);
            return buffer.ToArray();
        }
    }
}
import React from "react"
import { useIntl, Link } from "gatsby-plugin-intl-graphql"
import Layout from "../components/layout"
import SEO from "../components/seo"
const SecondPage = () => {
const intl = useIntl().messages
return (
<Layout>
<SEO title={intl.static.page_two.title} />
<h1>{intl.static.page_two.head}</h1>
<p>{intl.static.page_two.description}</p>
<Link to="/">
{intl.static.page_two.go_to}
</Link>
</Layout>
)
}
export default SecondPage
|
#!/bin/bash
# Download the mu19ksubc.db annotation tarball from the first working
# mirror, verify its MD5, install it into the conda R library, clean up.

FN="mu19ksubc.db_3.2.3.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.10/data/annotation/src/contrib/mu19ksubc.db_3.2.3.tar.gz"
  "https://bioarchive.galaxyproject.org/mu19ksubc.db_3.2.3.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-mu19ksubc.db/bioconductor-mu19ksubc.db_3.2.3_src_all.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-mu19ksubc.db/bioconductor-mu19ksubc.db_3.2.3_src_all.tar.gz"
)
MD5="0cb0abb16337b8acc9dd365f5225856c"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p "$STAGING"
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in "${URLS[@]}"; do
  curl "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # md5sum -c check lines use the format "<hash>  <file>" — two
    # separator characters (previously only one was supplied).
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  # Was "else if ...; then ... fi fi" — equivalent via nested-if parsing,
  # but elif is the idiomatic construct.
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
package Friend_Circles;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
public class Solution {

    /**
     * Counts the friend circles (connected components) described by the
     * symmetric 0/1 adjacency matrix M, using union-find with path
     * compression.
     *
     * Fixes two issues in the label-propagation original:
     * - it compared boxed labels with == ({@code marked[k] == oldMark}),
     *   which is reference equality and wrong outside the Integer cache
     *   range [-128, 127], i.e. for more than ~128 people;
     * - its relabel-on-merge loop was O(N^3) worst case; union-find is
     *   near-linear in the number of matrix cells.
     *
     * @param M square symmetric adjacency matrix; M[i][j] == 1 means i and
     *          j are direct friends
     * @return the number of circles (0 for an empty matrix)
     */
    public int findCircleNum(int[][] M) {
        int N = M.length;
        int[] parent = new int[N];
        for (int i = 0; i < N; i++) {
            parent[i] = i;
        }
        // Each successful union merges two circles into one.
        int circles = N;
        for (int i = 0; i < N; i++) {
            for (int j = i + 1; j < N; j++) {
                if (M[i][j] == 1 && union(parent, i, j)) {
                    circles--;
                }
            }
        }
        return circles;
    }

    /** Root of x's set, compressing the path as it walks up. */
    private int find(int[] parent, int x) {
        while (parent[x] != x) {
            parent[x] = parent[parent[x]];
            x = parent[x];
        }
        return x;
    }

    /** Unions the sets of a and b; returns true if they were disjoint. */
    private boolean union(int[] parent, int a, int b) {
        int rootA = find(parent, a);
        int rootB = find(parent, b);
        if (rootA == rootB) {
            return false;
        }
        parent[rootA] = rootB;
        return true;
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        int[][] input3 = {
                {1,0,0,1},
                {0,1,1,0},
                {0,1,1,1},
                {1,0,1,1}
        };
        System.out.println(s.findCircleNum(input3));
    }
}
<reponame>t103z/interview
/**
 * Loads the persisted app state from localStorage.
 *
 * Returns undefined (letting the store fall back to defaults) when there is
 * no saved state, when the save is older than 300 seconds, or on any
 * storage/parse error.
 */
export const loadState = () => {
  try {
    const serializedState = localStorage.getItem('state');
    if (serializedState === null) {
      return undefined;
    }
    const serializedTime = localStorage.getItem('time');
    const saveTime = JSON.parse(serializedTime);
    // timeout 300s. Previously a missing 'time' key only expired the state
    // by accident of `Date.now() - null`; reject it explicitly.
    if (!saveTime || Date.now() - saveTime > 300 * 1000) {
      return undefined;
    }
    return JSON.parse(serializedState);
  } catch (error) {
    return undefined;
  }
};
/**
 * Persists the app state to localStorage together with a save timestamp,
 * so loadState can expire stale snapshots. Best-effort: storage failures
 * are silently ignored.
 */
export const saveState = (state) => {
  try {
    localStorage.setItem('state', JSON.stringify(state));
    localStorage.setItem('time', JSON.stringify(Date.now()));
  } catch (error) {
    // ignore error — persisting is best-effort
  }
};
<filename>src/plugins/openni_sensor/oni_adapter_plugin.hpp
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Or<NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#ifndef OPENNIPLUGIN_H
#define OPENNIPLUGIN_H

#include <astra_core/astra_core.hpp>
#include <astra_core/plugins/PluginBase.hpp>
#include <astra_core/plugins/PluginLogging.hpp>
#include "oni_devicestream.hpp"
#include "oni_device_streamset.hpp"
#include <memory>
#include <vector>
#include <OpenNI.h>
#include <astra/capi/streams/depth_types.h>
#include <astra/capi/streams/color_types.h>
#include <astra/capi/streams/stream_types.h>

namespace orbbec { namespace ni {

    // Astra plugin that bridges OpenNI devices into the Astra stream
    // model: it listens for OpenNI device hot-plug events and maintains
    // one device_streamset per connected device.
    class oni_adapter_plugin : public astra::plugins::plugin_base,
                               public openni::OpenNI::DeviceConnectedListener,
                               public openni::OpenNI::DeviceDisconnectedListener
    {
    public:
        // Registers for host events and initializes the OpenNI runtime.
        oni_adapter_plugin(astra::PluginServiceProxy* pluginService)
            : plugin_base(pluginService, "openni_sensor")
        {
            register_for_host_events();
            init_openni();
        }

        virtual ~oni_adapter_plugin();

        // NOTE(review): presumably invoked periodically by the plugin host;
        // confirm against plugin_base's contract.
        virtual void temp_update() override;

        // Non-copyable: owns the device streamsets.
        oni_adapter_plugin(const oni_adapter_plugin&) = delete;
        oni_adapter_plugin& operator=(const oni_adapter_plugin&) = delete;

    private:
        virtual void on_host_event(astra_event_id id, const void* data, size_t dataSize) override;

        void init_openni();

        // OpenNI hot-plug callbacks.
        virtual void onDeviceConnected(const openni::DeviceInfo* info) override;
        virtual void onDeviceDisconnected(const openni::DeviceInfo* info) override;

        // Streamset lookup by OpenNI device URI; add_or_get_device creates
        // the set when absent, find_device does not.
        device_streamset* add_or_get_device(const char* oniUri);
        device_streamset* find_device(const char* oniUri);

        astra_status_t read_streams();

        using streamset_ptr = std::unique_ptr<device_streamset>;
        // One streamset per connected physical device.
        std::vector<streamset_ptr> streamsets_;
    };
}}

#endif /* OPENNIPLUGIN_H */
|
package cmu.xprize.comp_counting;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.MotionEventCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
import cmu.xprize.util.TCONST;
/**
 * A single tappable "counting dot" image used by the counting component.
 * Tracks whether the dot may still be tapped and whether it has been tapped,
 * and swaps its drawable between a solid variant and a "_hollow" variant.
 *
 * Created by kevindeland on 10/23/17.
 */
@SuppressLint("AppCompatCustomView")
public class CCount_Dot extends ImageView {

    private Context context;

    // Base drawable name (without the "_hollow" suffix).
    String imageName;
    // Grid position of this dot within the counting layout.
    int row, col;

    private boolean isClickable = true;   // whether a tap may still register
    private boolean isClicked = false;    // whether the dot has been tapped
    private boolean isHollow = false;     // whether the hollow drawable is shown

    public CCount_Dot(Context context) {
        super(context);
        this.context = context;
    }

    public CCount_Dot(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.context = context;
    }

    public CCount_Dot(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        this.context = context;
    }

    /**
     * Initializes the dot's clickability, drawable name, and grid position.
     */
    public void setParams( boolean _isClickable, String _imageName, int _row, int _col) {
        this.isClickable = _isClickable;
        setImageName(_imageName);
        this.row = _row;
        this.col = _col;
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        final int action = MotionEventCompat.getActionMasked(event);
        Log.d(TCONST.COUNTING_DEBUG_LOG, "Dot Tapped!!!");
        if (action == MotionEvent.ACTION_DOWN) {
            // Each dot registers at most one tap until re-enabled.
            if(isClickable) {
                isClicked = true;
                isClickable = false;
            }
        }
        // Return false so parent views continue to receive the event,
        // matching the original behavior.
        return false;
    }

    /** Sets the base drawable name and loads that drawable. */
    public void setImageName(String _imageName) {
        this.imageName = _imageName;
        createDrawable(this.imageName);
    }

    /** Switches between the solid drawable and its "&lt;name&gt;_hollow" variant. */
    public void setHollow(boolean _isHollow) {
        isHollow = _isHollow;
        String imgPath = imageName;
        if (isHollow) {
            imgPath += "_hollow";
        }
        createDrawable(imgPath);
    }

    /** Resolves imgPath to a drawable resource by name and applies it. */
    private void createDrawable(String imgPath) {
        int imageResource = context.getResources().getIdentifier(imgPath, "drawable", context.getPackageName());
        if (imageResource == 0) {
            // getIdentifier returns 0 when no drawable with this name exists;
            // ContextCompat.getDrawable(context, 0) would throw
            // Resources.NotFoundException, so log and keep the current image.
            Log.e(TCONST.COUNTING_DEBUG_LOG, "Missing drawable resource: " + imgPath);
            return;
        }
        Drawable image = ContextCompat.getDrawable(context, imageResource);
        setImageDrawable(image);
    }

    public boolean getIsClickable() {
        return isClickable;
    }

    public void setIsClickable(boolean _isClickable) {
        isClickable = _isClickable;
    }

    public boolean getIsClicked() {
        return isClicked;
    }

    public void setIsClicked(boolean _isClicked) {
        isClicked = _isClicked;
    }
}
|
use std::collections::{HashMap, Weak};
// NOTE(review): `Weak` does not live in `std::collections` (it is
// `std::rc::Weak` / `std::sync::Weak`), and `Weak<T>` implements neither
// `Hash` nor `Eq`, so `HashMap<Weak<ImageHandle>, usize>` as written will not
// compile. `ImageHandle` below also does not define the `clone`/`clone_weak`
// methods used in `get_id` — confirm against the real `ImageHandle` type.

// Assigns stable integer ids to image handles and supports lookup both ways.
struct ImageHandleManager {
    // Next id to hand out; monotonically increasing, never reused.
    count: usize,
    // id -> strong handle.
    mapping: HashMap<usize, ImageHandle>,
    // weak handle -> id (reverse lookup, avoids keeping handles alive).
    reverse_mapping: HashMap<Weak<ImageHandle>, usize>,
}
impl ImageHandleManager {
    // Creates an empty manager; ids start at 0.
    fn new() -> Self {
        ImageHandleManager {
            count: 0,
            mapping: HashMap::new(),
            reverse_mapping: HashMap::new(),
        }
    }
    // Returns the existing id for this handle, or allocates and records a
    // fresh one (registering both forward and reverse mappings).
    fn get_id(&mut self, image_handle: ImageHandle) -> usize {
        if let Some(id) = self.reverse_mapping.get(&image_handle.clone_weak()) {
            *id
        } else {
            let id = self.count;
            self.count += 1;
            self.mapping.insert(id, image_handle.clone());
            self.reverse_mapping.insert(image_handle.clone_weak(), id);
            id
        }
    }
    // Looks up the strong handle previously registered under `id`.
    fn get_image_handle(&self, id: usize) -> Option<&ImageHandle> {
        self.mapping.get(&id)
    }
}
struct ImageHandle {
    // Define the fields and methods for the ImageHandle struct if necessary
}
#!/usr/bin/env bash
# (c) 2021 Tuplex team
# this script creates a deployable AWS Lambda zip package using docker

# check from where the script is invoked and run relative to the repo root
CWD="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P)"
echo "Executing buildwheel script located in $CWD"
pushd "$CWD" > /dev/null
cd .. # go to root of repo

# start code here...
LOCAL_BUILD_FOLDER=build-lambda
SRC_FOLDER=tuplex
DOCKER_IMAGE=tuplex/ci

# convert to absolute paths
get_abs_filename() {
  # $1 : relative filename
  echo "$(cd "$(dirname "$1")" && pwd)/$(basename "$1")"
}

# Quote expansions so paths containing spaces survive word splitting.
LOCAL_BUILD_FOLDER=$(get_abs_filename "$LOCAL_BUILD_FOLDER")
SRC_FOLDER=$(get_abs_filename "$SRC_FOLDER")
echo "Tuplex source: $SRC_FOLDER"
echo "Building lambda in: $LOCAL_BUILD_FOLDER"
mkdir -p "$LOCAL_BUILD_FOLDER"

echo "starting docker (this might take a while...)"
# start docker & volume & create awslambda target with correct settings
# the python version to use for lambda is in /opt/lambda-python/bin/python3.8
# In order to kick-off the build within the docker, use the following two commands:
# export LD_LIBRARY_PATH=/opt/lambda-python/lib:$LD_LIBRARY_PATH
# cmake -DBUILD_FOR_LAMBDA=ON -DBUILD_WITH_AWS=ON -DBOOST_ROOT=/opt/boost/python3.8/ -GNinja -DPYTHON3_EXECUTABLE=/opt/lambda-python/bin/python3.8 /code/tuplex
# --> The preload is necessary as a shared version of python is used.
# just use tplxlam as target, then run custom python script to package contents up.
# Note: use $DOCKER_IMAGE here — it was previously defined but the image name
# was hard-coded a second time in the docker invocation.
docker run --name lambda --rm -v "$SRC_FOLDER":/code/tuplex -v "$LOCAL_BUILD_FOLDER":/build "$DOCKER_IMAGE" bash -c "export LD_LIBRARY_PATH=/opt/lambda-python/lib:\$LD_LIBRARY_PATH && /opt/lambda-python/bin/python3.8 -m pip install cloudpickle numpy && cd /build && cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_FOR_LAMBDA=ON -DBUILD_WITH_AWS=ON -DBUILD_WITH_ORC=ON -DPYTHON3_EXECUTABLE=/opt/lambda-python/bin/python3.8 -DBOOST_ROOT=/opt/boost/python3.8/ -GNinja /code/tuplex && cmake --build . --target tplxlam && python3.8 /code/tuplex/python/zip_cc_runtime.py --input /build/dist/bin/tplxlam --runtime /build/dist/bin/tuplex_runtime.so --python /opt/lambda-python/bin/python3.8 --output /build/tplxlam.zip"

echo "docker command run, zipped Lambda file can be found in: ${LOCAL_BUILD_FOLDER}/tplxlam.zip"
# end code here...
popd > /dev/null
package controllers
import java.time.format.DateTimeFormatter
import java.time.{LocalDate, ZoneId}
import java.nio.file.{Paths, Files}
import java.nio.charset.StandardCharsets
import com.typesafe.scalalogging.LazyLogging
import io.circe.generic.auto._
import io.circe.parser._
import scala.sys.process._
object Utils4Controller extends LazyLogging {
  // Fetches 30-minute XEM/JPY OHLC candles from the Zaif chart API (via an
  // external `curl` process), starting at 2018-01-20 and ending today.
  // Requires the JVM default timezone to be Asia/Tokyo.
  def getCandles: List[Zaif.B] = {
    val f: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm")
    val localDate: LocalDate = LocalDate.from(f.parse("2018-01-20 16:00"))
    val zoneId: ZoneId = ZoneId.systemDefault
    require(zoneId.toString == "Asia/Tokyo")
    val from: Long = localDate.atStartOfDay(zoneId).toEpochSecond
    val to: Long = LocalDate.now(zoneId).atStartOfDay(zoneId).toEpochSecond
    // Strip backslashes and the surrounding quotes from the curl response,
    // then decode it as Zaif.A. Any parse/decode failure is rethrown.
    val x: String = (s"curl https://zaif.jp/zaif_chart_api/v1/history?symbol=XEM_JPY&resolution=30&from=$from&to=$to" !!).replace("""\""", "").trim.drop(1).dropRight(1)
    parse(x).fold(throw _, identity).as[Zaif.A].fold(throw _, identity).ohlc_data
  }
  // NOTE(review): this statement is part of the object body, so the network
  // call and file write run as a side effect the first time Utils4Controller
  // is referenced — confirm this is intentional.
  Files.write(Paths.get("output.csv"), Zaif.A(getCandles).toCsv.getBytes(StandardCharsets.UTF_8))
}
object Zaif {
  // case class B(volume: Double, average: Double, high: Double, low: Double, time: Long, close: Double, open: Double)
  /** One OHLC candle as returned by the Zaif chart API. */
  case class B(time: Long, open: Double, high: Double, low: Double, close: Double) {
    /** Renders the candle as one comma-separated CSV row. */
    def toCsvRow: String = List(time, open, high, low, close).mkString(",")
  }
  /** A series of candles. */
  case class A(ohlc_data: List[B]) {
    /**
     * Renders all candles, sorted by time, as CSV with a header row.
     * Uses mkString instead of reduce so an empty candle list yields just
     * the header instead of throwing UnsupportedOperationException.
     */
    def toCsv: String = "date,open,high,low,close\n" + ohlc_data.sortBy(_.time).map(_.toCsvRow).mkString("\n")
  }
}
#!/usr/bin/env bash
# Copyright (C) 2017-Present Pivotal Software, Inc. All rights reserved.
#
# This program and the accompanying materials are made available under
# the terms of the under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.

# Builds a versioned BOSH release tarball from a pinned commit.
set -ex

# Quote expansions defensively; version/sha files are single tokens but
# unquoted expansion would silently word-split unexpected content.
export VERSION="$(cat github-release/version)"
export COMMIT_SHA="$(cat github-release/commit_sha)"

pushd backup-and-restore-sdk-release
  git reset --hard "${COMMIT_SHA}"
  bosh-cli create-release \
    --version "${VERSION}" \
    --name="backup-and-restore-sdk-preview" \
    --tarball="../backup-and-restore-sdk-release-build/backup-and-restore-sdk-preview-${VERSION}.tgz" --force
popd
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as exec from '@actions/exec';
import * as tc from '@actions/tool-cache';
import * as fs from 'fs';
import * as path from 'path';
import * as release from './release';
/**
 * Installs the requested Flutter SDK (version + channel) into the tool
 * cache if not already present, exports FLUTTER_ROOT, and prepends the
 * Flutter and bundled Dart SDK bin directories to PATH. For the master
 * channel, switches the checkout to master and upgrades it.
 */
export async function getFlutter(
  version: string,
  channel: string
): Promise<void> {
  const platform = release.getPlatform();
  const trackingMaster = channel == 'master';

  // 'master' has no release manifest entries, so resolve against 'dev'.
  const resolved = await release.determineVersion(
    version,
    trackingMaster ? 'dev' : channel,
    platform
  );
  const selected = resolved.version;
  const downloadUrl = resolved.downloadUrl;

  // Cache key: plain channel name for master, otherwise "<version>-<channel>"
  // with '+' made filesystem-safe.
  let cleanver: string;
  if (trackingMaster) {
    cleanver = channel;
  } else {
    cleanver = `${selected.replace('+', '-')}-${channel}`;
  }

  let toolPath = tc.find('flutter', cleanver);
  if (!toolPath) {
    core.debug(`Downloading Flutter from Google storage ${downloadUrl}`);
    const sdkFile = await tc.downloadTool(downloadUrl);
    const sdkCache = await tmpDir(platform);
    const sdkDir = await extract(sdkFile, sdkCache, path.basename(downloadUrl));
    toolPath = await tc.cacheDir(sdkDir, 'flutter', cleanver);
  } else {
    core.debug(`Tool found in cache ${toolPath}`);
  }

  core.exportVariable('FLUTTER_ROOT', toolPath);
  core.addPath(path.join(toolPath, 'bin'));
  core.addPath(path.join(toolPath, 'bin', 'cache', 'dart-sdk', 'bin'));

  if (trackingMaster) {
    await exec.exec('flutter', ['channel', 'master']);
    await exec.exec('flutter', ['upgrade']);
  }
}
/**
 * Picks the base directory for temporary SDK downloads.
 * Prefers the runner-provided RUNNER_TEMP; otherwise falls back to a
 * platform-specific "<root>/actions/temp" location.
 */
function tmpBaseDir(platform: string): string {
  const runnerTemp = process.env['RUNNER_TEMP'] || '';
  if (runnerTemp) {
    return runnerTemp;
  }
  let root: string;
  if (platform === 'windows') {
    root = process.env['USERPROFILE'] || 'C:\\';
  } else if (platform === 'macos') {
    root = '/Users';
  } else {
    root = '/home';
  }
  return path.join(root, 'actions', 'temp');
}
async function tmpDir(platform: string): Promise<string> {
const baseDir = tmpBaseDir(platform);
const tempDir = path.join(
baseDir,
'temp_' + Math.floor(Math.random() * 2000000000)
);
await io.mkdirP(tempDir);
return tempDir;
}
/**
 * Extracts a downloaded Flutter SDK archive into sdkCache and returns the
 * path of the single directory the archive unpacks to.
 *
 * The original implementation called fs.statSync twice on the same path and
 * had a dead `!stats` branch (statSync throws on a missing file rather than
 * returning a falsy value); both are removed here.
 *
 * @param sdkFile path of the downloaded archive
 * @param sdkCache directory to extract into
 * @param originalFilename original archive name, used to pick tar.xz vs zip
 * @throws if sdkFile is not a regular file
 */
async function extract(
  sdkFile: string,
  sdkCache: string,
  originalFilename: string
): Promise<string> {
  const fileStats = fs.statSync(path.normalize(sdkFile));
  if (!fileStats.isFile()) {
    throw new Error(`Flutter sdk argument ${sdkFile} is not a file`);
  }
  if (originalFilename.endsWith('tar.xz')) {
    await tc.extractTar(sdkFile, sdkCache, 'x');
  } else {
    await tc.extractZip(sdkFile, sdkCache);
  }
  // The archive contains exactly one top-level "flutter" directory.
  return path.join(sdkCache, fs.readdirSync(sdkCache)[0]);
}
|
// Doxygen-generated navigation index for NeonDetectionPostProcessWorkload.cpp:
// each entry is [symbol name, documentation page anchor, submenu].
// Machine-generated data — do not hand-edit.
var _neon_detection_post_process_workload_8cpp =
[
    [ "MakeInfo", "_neon_detection_post_process_workload_8cpp.xhtml#ae0ae21bef03ed19f252c72c660e571a4", null ],
    [ "NeonDetectionPostProcessValidate", "_neon_detection_post_process_workload_8cpp.xhtml#a304243ccb52986da06388dc57deae88f", null ]
];
// app/scripts/controllers/program/program.js
(function() {
    'use strict';

    angular.module('sslv2App')
        .controller('ProgramCtrl', ProgramCtrl);

    ProgramCtrl.$inject = ['$state', 'ProgramService','$filter','$sce','$confirm', '$cookies'];

    /**
     * Lists the organization's programs (with cohorts rendered as HTML label
     * badges) and supports deleting a program after user confirmation.
     */
    function ProgramCtrl($state, ProgramService,$filter,$sce,$confirm,$cookies) {
        var vm = this;
        vm.show_program = false;
        vm.message = "";
        vm.deleteProgram = deleteProgram;
        vm.organization_name = $cookies.get('organization_name');

        ProgramService.getAll()
            .then(function(response){
                var data = _.get(response,"data","");
                if(data.success === true && data.total > 0){
                    // Turn each program's cohort names into trusted HTML badges.
                    var listProgram = _.map(data.data,function(value){
                        value.cohorts = _.map(value.cohorts,function(c){
                            return $sce.trustAsHtml('<span class="label label-primary">'+c+'</span>');
                        }).join(' ');
                        return value;
                    });
                    vm.programs = $filter('orderBy')(listProgram,'name');
                }
                vm.show_program = true;
            },function(error){
                // The original handler swallowed errors silently; log them so
                // failed loads are at least visible during debugging.
                console.log(error);
            });

        /**
         * Confirms with the user, then deletes the program and removes it
         * from the displayed list on success.
         */
        function deleteProgram(id,index){
            $confirm({
                title: 'Delete Program',
                text:'Are you sure you want to delete this record?'
            })
            .then(function(){
                ProgramService.deleteProgram(id)
                    .then(function(response){
                        if(response.data.success === true){
                            vm.programs.splice(index,1);
                        }
                    },function(error){
                        console.log(error);
                    })
            });
        }
    }
})();
|
import React from 'react';
import {
PieChart, Pie, Cell
} from 'recharts';
import { Typography } from '@material-ui/core';
import './CalorieDisplay.css';
function CalorieDisplay(props) {
const {data, size, colors} = props;
const pieData = [
{group: 'protein', value: data.protein * 4},
{group: 'carbs', value: data.carbs * 4},
{group: 'fat', value: data.fat * 9}
];
const cals = pieData.reduce((cals, macro) => cals += macro.value, 0);
const pieBackgroundStyle = {width: size, height: size, border: size / 10 + 'px solid #ddd', borderRadius: size};
return (
<div className="calorie-display-wrapper">
<div className="calorie-display-graphic">
<PieChart width={size} height={size} margin={{}}>
<Pie
data={pieData}
outerRadius="100%"
innerRadius="80%"
dataKey="value"
isAnimationActive={false}
>
{
pieData.map((entry, index) => <Cell key={`cell-${index}`} fill={colors[entry.group]} />)
}
</Pie>
</PieChart>
</div>
<div className="calorie-display-graphic" style={pieBackgroundStyle}></div>
<Typography variant="h5" component="span" className="calorie-display-count">{cals.toFixed(1)}</Typography>
<Typography variant="h6" component="span" className="calorie-display-label">Cals</Typography>
</div>
)
}
export default CalorieDisplay;
|
/*
* MLX90621.h
*
* Created on: 08.07.2014
* Author: <NAME>
*
* Adapted by https://github.com/longjos
* Adapted for use with Arduino UNO
*
* Adapted again by https://github.com/barucAlmaguer
* Adapted for adq. speed control, ems calibration etc
*/
#include "MLX90621.h"
// Stores the desired sensor refresh-rate code; only the low 4 bits of the
// config register's rate field are meaningful, so the rest are masked off.
void MLX90621::setRefreshRate(uint8_t refrate){
    refreshRate = refrate & 0x0F; // keep only the last 4 bits
}
// Stores the ADC resolution code (0..3) and precomputes the matching
// compensation factor 2^(3 - resolution) used throughout the calibration math.
void MLX90621::setResolution(uint8_t res){
    resolution = res & 0x03;
    resolution_comp = (float)(1 << (3 - resolution)); // == 2^(3-res)
}
// Sets the emissivity used for temperature compensation, capped at 1.0.
void MLX90621::setEmissivity(float ems){
    if (ems <= 1.0) {
        emissivity = ems;
    } else {
        emissivity = 1.0;
    }
}
// One-time sensor bring-up: start I2C, load calibration EEPROM, write the
// oscillator trim, push the configuration register, and precompute the
// calibration constants used by the temperature math.
void MLX90621::initialize() {
    Wire.begin();
    Wire.setClock(400000L); // run I2C at 400 kHz instead of the default 100 kHz
    delay(5);
    readEEPROM();
    writeTrimmingValue();
    setConfiguration();
    preCalculateConstants();
}
// Captures one frame. If the config register reports a POR/brown-out, the
// sensor lost its settings and is re-initialized first. Raw PTAT and IR data
// are always read; the (slower) temperature conversion is optional.
void MLX90621::measure(bool calculate_temps) {
    if (checkConfig()) {
        // Sensor was reset: restore trim + configuration from EEPROM.
        readEEPROM();
        writeTrimmingValue();
        setConfiguration();
    }
    readPTAT();
    readIR(); //5ms
    if(calculate_temps){
        calculateTA();
        readCPIX();
        calculateTO(); //41ms
    }
}
// Returns the last computed temperature (deg C) for pixel `num` (0..63),
// or 0 when the index is out of range.
float MLX90621::getTemperature(uint8_t num) {
    // `num` is unsigned, so the original `num >= 0` half of the check was a
    // tautology (and a compiler warning); only the upper bound matters.
    if (num < 64) {
        return temperatures[num];
    }
    return 0;
}
// Returns the ambient (die) temperature in deg C computed by calculateTA().
float MLX90621::getAmbient() {
    return Tambient;
}
// Writes the configuration register (refresh rate + resolution) and then
// reads it back, adopting whatever resolution the sensor actually latched.
void MLX90621::setConfiguration() {
    // Select refresh rate (low nibble).
    byte cfg_LSB = refreshRate; //0bxxxxNNNN
    // Select resolution (0b00 - 0b11) in bits 4..5.
    bitWrite(cfg_LSB, 5, (resolution >> 1) & 1); //0bxxxNnnnn
    bitWrite(cfg_LSB, 4, (resolution >> 0) & 1); //0bxxNnnnnn
    byte defaultConfig_H = 0b01000110; //kmoto: See data sheet p.11 and 25
    Wire.beginTransmission(0x60);
    Wire.write(0x03);
    // The MLX90621 write protocol requires each data byte to be preceded by
    // (value - 0x55) as a check byte.
    Wire.write((byte) cfg_LSB - 0x55);
    Wire.write(cfg_LSB);
    Wire.write(defaultConfig_H - 0x55);
    Wire.write(defaultConfig_H);
    Wire.endTransmission();
    //Read the resolution from the config register
    uint16_t config_actual = readConfig();
    uint8_t real_resolution = (config_actual & 0x30) >> 4;
    setResolution(real_resolution);
    //Serial.print("Res=");
    //Serial.println(resolution, BIN);
    //Serial.print("Cfg=");
    //Serial.println(config_actual, BIN);
}
void MLX90621::readEEPROM() { // Read in blocks of 32 bytes to accommodate the Wire library's buffer
    // The calibration EEPROM lives at I2C address 0x50 and is 256 bytes.
    for(int j=0;j<256;j+=32) {
        Wire.beginTransmission(0x50);
        Wire.write(j);
        byte rc = Wire.endTransmission(false); // NOTE(review): rc is never checked
        Wire.requestFrom(0x50, 32);
        for (int i = 0; i < 32; i++) {
            eepromData[j+i] = (uint8_t) Wire.read();
        }
    }
}
// Writes the oscillator trim value from EEPROM into the sensor. Per the
// MLX90621 protocol, each byte is preceded by a check byte (value - 0xAA
// for the trim, 0x56/0x00 for the fixed MSB pair).
void MLX90621::writeTrimmingValue() {
    Wire.beginTransmission(0x60);
    Wire.write(0x04);
    Wire.write((byte) eepromData[OSC_TRIM_VALUE] - 0xAA);
    Wire.write(eepromData[OSC_TRIM_VALUE]);
    Wire.write(0x56);
    Wire.write(0x00);
    Wire.endTransmission();
}
// Computes the ambient (die) temperature from the raw PTAT reading using the
// datasheet formula: Ta = (-Kt1 + sqrt(Kt1^2 - 4*Kt2*(Vth - PTAT)))/(2*Kt2) + 25.
void MLX90621::calculateTA(void) {
    Tambient = ((-k_t1 + sqrt(sq(k_t1) - (4 * k_t2 * (v_th - (float) ptat))))
            / (2 * k_t2)) + 25.0;
}
// Unpacks the calibration constants from the EEPROM image (offsets, scales,
// compensation-pixel terms, TGC, and the Kt1/Kt2/Vth ambient-temperature
// coefficients), scaling each by the current resolution compensation factor.
// Field layout follows the MLX90621 datasheet EEPROM map.
void MLX90621::preCalculateConstants() {
    //emissivity = unsigned_16(eepromData[CAL_EMIS_H], eepromData[CAL_EMIS_L]) / 32768.0;
    a_common = twos_16(eepromData[CAL_ACOMMON_H], eepromData[CAL_ACOMMON_L]);
    a_i_scale = (int16_t)(eepromData[CAL_AI_SCALE] & 0xF0) >> 4;
    b_i_scale = (int16_t) eepromData[CAL_BI_SCALE] & 0x0F;
    // Compensation-pixel calibration.
    alpha_cp = unsigned_16(eepromData[CAL_alphaCP_H], eepromData[CAL_alphaCP_L]) /
            (pow(2.0, eepromData[CAL_A0_SCALE]) * resolution_comp);
    a_cp = (float) twos_16(eepromData[CAL_ACP_H], eepromData[CAL_ACP_L]) / resolution_comp;
    b_cp = (float) twos_8(eepromData[CAL_BCP]) / (pow(2.0, (float)b_i_scale) * resolution_comp);
    tgc = (float) twos_8(eepromData[CAL_TGC]) / 32.0;
    // Ambient-temperature (PTAT) coefficients.
    k_t1_scale = (int16_t) (eepromData[KT_SCALE] & 0xF0) >> 4;
    k_t2_scale = (int16_t) (eepromData[KT_SCALE] & 0x0F) + 10;
    v_th = (float) twos_16(eepromData[VTH_H], eepromData[VTH_L]);
    v_th = v_th / resolution_comp;
    k_t1 = (float) twos_16(eepromData[KT1_H], eepromData[KT1_L]);
    k_t1 /= (pow(2, k_t1_scale) * resolution_comp);
    k_t2 = (float) twos_16(eepromData[KT2_H], eepromData[KT2_L]);
    k_t2 /= (pow(2, k_t2_scale) * resolution_comp);
}
// Converts the raw IR frame into per-pixel object temperatures (deg C),
// tracking the frame's min/max. Follows the MLX90621 datasheet chain:
// per-pixel offset -> TGC (compensation pixel) -> emissivity -> alpha ->
// fourth-root radiometric equation.
void MLX90621::calculateTO() {
    float v_cp_off_comp = (float) cpix - (a_cp + b_cp * (Tambient - 25.0));
    tak4 = pow((float) Tambient + 273.15, 4.0);
    // Track min/max with a first-iteration flag. The original used NULL (0)
    // as an "unset" sentinel for the float members, which mis-handled frames
    // containing a pixel at exactly 0 deg C.
    bool extremesSet = false;
    for (int i = 0; i < 64; i++) {
        a_ij = ((float) a_common + eepromData[i] * pow(2.0, a_i_scale)) / resolution_comp;
        b_ij = (float) twos_8(eepromData[0x40 + i]) / (pow(2.0, b_i_scale) * resolution_comp);
        v_ir_off_comp = (float) irData[i] - (a_ij + b_ij * (Tambient - 25.0));
        v_ir_tgc_comp = (float) v_ir_off_comp - tgc * v_cp_off_comp;
        float alpha_ij = ((float) unsigned_16(eepromData[CAL_A0_H], eepromData[CAL_A0_L]) / pow(2.0, (float) eepromData[CAL_A0_SCALE]));
        alpha_ij += ((float) eepromData[0x80 + i] / pow(2.0, (float) eepromData[CAL_DELTA_A_SCALE]));
        alpha_ij = alpha_ij / resolution_comp;
        //ksta = (float) twos_16(eepromData[CAL_KSTA_H], eepromData[CAL_KSTA_L]) / pow(2.0, 20.0);
        //alpha_comp = (1 + ksta * (Tambient - 25.0)) * (alpha_ij - tgc * alpha_cp);
        alpha_comp = (alpha_ij - tgc * alpha_cp); // For my MLX90621 the ksta calibrations were 0
                                                  // so I can ignore them and save a few cycles
        v_ir_comp = v_ir_tgc_comp / emissivity;
        float temperature = pow((v_ir_comp / alpha_comp) + tak4, 1.0 / 4.0) - 273.15;
        temperatures[i] = temperature;
        if (!extremesSet || temperature < minTemp) {
            minTemp = temperature;
        }
        if (!extremesSet || temperature > maxTemp) {
            maxTemp = temperature;
        }
        extremesSet = true;
    }
}
// Returns the lowest pixel temperature (deg C) of the last computed frame.
float MLX90621::getMinTemp() {
    return minTemp;
}
// Returns the highest pixel temperature (deg C) of the last computed frame.
float MLX90621::getMaxTemp() {
    return maxTemp;
}
// Reads the 64 raw IR pixel values, 16 pixels (32 bytes) per transaction to
// stay within the Wire library's buffer limit. Command 0x02 with step 1 and
// count 0x20 reads 32 consecutive RAM bytes starting at pixel offset j.
void MLX90621::readIR() {
    for (int j = 0; j < 64; j += 16) { // Read in blocks of 32 bytes to overcome Wire buffer limit
        Wire.beginTransmission(0x60);
        Wire.write(0x02);
        Wire.write(j);
        Wire.write(0x01);
        Wire.write(0x20);
        Wire.endTransmission(false);
        Wire.requestFrom(0x60, 32);
        for (int i = 0; i < 16; i++) {
            // Pixel data is little-endian: low byte first.
            uint8_t pixelDataLow = (uint8_t) Wire.read();
            uint8_t pixelDataHigh = (uint8_t) Wire.read();
            irData[j + i] = twos_16(pixelDataHigh, pixelDataLow);
        }
    }
}
// Reads the raw PTAT (proportional-to-absolute-temperature) sensor value
// from RAM address 0x40 (little-endian, unsigned).
void MLX90621::readPTAT() {
    Wire.beginTransmission(0x60);
    Wire.write(0x02);
    Wire.write(0x40);
    Wire.write(0x00);
    Wire.write(0x01);
    Wire.endTransmission(false);
    Wire.requestFrom(0x60, 2);
    byte ptatLow = Wire.read();
    byte ptatHigh = Wire.read();
    ptat = (ptatHigh * 256) + ptatLow;
}
// Reads the compensation-pixel value from RAM address 0x41
// (little-endian, two's-complement signed).
void MLX90621::readCPIX() {
    Wire.beginTransmission(0x60);
    Wire.write(0x02);
    Wire.write(0x41);
    Wire.write(0x00);
    Wire.write(0x01);
    Wire.endTransmission(false);
    Wire.requestFrom(0x60, 2);
    byte cpixLow = Wire.read();
    byte cpixHigh = Wire.read();
    cpix = twos_16(cpixHigh, cpixLow);
}
// Combines two bytes (big-endian order) into a signed 16-bit value,
// interpreting the 16-bit word as two's complement.
int16_t MLX90621::twos_16(uint8_t highByte, uint8_t lowByte){
    uint16_t word = 256 * highByte + lowByte;
    return (word > 32767) ? (int16_t) (word - 65536) : (int16_t) word;
}
// Reinterprets a raw byte as a signed two's-complement 8-bit value.
int8_t MLX90621::twos_8(uint8_t byte) {
    return (byte > 127) ? (int8_t) (byte - 256) : (int8_t) byte;
}
// Combines two bytes (big-endian order) into an unsigned 16-bit value.
uint16_t MLX90621::unsigned_16(uint8_t highByte, uint8_t lowByte){
    uint16_t result = highByte;
    result = (result << 8) | lowByte;
    return result;
}
// Reads the 16-bit configuration register from RAM address 0x92
// (little-endian: low byte arrives first).
uint16_t MLX90621::readConfig() {
    Wire.beginTransmission(0x60);
    Wire.write(0x02);
    Wire.write(0x92);
    Wire.write(0x00);
    Wire.write(0x01);
    Wire.endTransmission(false);
    Wire.requestFrom(0x60, 2);
    byte configLow = Wire.read();
    byte configHigh = Wire.read();
    uint16_t config = ((uint16_t) (configHigh << 8) | configLow);
    return config;
}
//Poll the MLX90621 for its current status
//Returns true if the POR/Brown out bit is set
boolean MLX90621::checkConfig() {
    // Bit 10 (mask 0x0400) of the config register is the POR/brown-out flag;
    // it reads 0 after a reset, so the negation yields true when the sensor
    // needs re-initialization. NOTE(review): the mask already isolates bit 10
    // while the shift uses POR_TEST — presumably POR_TEST == 10; the boolean
    // result is the same either way, but verify against the header constant.
    bool check = !((readConfig() & 0x0400) >> POR_TEST);
    return check;
}
|
/**
 * Finds and prints the maximum value in a fixed integer array.
 */
public class MaximumElement
{
    public static void main(String[] args)
    {
        int[] nums = {15, 24, 7, 32, 18, 27, 81, 35, 14, 42 };
        // Seed with the first element rather than 0 so arrays containing
        // only negative values would also be handled correctly.
        int maxVal = nums[0];
        for (int i = 1; i < nums.length; i++) {
            if (nums[i] > maxVal) {
                maxVal = nums[i];
            }
        }
        System.out.println("Maximum Value is " + maxVal);
    }
}
import React, { Component } from "react";
import styles from "../Styles/main_section.scss";
import PlaylistImage from "../Assets/Images/aic_dirt.jpg";
import OptionsImg from "../Assets/Icons/options.svg";
import FilterImg from "../Assets/Icons/magnifying-glass.svg";
import Songs from "../Containers/Songs";
import AudioPlayerService from "../Utils/audioPlayerService";
import * as actions from "../Store/actions/player";
import { connect } from "react-redux";
// import songs from '../Utils/songs';
import Options from "../Components/Options";
import Dropdown from "../Components/Dropdown";
import WrapperAuto from "../Utils/WrapperAuto";
const audioPlayer = AudioPlayerService;
class MainSection extends Component {
onPlayRequested = (index, song) => {
audioPlayer.playSound(index);
this.props.togglePlay(index, true, song);
};
render() {
// onClick={this.props.closeAll}
return (
<div className={styles.mainSection}>
<div className={styles.mainBlock}>
<img src={PlaylistImage} className={styles.mainImage} />
<div className={styles.infoAndButtons}>
<div className={styles.info}>
<p>Playlist</p>
<div className={styles.playlistName}>Top pop 2019</div>
<div className={styles.playlistNumbers}>
<div>10 songs</div>
<div>45 minutes</div>
</div>
</div>
<div className={styles.buttons}>
<div className={styles.play}>Play</div>
<img
className={this.props.optionsOpn && styles.Clicked}
src={OptionsImg}
onClick={this.props.toggleOptions}
/>
<WrapperAuto
change={this.props.change}
className={styles.WrapperAuto}
>
<Options open={this.props.optionsOpn} />
</WrapperAuto>
</div>
</div>
{/* <WrapperAuto change={this.props.change} > */}
<Dropdown open={this.props.dropdown} />
{/* </WrapperAuto> */}
</div>
{/* <div className={classNames(styles.mainBlock, styles.lastBlock)}> */}
<div className={styles.mainBlock}>
<div className={styles.filter}>
<img src={FilterImg} />
<div className={styles.Form}>
<input type="text" name="search" required autoComplete="off" />
<label for="search" className={styles.LabelSearch}>
<span className={styles.ContentSearch}>Search</span>
</label>
</div>
</div>
<div className={styles.list}>
<div className={styles.infoLabels}>
<div>#</div>
<div>Title</div>
<div>Artist</div>
<div>Duration</div>
</div>
<Songs onPlayHandle={this.onPlayRequested} />
</div>
</div>
</div>
);
}
}
// Exposes the player slice fields this component reads from the store.
const mapStateToProps = state => ({
  playing: state.player.playing,
  optionsOpn: state.player.optionsOpn,
  dropdown: state.player.dropdown
});
// Wraps the player action creators in dispatch-bound handlers.
// Each handler intentionally returns undefined, as before.
const mapDispatchToProps = dispatch => ({
  togglePlay: (index, playing, song) => {
    dispatch(actions.togglePlay(index, playing, song));
  },
  toggleOptions: () => {
    dispatch(actions.toggleOptions());
  },
  change: () => {
    dispatch(actions.change());
  }
});
export default connect(mapStateToProps, mapDispatchToProps)(MainSection);
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# Exports the Flutter build environment consumed by the Xcode build phases.
# All paths below are machine-specific to the machine that generated this file.
export "FLUTTER_ROOT=/Users/renzhayi/SDK/FlutterSDK/flutter"
export "FLUTTER_APPLICATION_PATH=/Users/renzhayi/Projects/Demo/rongcloud-im-flutter-sdk/example"
export "FLUTTER_TARGET=lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build/ios"
export "FLUTTER_FRAMEWORK_DIR=/Users/renzhayi/SDK/FlutterSDK/flutter/bin/cache/artifacts/engine/ios"
#!/usr/bin/env bash
#
# Copyright 2017 Hortonworks.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# defaults
verbose=false
shell_dir=$(dirname $0)
bootstrap_dir=${shell_dir}/..
# Fall back to the dev environment unless the caller set one explicitly;
# the environment name selects which streamline-*.yaml config is read.
[ -z $UBER_RUNTIME_ENVIRONMENT ] && UBER_RUNTIME_ENVIRONMENT=dev
CONFIG_FILE_PATH=${bootstrap_dir}/../conf/streamline-${UBER_RUNTIME_ENVIRONMENT}.yaml
# Which java to use: prefer JAVA_HOME, else whatever is on PATH.
if [ -z "${JAVA_HOME}" ]; then
  JAVA="java"
else
  JAVA="${JAVA_HOME}/bin/java"
fi
# Runs the given command (via eval). When $verbose is "true" the command and
# its full output are echoed; otherwise only the responseMessage field of the
# response is shown. Aborts the whole migration on a non-zero exit status.
function run_cmd {
  cmd=$*
  if [[ $verbose == "true" ]]
  then
    echo $cmd
  fi
  response=$(eval $cmd)
  # $? reflects the exit status of the eval'd command above.
  if [ $? -ne 0 ] ; then
    # Fixed typo in the user-facing message ("quiting" -> "quitting").
    echo "Command failed to execute, quitting the migration ..."
    exit 1
  fi
  if [[ $verbose == "true" ]]
  then
    echo $response
  else
    echo $response | grep -o '"responseMessage":[^"]*"[^"]*"'
  fi
  echo "--------------------------------------"
}
# Extracts the first numeric "id" field from a JSON-ish response string.
function getId {
  echo "$1" | grep -o -E "\"id\":[0-9]+" | head -n1 | cut -d : -f2
}
# Looks up the ROLE_ADMIN role via the catalog REST API and prints its id.
# Uses SPNEGO/Kerberos negotiation with a shared cookie jar, like all other
# curl helpers in this script.
function getAdminRoleId {
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X GET ${CATALOG_ROOT_URL}/roles?name=ROLE_ADMIN -H 'Content-Type: application/json' ${HTTP_HEADERS_FOR_CURL}"
    response=$(eval $cmd)
    getId "$response"
}
# PUT a JSON payload file to ${CATALOG_ROOT_URL}$1/$2.
#   $1: resource URI, $2: resource id, $3: path to JSON data file
function put {
    uri=$1/$2
    data=$3
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X PUT ${CATALOG_ROOT_URL}$uri --data @$data -H 'Content-Type: application/json' ${HTTP_HEADERS_FOR_CURL}"
    echo "PUT $data"
    run_cmd $cmd
}
# POST a JSON payload file to ${CATALOG_ROOT_URL}$1.
#   $1: resource URI, $2: path to JSON data file
function post {
    uri=$1
    data=$2
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X POST ${CATALOG_ROOT_URL}$uri --data @$data -H 'Content-Type: application/json' ${HTTP_HEADERS_FOR_CURL}"
    echo "POST $data"
    run_cmd $cmd
}
# Uploads the sample Kafka source bundle as a multipart form POST.
function add_sample_topology_component_bundle {
    echo "POST sample_bundle"
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X POST -i -F topologyComponentBundle=@$bootstrap_dir/kafka-topology-bundle ${CATALOG_ROOT_URL}/streams/componentbundles/SOURCE/ ${HTTP_HEADERS_FOR_CURL}"
    run_cmd $cmd
}
# Uploads a topology component bundle file as a multipart form POST.
#   $1: componentbundles URI, $2: path to the bundle JSON file
function add_topology_component_bundle {
    uri=$1
    data=$2
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X POST -i -F topologyComponentBundle=@$data ${CATALOG_ROOT_URL}$uri ${HTTP_HEADERS_FOR_CURL}"
    echo "POST $data"
    run_cmd $cmd
}
# Replaces an existing topology component bundle: first resolves the bundle
# id by subType (STORM engine), then PUTs the new bundle file to that id.
#   $1: componentbundles URI, $2: bundle JSON file, $3: bundle subType
function put_topology_component_bundle {
    uri=$1
    data=$2
    subType=$3
    out=$(curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X GET -H "Content-Type: application/json" -H "Cache-Control: no-cache" "${CATALOG_ROOT_URL}$uri?subType=${subType}&engine=STORM" ${HTTP_HEADERS_FOR_CURL})
    bundleId=$(getId $out)
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X PUT -i -F topologyComponentBundle=@$data ${CATALOG_ROOT_URL}$uri/$bundleId ${HTTP_HEADERS_FOR_CURL}"
    echo "PUT $data"
    run_cmd $cmd
}
# Replaces a service bundle with the given JSON payload via PUT.
#   $1: servicebundles URI, $2: path to the bundle JSON file
function put_service_bundle {
    uri=$1
    data=$2
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X PUT ${CATALOG_ROOT_URL}$uri --data @$data -H 'Content-Type: application/json' ${HTTP_HEADERS_FOR_CURL}"
    echo "PUT $data"
    run_cmd $cmd
}
# Triggers the server-side upgrade endpoint that recomputes digests for all
# registered custom processors.
function update_custom_processors_with_digest {
    echo "Running update script to update all custom processors with digests"
    cp_upgrade_uri_suffix="/streams/componentbundles/PROCESSOR/custom/upgrade"
    cmd="curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -sS -X PUT ${CATALOG_ROOT_URL}$cp_upgrade_uri_suffix -H 'Content-Type: application/json' ${HTTP_HEADERS_FOR_CURL}"
    run_cmd $cmd
}
#Below command to update storm version will be called by RE script. Need to remove later. Adding now for convenience
update_storm_version_command="$bootstrap_dir/update-storm-version.sh 1.1.0.3.0.0.0-453"
run_cmd $update_storm_version_command
#---------------------------------------------
# Get catalogRootUrl from configuration file
#---------------------------------------------
CONF_READER_MAIN_CLASS=com.hortonworks.streamline.storage.tool.sql.PropertiesReader
# Build the classpath from every jar shipped under bootstrap/lib.
for file in "${bootstrap_dir}"/lib/*.jar;
do
    CLASSPATH="$CLASSPATH":"$file"
done
CATALOG_ROOT_URL_PROPERTY_KEY=catalogRootUrl
# Directories holding the bundle definitions that update_bundles pushes.
component_dir=${bootstrap_dir}/components
service_dir=${bootstrap_dir}/services
user_role_dir=${bootstrap_dir}/users_roles
storm_dir=${bootstrap_dir}/engines/storm
piper_dir=${bootstrap_dir}/engines/piper
athenax_dir=${bootstrap_dir}/engines/athenax
echo "Configuration file: ${CONFIG_FILE_PATH}"
# Read catalogRootUrl out of the YAML config with the PropertiesReader tool.
CATALOG_ROOT_URL=`exec ${JAVA} -cp ${CLASSPATH} ${CONF_READER_MAIN_CLASS} ${CONFIG_FILE_PATH} ${CATALOG_ROOT_URL_PROPERTY_KEY}`
# if it doesn't exit with code 0, just give up
if [ $? -ne 0 ]; then
  exit 1
fi
# When an uber runtime HTTP port is provided, substitute it into the URL.
if [ ! -z "$UBER_PORT_HTTP" ]; then
    ORIGINAL_CATALOG_URL_PORT=`echo $CATALOG_ROOT_URL | awk -F[/:] '{print $5}'`
    CATALOG_ROOT_URL=${CATALOG_ROOT_URL/$ORIGINAL_CATALOG_URL_PORT/$UBER_PORT_HTTP}
fi
echo "Catalog Root URL: ${CATALOG_ROOT_URL}"
echo "Component bundle Root dir: ${component_dir}"
echo "Service bundle Root dir: ${service_dir}"
echo "User/Role bundle Root dir: ${user_role_dir}"
# Pushes the current set of Storm component bundles (sources/sinks/topology)
# and service bundles to the catalog. Existing bundles are updated via PUT;
# the druid service bundle is newly created via POST.
function update_bundles {
    # === Source ===
    put_topology_component_bundle /streams/componentbundles/SOURCE ${storm_dir}/components/sources/kafka-source-topology-component.json KAFKA
    # === Processor ===
    # === Sink ===
    put_topology_component_bundle /streams/componentbundles/SINK ${storm_dir}/components/sinks/hdfs-sink-topology-component.json HDFS
    put_topology_component_bundle /streams/componentbundles/SINK ${storm_dir}/components/sinks/jdbc-sink-topology-component.json JDBC
    put_topology_component_bundle /streams/componentbundles/SINK ${storm_dir}/components/sinks/hive-sink-topology-component.json HIVE
    put_topology_component_bundle /streams/componentbundles/SINK ${storm_dir}/components/sinks/druid-sink-topology-component.json DRUID
    # === Topology ===
    put_topology_component_bundle /streams/componentbundles/TOPOLOGY ${storm_dir}/topology/storm-topology-component.json TOPOLOGY
    # === Service Bundle ===
    put_service_bundle /servicebundles/KAFKA ${service_dir}/kafka-bundle.json
    put_service_bundle /servicebundles/STORM ${service_dir}/storm-bundle.json
    put_service_bundle /servicebundles/ZOOKEEPER ${service_dir}/zookeeper-bundle.json
    post /servicebundles ${service_dir}/druid-bundle.json
}
function add_udfs {
    # Registers Streamline's built-in UDF/UDAF catalog entries with the REST
    # API. The first group uploads the streamline-functions jar alongside each
    # function's metadata; the second group ("className":"builtin") posts
    # metadata only for functions supplied by the SQL engine itself.
    dir=$(dirname $0)/../..
    # Locate the functions jar: packaged location first, then the local build
    # output as a fallback for developer machines.
    jarFile="$(find ${bootstrap_dir}/udf-jars/ -name 'streamline-functions-*.jar')"
    if [[ ! -f ${jarFile} ]]
    then
        # try local build path
        jarFile="$(find ${dir}/streams/functions/target/ -name 'streamline-functions-*.jar')"
        if [[ ! -f ${jarFile} ]]
        then
            echo "Could not find streamline-functions jar, Exiting ..."
            exit 1
        fi
    fi
    echo " - variance"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"VARIANCE_FN", "displayName": "VARIANCE", "description": "Variance", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.Variance", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - variancep"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"VARIANCEP_FN", "displayName": "VARIANCEP", "description": "Population variance", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.Variancep", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - stddev"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"STDDEV_FN", "displayName": "STDDEV", "description": "Standard deviation", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.Stddev", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - stddevp"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"STDDEVP_FN", "displayName": "STDDEVP", "description": "Population standard deviation", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.Stddevp", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - concat"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"CONCAT_FN", "displayName": "CONCAT", "description": "Concatenate", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Concat", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - count"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"COUNT_FN", "displayName": "COUNT","description": "Count", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.LongCount", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    # SUBSTRING is registered twice on purpose: two overloads (start-only and
    # start+length), distinct implementing classes.
    echo " - substring"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"SUBSTRING_FN", "displayName": "SUBSTRING", "description": "Returns sub-string of a string starting at some position", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Substring", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - substring"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"SUBSTRING_FN", "displayName": "SUBSTRING", "description": "Returns a sub-string of a string starting at some position and is of given length", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Substring2", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    # POSITION likewise has two overloads.
    echo " - position"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"POSITION_FN", "displayName": "POSITION", "description": "Returns the position of the first occurrence of sub-string in a string", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Position", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - position"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"POSITION_FN", "displayName": "POSITION", "description": "Returns the position of the first occurrence of sub-string in a string starting the search from an index", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Position2", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - avg"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"AVG_FN", "displayName": "AVG","description": "Average", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.Mean", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - trim"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"TRIM_FN", "displayName": "TRIM", "description": "Returns a string with any leading and trailing whitespaces removed", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Trim", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - trim2"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"TRIM_FN", "displayName": "TRIM2", "description": "Returns a string with specified leading and trailing character removed", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Trim2", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - ltrim"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"LTRIM_FN", "displayName": "LTRIM", "description": "Removes leading whitespaces from the input", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Ltrim", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - ltrim2"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"LTRIM_FN", "displayName": "LTRIM", "description": "Removes specified leading character from the input", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Ltrim2", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - rtrim"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"RTRIM_FN", "displayName": "RTRIM", "description": "Removes trailing whitespaces from the input", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Rtrim", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - rtrim2"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"RTRIM_FN", "displayName": "RTRIM", "description": "Removes specified trailing character from the input", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Rtrim2", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - overlay"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"OVERLAY_FN", "displayName": "OVERLAY", "description": "Replaces a substring of a string with a replacement string", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Overlay", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - overlay"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"OVERLAY_FN", "displayName": "OVERLAY", "description": "Replaces a substring of a string with a replacement string", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Overlay2", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - divide"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"DIVIDE_FN", "displayName": "DIVIDE", "description": "Divides input with given divisor", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Divide", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - exists"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"EXISTS_FN", "displayName": "EXISTS", "description": "returns 1 if input is not null otherwise returns 0", "type":"FUNCTION", "className":"com.hortonworks.streamline.streams.udf.Exists", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    echo " - sum"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfJarFile=@${jarFile} -F udfConfig='{"name":"SUM_FN", "displayName": "SUM","description": "Sum", "type":"AGGREGATE", "className":"com.hortonworks.streamline.streams.udaf.NumberSum", "builtin":true};type=application/json' "${HTTP_HEADERS_FOR_CURL}"
    # --- Engine-builtin functions (metadata only, no jar upload) ---
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"ABS", "displayName": "ABS", "description": "Returns the absolute value of the argument", "type":"FUNCTION", "argTypes": ["DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"ACOS", "displayName": "ACOS", "description": "Returns the arccosine of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"ASIN", "displayName": "ASIN", "description": "Returns the arcsine of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"ATAN", "displayName": "ATAN", "description": "Returns the arc tangent of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"ATAN2", "displayName": "ATAN2", "description": "Returns the arc tangent of the argument coordinates", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE", "LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"CEIL", "displayName": "CEIL", "description": "Rounds up, returning the smallest integer that is greater than or equal to the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "LONG", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"CHARACTER_LENGTH", "displayName": "CHARACTER_LENGTH", "description": "Returns the number of characters in a character string", "type":"FUNCTION", "argTypes": ["LONG"], "returnType": "STRING", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"CHAR_LENGTH", "displayName": "CHAR_LENGTH", "description": "Returns the number of characters in a character string", "type":"FUNCTION", "argTypes": ["LONG"], "returnType": "STRING", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"COS", "displayName": "COS", "description": "Returns the cosine of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"COT", "displayName": "COT", "description": "Returns the cotangent of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"DEGREES", "displayName": "DEGREES", "description": "Converts the argument from radians to degrees", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"EXP", "displayName": "EXP", "description": "Returns e raised to the power of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"FLOOR", "displayName": "FLOOR", "description": "Rounds down, returning the largest integer that is less than or equal to the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "LONG", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"INITCAP", "displayName": "INITCAP", "description": "Convert the first letter of each word in the argument to upper case and the rest to lower case", "type":"FUNCTION", "argTypes": ["STRING"], "returnType": "STRING", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"LN", "displayName": "LN", "description": "Returns the natural logarithm of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"LOG10", "displayName": "LOG10", "description": "Returns the base 10 logarithm of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"LOWER", "displayName": "LOWER", "description": "Returns a character string converted to lower case", "type":"FUNCTION", "argTypes": ["STRING"], "returnType": "STRING", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"MOD", "displayName": "MOD", "description": "Returns the remainder (modulus) of the first argument divided by the second argument", "type":"FUNCTION", "argTypes": ["LONG", "LONG"], "returnType": "LONG", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    # FIX: PI and RAND previously posted malformed JSON ('"argTypes": ,');
    # zero-argument functions now declare an empty argTypes list.
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"PI", "displayName": "PI", "description": "Returns a value that is closer than any other value to pi", "type":"FUNCTION", "argTypes": [], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"POWER", "displayName": "POWER", "description": "Returns the value of the first argument raised to the power of the second argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE", "LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"RADIANS", "displayName": "RADIANS", "description": "Converts the argument from degrees to radians", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"RAND", "displayName": "RAND", "description": "Generates a random double between 0 and 1 (inclusive)", "type":"FUNCTION", "argTypes": [], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"RAND_INTEGER", "displayName": "RAND_INTEGER", "description": "Generates a random integer between 0 and the argument (exclusive)", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "LONG", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"ROUND", "displayName": "ROUND", "description": "Rounds the first argument to the xth places right to the decimal point, where x is the second argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE", "LONG"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"SIGN", "displayName": "SIGN", "description": "Returns the signum of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "LONG", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"SIN", "displayName": "SIN", "description": "Returns the sine of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"SQRT", "displayName": "SQRT", "description": "Returns the square root of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"TAN", "displayName": "TAN", "description": "Returns the tangent of the argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"TRUNCATE", "displayName": "TRUNCATE", "description": "Truncates the first argument to the xth places right to the decimal point, where x is the second argument", "type":"FUNCTION", "argTypes": ["LONG|DOUBLE", "LONG"], "returnType": "DOUBLE", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
    curl -i --negotiate -u:anyUser -b /tmp/cookiejar.txt -c /tmp/cookiejar.txt -s -X POST "${CATALOG_ROOT_URL}/streams/udfs" -F udfConfig='{"name":"UPPER", "displayName": "UPPER", "description": "Returns a character string converted to upper case", "type":"FUNCTION", "argTypes": ["STRING"], "returnType": "STRING", "className":"builtin", "builtin":true};type=application/json' -F builtin=true "${HTTP_HEADERS_FOR_CURL}"
}
function main {
    # Entry point: register default component/service bundles, built-in UDFs,
    # and refresh custom processor digests.
    printf '\n====================================================================================\n'
    printf 'Running bootstrap.sh will create streamline default components, notifiers, udfs and roles\n'
    update_bundles
    add_udfs
    update_custom_processors_with_digest
}
main
|
#!/bin/bash
# Bootstrap a Poetry-managed Python environment.

# Install poetry
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
source $HOME/.poetry/env

# Install dependencies (FIX: was misspelled "peotry", which always failed)
poetry install

# Activate virtual environment.
# NOTE(review): `poetry shell` spawns an interactive subshell that blocks the
# script, so it must be the last step — in the original ordering the install
# line only ran after that shell exited.
poetry shell
|
# Hyper-parameters for the GermEval NER fine-tuning run; exported so that
# run_ner.py (and any child process) can also read them from the environment.
export MAX_LENGTH=128
export BERT_MODEL=bert-base-multilingual-cased
export OUTPUT_DIR=germeval-model
export BATCH_SIZE=32
export NUM_EPOCHS=3
export SAVE_STEPS=750
export SEED=1

# Assemble the CLI arguments in one array instead of a long line-continuation
# chain; same flags, same order, same values.
args=(
  --data_dir ../../NER_DATA/
  --model_type bert
  --labels ../../NER_DATA/labels.txt
  --model_name_or_path "$BERT_MODEL"
  --output_dir "$OUTPUT_DIR"
  --max_seq_length "$MAX_LENGTH"
  --num_train_epochs "$NUM_EPOCHS"
  --per_gpu_train_batch_size "$BATCH_SIZE"
  --save_steps "$SAVE_STEPS"
  --seed "$SEED"
  --do_train
  --do_eval
  --do_predict
  --overwrite_output_dir
)

python3 run_ner.py "${args[@]}"
|
#!/bin/bash
# FIX: shebang was /bin/sh, but this script uses bash-only features
# (arrays, [[ ]]); on systems where sh is dash it would fail to run.
# Attempt to automatically start the Micro Cloud Foundry VM.
vmx_file=micro.vmx

# Candidate vmrun locations: whatever is on PATH, then the macOS
# VMware Fusion application bundle.
tries=( \
`which vmrun` \
'/Applications/VMware Fusion.app/Contents/Library/vmrun' \
)

# Start the VM with the first executable candidate and stop there.
for try in "${tries[@]}"; do
  if [[ -x "$try" ]]; then
    "$try" start "$vmx_file"
    exit
  fi
done

echo 'Unable to find the vmrun command.'
echo "Please open $vmx_file in VMware Workstation, Player or Fusion."
exit 1
|
package com.alipay.api.response;
import java.util.List;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
import com.alipay.api.domain.RtaInfo;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: alipay.user.invite.rta.consult response.
 *
 * <p>Auto-generated response model; fields are populated from the API payload
 * via the {@code @ApiField}/{@code @ApiListField} mapping annotations.
 *
 * @author auto create
 * @since 1.0, 2021-07-26 18:02:26
 */
public class AlipayUserInviteRtaConsultResponse extends AlipayResponse {
	private static final long serialVersionUID = 7859541676636283821L;
	/**
	 * One or more consult strategy labels; when multiple labels are returned
	 * they are separated by "-".
	 */
	@ApiField("principal_label")
	private String principalLabel;
	/**
	 * Whether this traffic is wanted; the result can be true or false.
	 */
	@ApiField("required_flow")
	private Boolean requiredFlow;
	/**
	 * RTA consult result list. The final result looks like:
	 * [{"accountId":"11111"},{"accountId":"22222"}]
	 */
	@ApiListField("rta_info_list")
	@ApiField("rta_info")
	private List<RtaInfo> rtaInfoList;
	public void setPrincipalLabel(String principalLabel) {
		this.principalLabel = principalLabel;
	}
	public String getPrincipalLabel( ) {
		return this.principalLabel;
	}
	public void setRequiredFlow(Boolean requiredFlow) {
		this.requiredFlow = requiredFlow;
	}
	public Boolean getRequiredFlow( ) {
		return this.requiredFlow;
	}
	public void setRtaInfoList(List<RtaInfo> rtaInfoList) {
		this.rtaInfoList = rtaInfoList;
	}
	public List<RtaInfo> getRtaInfoList( ) {
		return this.rtaInfoList;
	}
}
|
// Navigation/member-index data for the struct droid::Runtime::Utilities::Structs::ColorByInstance
// documentation page — presumably Doxygen-generated (do not edit by hand; verify against the
// generator before changing).
// Each entry: [ member display name, HTML anchor URL into the struct's page, child entries (null = leaf) ].
var structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_color_by_instance =
[
    [ "_Color", "structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_color_by_instance.html#a42bfcdbfa85c86b18544c3aa07158dc6", null ],
    [ "_Game_Object", "structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_color_by_instance.html#a1c9e91dee06d7e4c0ee1fe859e53cb16", null ]
];
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.