text stringlengths 1 1.05M |
|---|
<filename>include/six_point_two_eight/make_target_models.h
#pragma once
#include <nodelet/nodelet.h>
#include <ros/ros.h>
#include <nav_msgs/Odometry.h>
#include <sensor_msgs/PointCloud2.h>
#include "six_point_two_eight/point_cloud_utilities.h"
#include "six_point_two_eight/utilities.h"
namespace six_point_two_eight {
class MakeTargetModels : public nodelet::Nodelet {
private:
  // Distance (metres) kept between the robot and the target while circling it.
  static constexpr double radius = 1.25;
  // Number of capture points around the target (one every 360/16 degrees).
  static const int step_size = 16;

  ros::Subscriber odom_subscriber_;
  CallGetPointCloud2Action::TopicBinded get_point_cloud_2_;
  CallMoveBaseAction::TopicBinded move_base_;

public:
  // Nodelet entry point: binds the two action topics and subscribes to
  // odometry. The whole capture routine runs inside the odometry callback
  // and shuts the subscriber down when finished (one-shot behaviour).
  void onInit() {
    get_point_cloud_2_ = CallGetPointCloud2Action::bindTopic("get_point_cloud_2");
    move_base_ = CallMoveBaseAction::bindTopic("move_base");
    odom_subscriber_ = getNodeHandle().subscribe<nav_msgs::Odometry>(
      "odom", 1,
      [&](const auto& message) {
        auto position = message->pose.pose.position;
        auto yaw = tf::getYaw(message->pose.pose.orientation);
        // The target sits `radius` ahead of the robot along its current heading.
        auto target_position = createPointMsg(position.x + std::cos(yaw) * radius, position.y + std::sin(yaw) * radius);
        auto target_yaw = normalizeAngle(yaw + M_PI);  // Yaw of the robot as seen from the target.
        // Walk around the target, capturing a point cloud at each step.
        for (auto i = 0; i < step_size; ++i) {
          ROS_INFO_STREAM("Step " << (i + 1) << "/" << step_size);
          // Retry until a cloud is captured and saved successfully.
          while (ros::ok()) {
            try {
              savePointCloud2File(  // Save the point cloud to a file.
                removeFloorFromPointCloud2(  // Remove the floor from the cloud.
                  transformPointCloud2(  // Re-center on the origin to simplify rotation-based error correction later.
                    getSpherePointCloud2(  // Clip to a sphere to drop distant, low-accuracy sensor points.
                      transformPointCloud2(  // Convert to the odometry frame so clouds can be overlaid.
                        sensor_msgs::PointCloud2ConstPtr(  // Wrap PointCloud2 in a ConstPtr.
                          new sensor_msgs::PointCloud2(
                            get_point_cloud_2_(createGetPointCloud2GoalMsg("points")).points)),  // Fetch the cloud via actionlib.
                        "odom"),
                      target_position,
                      radius * 0.5),
                    createTransformMsg(-target_position.x, -target_position.y, 0))));
              break;
            } catch (const std::exception& ex) {
              ROS_WARN_STREAM(ex.what());
            }
          }
          target_yaw = normalizeAngle(target_yaw + M_PI * 2 / step_size);  // Yaw of the next capture point, seen from the target.
          move_base_(
            createMoveBaseGoalMsg(
              "odom",
              ros::Time::now(),
              target_position.x + std::cos(target_yaw) * radius,
              target_position.y + std::sin(target_yaw) * radius,
              normalizeAngle(target_yaw + M_PI)));
        }
        ROS_INFO_STREAM("Finished!");
        // One-shot: stop listening after the first odometry message completes the tour.
        odom_subscriber_.shutdown();
      });
  }
};
}
|
from statistics import stdev


def is_comfy(temps):
    """Return True when the readings are 'comfy'.

    Readings are comfy when their sample standard deviation is at most 1.0.
    Fewer than two readings cannot vary (stdev would raise StatisticsError),
    so they count as comfy.
    """
    return len(temps) < 2 or stdev(temps) <= 1.0


if __name__ == "__main__":
    # Read whitespace-separated temperatures from stdin and classify them.
    temperatures = list(map(float, input().split()))
    print("COMFY" if is_comfy(temperatures) else "NOT COMFY")
<gh_stars>10-100
# ActiveRecord model backed by the "todos" table; attributes are inferred
# from the database schema, so no explicit declarations are needed here.
class Todo < ActiveRecord::Base
end
|
import tkinter as tk
def centrar_ventana(ventana, aplicacion_ancho=400, aplicacion_largo=200):
    """Center ``ventana`` on the screen.

    Args:
        ventana: Tk window (anything exposing ``winfo_screenwidth``,
            ``winfo_screenheight`` and ``geometry``).
        aplicacion_ancho: Desired window width in pixels (default 400,
            matching the previous hard-coded size).
        aplicacion_largo: Desired window height in pixels (default 200).
    """
    pantalla_ancho = ventana.winfo_screenwidth()
    pantalla_largo = ventana.winfo_screenheight()
    # Top-left corner that leaves equal margins on both sides.
    x = int((pantalla_ancho / 2) - (aplicacion_ancho / 2))
    y = int((pantalla_largo / 2) - (aplicacion_largo / 2))
    ventana.geometry(f"{aplicacion_ancho}x{aplicacion_largo}+{x}+{y}")
class FormularioPersona(tk.Frame):
    """Simple person form (name, age, email) packed into ``master``."""

    def __init__(self, master=None):
        super().__init__(master)
        self.master = master
        self.pack()
        self.create_form()

    def create_form(self):
        """Build the labels, entries and submit button for the form."""
        # Create input fields for name, age, and email
        self.name_label = tk.Label(self, text="Name")
        self.name_label.pack()
        self.name_entry = tk.Entry(self)
        self.name_entry.pack()
        self.age_label = tk.Label(self, text="Age")
        self.age_label.pack()
        self.age_entry = tk.Entry(self)
        self.age_entry.pack()
        self.email_label = tk.Label(self, text="Email")
        self.email_label.pack()
        self.email_entry = tk.Entry(self)
        self.email_entry.pack()
        # Create submit button
        self.submit_button = tk.Button(self, text="Submit", command=self.process_data)
        self.submit_button.pack()

    def process_data(self):
        """Read the current field values when Submit is pressed."""
        # Retrieve and process the entered data
        name = self.name_entry.get()
        age = self.age_entry.get()
        email = self.email_entry.get()
        # Process the data as required
        # NOTE(review): the values are read but never stored or used —
        # presumably a placeholder for real handling; confirm intent.
try:
    # Build the main window, center it, and enter the Tk event loop.
    ventana = tk.Tk()
    centrar_ventana(ventana)
    ventana.title("Formulario")
    form = FormularioPersona(ventana)
    ventana.mainloop()
except Exception as e:
    # Broad catch: report any GUI startup/runtime error instead of crashing.
    print("Existe un error : ", e)
#!/bin/sh
# Stop each of the listed OVH cloud instances, one stopInstance call per id.
for instance_id in 69191 69253 69254 69446 69447 69448; do
    ovhcloud instance stopInstance --instanceId "$instance_id"
done
<filename>src/snek.c
#include "../include/snek.h"
#include <stdlib.h>
#include <curses.h>
#include <stdbool.h>
/*
 * Allocates and initializes a single snake segment at (y, x), linked to the
 * given neighbours. Returns NULL when allocation fails instead of writing
 * through a NULL pointer (the original dereferenced malloc's result
 * unchecked).
 */
snek* init_snek( int y, int x, snek* next, snek* prev ) {
    snek* s = malloc( sizeof(snek) );  /* no cast needed in C */
    if( !s ) {
        return NULL;
    }
    s->y = y;
    s->x = x;
    s->next = next;
    s->prev = prev;
    return s;
}
/* Frees every segment of the snake list starting at head. */
void clean_snek( snek* head ) {
    while( head ) {
        snek* following = head->next;  /* remember before freeing */
        free( head );
        head = following;
    }
}
/* Appends one segment at the tail, duplicating the tail's position. */
void grow_snek( snek* head ) {
    snek* tail;
    for( tail = head; tail->next; tail = tail->next )
        ;  /* walk to the last segment */
    tail->next = init_snek( tail->y, tail->x, NULL, tail );
}
/*
 * Checks what the head has run into.
 * Returns 1 for a fatal collision (screen edge or snake body '#'),
 * 2 when the head is on an apple 'O', and 0 otherwise.
 */
int check_collission( snek* head ) {
    if( head->x < 0 || head->x >= COLS )
        return 1;
    if( head->y < 0 || head->y >= LINES )
        return 1;
    move(head->y, head->x);
    /* inch() returns a chtype with attribute/colour bits OR'ed into it.
     * Mask with A_CHARTEXT so the comparisons below cannot fail when the
     * cell was drawn with attributes; the old truncation to char relied on
     * the text happening to be in the low byte. */
    chtype object_at_head = inch() & A_CHARTEXT;
    if( object_at_head == '#' )
        return 1;
    else if( object_at_head == 'O' )
        return 2;
    return 0;
}
/*
 * Advances the snake one cell in direction d.
 * Returns 1 when the move is fatal (wall or body), 0 otherwise; eating an
 * apple grows the snake and respawns the apple.
 */
int move_snek( snek* head, direction d ) {
    snek* s = head;
    /* Walk to the tail segment. */
    while( s->next ) {
        s = s->next;
    }
    /* Erase the tail cell on screen; every other cell is overdrawn later. */
    move(s->y, s->x);
    addch(' ');
    /* Shift each segment into its predecessor's position, tail first, so
     * positions are copied before they are overwritten. */
    while( s->prev ) {
        s->y = s->prev->y;
        s->x = s->prev->x;
        s = s->prev;
    }
    /* Finally move the head itself one cell. */
    switch( d ) {
        case M_UP:
            head->y--;
            break;
        case M_RIGHT:
            head->x++;
            break;
        case M_DOWN:
            head->y++;
            break;
        case M_LEFT:
            head->x--;
            break;
    }
    int col;
    /* 1 = fatal collision; 2 = apple eaten: grow and place a new apple. */
    if((col = check_collission( head )) == 1)
        return 1;
    else if( col == 2 ) {
        grow_snek( head );
        add_apple( head );  /* add_apple is defined elsewhere in the project */
    }
    return 0;
}
<reponame>tonyrosario/faker<gh_stars>0
require File.expand_path(File.dirname(__FILE__) + '/test_helper.rb')
# Verifies that the en-IND Faker locale supplies string data for the core
# generators and produces Indian-format postal codes and city names.
class TestEnIndLocale < Test::Unit::TestCase
  def setup
    # Remember the caller's locale so the suite leaves global state untouched.
    @previous_locale = Faker::Config.locale
    Faker::Config.locale = 'en-IND'
  end

  def teardown
    Faker::Config.locale = @previous_locale
  end

  # Every locale-backed generator must at least return a String.
  def test_en_ind_methods
    assert Faker::Name.first_name.is_a? String
    assert Faker::Name.last_name.is_a? String
    assert Faker::Address.state.is_a? String
    assert Faker::Address.state_abbr.is_a? String
    assert Faker::Address.default_country.is_a? String
    assert Faker::Internet.free_email.is_a? String
    assert Faker::Internet.domain_suffix.is_a? String
    assert Faker::Company.suffix.is_a? String
  end

  # Indian PIN codes are six digits.
  def test_en_ind_postal_code
    assert_match(/[\d]{6}$/, Faker::Address.postcode)
  end

  def test_en_ind_city
    assert_match(/(\w+\.? ?){2,3}/, Faker::Address.city)
  end
end
|
'use strict';
module.exports = Franz => {
const getMessages = function getMessages() {
const elements = document.getElementsByClassName('suite-preview-bell-badge');
Franz.setBadge(0, elements.length ? 1 : 0 );
};
Franz.loop(getMessages);
};
|
require('babel-polyfill');
require('whatwg-fetch'); // fetch() polyfill for making API calls.
require('normalize.css');
require('../styles/main.css');
require('./style.js');
if (/\:8081/.test(window.location.host)) {
window.apiUrl = 'http://127.0.0.1:3000/api';
window.oauthUrl = 'http://127.0.0.1:3000/oauth';
window.ws = '127.0.0.1:8000';
} else {
window.apiUrl = window.location.origin + '/api';
window.oauthUrl = window.location.origin + '/oauth';
window.ws = window.location.hostname + ':' + window.wsPort;
}
import React from 'react';
import { default as dom } from 'react-dom';
import { applyMiddleware, createStore, combineReducers, compose } from 'redux';
import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { ThemeProvider, injectGlobal } from 'styled-components';
import { theme } from 'mcs-lite-theme';
import reducers from '../reducers';
import { Router, Route, RouterContext, browserHistory } from 'react-router';
import IntlProvider from '../containers/intlProvider';
const store = createStore(reducers, applyMiddleware(thunk));
import routes from '../routes/routing';
var oauth = require('../../../configs/oauth');
var clientId = oauth.webClient.clientId;
var clientSecret = oauth.webClient.secret;
var basic_token = new Buffer(clientId + ':' + clientSecret).toString('base64');
window.basic_token = basic_token;
import '../utils/i18n';
import '../images/favicon.ico';
if (document && document.getElementById('app')) {
dom.render(
<Provider store={store}>
<IntlProvider defaultLocale="zh-TW">
<ThemeProvider theme={theme}>
<Router history={browserHistory} routes={routes} />
</ThemeProvider>
</IntlProvider>
</Provider>,
document.getElementById('app')
);
}
|
#! /bin/sh
# Copyright (C) 1996-2017 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Make sure that Automake suggest using AM_PROG_LEX when a lexer is used.

. test-init.sh

cat >> configure.ac << 'END'
AC_PROG_CC
END

# A program with a .l source but no AM_PROG_LEX in configure.ac.
cat > Makefile.am << 'END'
bin_PROGRAMS = zot
zot_SOURCES = joe.l
END

$ACLOCAL
# automake must fail and its diagnostics must mention the undefined LEX
# variable and suggest adding AM_PROG_LEX.
AUTOMAKE_fails -a
grep 'LEX.* undefined' stderr
grep 'add .*AM_PROG_LEX' stderr

:
|
#!/bin/sh
# Continuously scans the image root ($2) for directories containing image
# files and dispatches label_image_tool.sh on each one with label $1.

# Both arguments are required. The original only printed usage when BOTH
# were empty (nested -z checks), silently running with a missing argument.
if [ -z "$1" ] || [ -z "$2" ]; then
    echo "Usage: label_deamon.sh [label] [imageRoot]"
    exit 1
fi

LABEL_TEST="$1"
echo "using label: $LABEL_TEST "
DATA_DIR="$2"

while true
do
    echo "---------- crawler labeling --------------"
    sleep 1
    for dirs in "${DATA_DIR}"/*
    do
        for file in "${dirs}"/*
        do
            # Dispatch the directory as soon as one image file is found,
            # then stop scanning it (break skips the throttle sleep).
            case "${file##*.}" in
                jpg|jpeg|png|JPG|JPEG|PNG|gif)
                    echo "================================$dirs has file to dispose================================"
                    sh "$(dirname "$0")"/label_image_tool.sh "$LABEL_TEST" "$dirs" &
                    break
                    ;;
            esac
            sleep 2 &
        done
    done
    # Wait for all background labeling jobs before the next sweep.
    wait
done
|
#!/bin/bash
# Prints the usage text (the heredoc below is user-facing output; keep its
# wording stable) and exits 0.
print_usage()
{
    cat <<EOF
USAGE: get-a2a-password.sh [-h]
get-a2a-password.sh [-a appliance] [-B cabundle] [-v version] [-c file] [-k file] [-A apikey] [-p]
-h Show help and exit
-a Network address of the appliance
-B CA bundle for SSL trust validation (no checking by default)
-v Web API Version: 3 is default
-c File containing client certificate
-k File containing client private key
-A A2A API token identifying the account
-p Read certificate password from stdin
Retrieve a password using the Safeguard A2A service.
EOF
    exit 0
}
# Resolve the directory this script lives in so the helper libraries can be
# sourced regardless of the caller's working directory.
ScriptDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Option state, filled by getopts and/or interactive prompts below.
Appliance=
CABundleArg=
CABundle=
Version=3
Cert=
PKey=
ApiKey=
PassStdin=
Pass=

# Shared helpers: handle_ca_bundle_arg and invoke_a2a_method are expected to
# come from these sourced files — confirm against utils/loginfile.sh and
# utils/a2a.sh.
. "$ScriptDir/utils/loginfile.sh"
. "$ScriptDir/utils/a2a.sh"
# Prompts interactively for any required value not supplied via options.
# handle_ca_bundle_arg (sourced) presumably derives $CABundleArg from
# $CABundle — confirm.
require_args()
{
    handle_ca_bundle_arg
    if [ -z "$Appliance" ]; then
        read -p "Appliance Network Address: " Appliance
    fi
    if [ -z "$Cert" ]; then
        read -p "Client Certificate File: " Cert
    fi
    if [ -z "$PKey" ]; then
        read -p "Client Private Key File: " PKey
    fi
    if [ -z "$Pass" ]; then
        # -s keeps the password off the terminal; echo a newline to stderr
        # so subsequent output starts on a fresh line.
        read -s -p "Private Key Password: " Pass
        >&2 echo
    fi
    if [ -z "$ApiKey" ]; then
        read -p "A2A API Key: " ApiKey
    fi
}
# Parse command-line options (see print_usage for their meaning).
while getopts ":a:B:v:c:k:A:ph" opt; do
    case $opt in
    a)
        Appliance=$OPTARG
        ;;
    B)
        CABundle=$OPTARG
        ;;
    v)
        Version=$OPTARG
        ;;
    c)
        Cert=$OPTARG
        ;;
    k)
        PKey=$OPTARG
        ;;
    p)
        # NOTE(review): PassStdin is recorded here but never referenced again
        # in this script — presumably consumed by the sourced a2a.sh helpers;
        # confirm before relying on -p.
        PassStdin="-p"
        ;;
    A)
        ApiKey=$OPTARG
        ;;
    h)
        print_usage
        ;;
    esac
done

require_args

# Pretty-print JSON through jq when available; otherwise pass output through.
ATTRFILTER='cat'
ERRORFILTER='cat'
if [ ! -z "$(which jq)" ]; then
    ERRORFILTER='jq .'
    ATTRFILTER='jq .'
fi

# NOTE(review): $Body is never set in this script — presumably intentionally
# empty for a GET request; confirm against invoke_a2a_method's signature.
Result=$(invoke_a2a_method "$Appliance" "$CABundleArg" "$Cert" "$PKey" "$Pass" "$ApiKey" GET "Credentials?type=Password" $Version "$Body")
# A JSON body carrying a .Code field indicates a service-side error.
Error=$(echo $Result | jq .Code 2> /dev/null)
if [ -z "$Error" -o "$Error" = "null" ]; then
    echo $Result | $ATTRFILTER
else
    echo $Result | $ERRORFILTER
fi
|
<reponame>tanishq-arya/Rotten-Scripts
import tweepy
import time

# Authenticate to Twitter.
CONSUMER_KEY = '<your-consumer-or-API-key-goes-here>'
CONSUMER_SECRET = '<your-consumer-or-API-secret-goes-here>'
ACCESS_KEY = '<your-access-key-goes-here>'
ACCESS_SECRET = '<your-access-secret-goes-here>'

auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)

# Create the API object; wait_on_rate_limit keeps the bot inside Twitter's
# rate limits instead of erroring out.
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
user = api.me()

search = '#python3'
numTweet = 500

for tweet in tweepy.Cursor(api.search, search).items(numTweet):
    try:
        # Perform each action BEFORE reporting it, so the log reflects what
        # actually happened when favorite()/retweet() raises. (The original
        # printed the success message first.)
        tweet.favorite()
        print('Tweet Liked')
        tweet.retweet()
        print("Retweet done")
        time.sleep(10)  # Throttle between tweets.
    except tweepy.TweepError as e:
        print(e.reason)
    except StopIteration:
        break
|
#include <iostream>
#include <cstdlib>
// Prints begin/end markers for one programmable-source test case.
// Wrapped in do { } while (0) so the macro expands to a single statement
// that is safe inside if/else. NOTE: the original version put the middle
// comment on a macro line that itself ended in a backslash, which spliced
// the following statement into the comment and silently dropped the
// "passed" output.
#define TEST_PROGRAMMABLE_SOURCE(type)                                         \
  do {                                                                         \
    std::cout << "Testing " << #type << " programmable source..."              \
              << std::endl;                                                    \
    /* Perform testing for the programmable source type here */                \
    std::cout << "Test for " << #type << " programmable source passed."        \
              << std::endl;                                                    \
  } while (0)

// Exercises the test macro for every supported output data set type.
// vtkNotUsed silences unused-parameter warnings (defined by VTK headers).
int TestProgrammableSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
{
  TEST_PROGRAMMABLE_SOURCE(PolyData);
  TEST_PROGRAMMABLE_SOURCE(StructuredPoints);
  TEST_PROGRAMMABLE_SOURCE(StructuredGrid);
  TEST_PROGRAMMABLE_SOURCE(UnstructuredGrid);
  TEST_PROGRAMMABLE_SOURCE(RectilinearGrid);
  TEST_PROGRAMMABLE_SOURCE(Molecule);
  TEST_PROGRAMMABLE_SOURCE(Table);
  return EXIT_SUCCESS;
}
package service
import (
"context"
"github.com/wiqun/route/internal/common"
"github.com/wiqun/route/internal/config"
. "github.com/wiqun/route/internal/log"
"github.com/wiqun/route/internal/message"
"runtime"
"sync"
)
// Service is the core component: it handles sub, unsub, pub and query requests.
type Service interface {
	common.Runnable
}

type service struct {
	log            Logger
	localMsgRecv   message.LocalMsgChanReceiver
	remoteNotifier message.RemoteMsgNotifier

	// One channel of each kind per IO goroutine; tasks are sharded by the
	// recipient's ConcurrentId so sends to the same peer stay ordered.
	pubRequestResultIOChans []chan pubNotFoundIOTask
	pubIOChans              []chan *localSubscriberBatchPub
	queryIOChans            []chan queryIOTask

	ioNumbs int
}

// queryIOTask pairs a query response with the recipient it must be sent to.
type queryIOTask struct {
	queryPerson message.QueryResultRecipient
	result      *message.QueryResponse
}

// pubNotFoundIOTask carries a "topic has no subscriber" notification back to
// the publisher.
type pubNotFoundIOTask struct {
	pubTopic message.PubResultRecipient
	notFound []byte
}
// NewService builds the Service, sizing the IO goroutine pool to
// IOGoCoroutineCoreNums * NumCPU (minimum 1) and pre-creating one buffered
// task channel of each kind per IO goroutine.
func NewService(logFactoryer LogFactoryer, localMsgRecv message.LocalMsgChanReceiver,
	remoteNotifier message.RemoteMsgNotifier, config *config.ServiceConfig) Service {
	log := logFactoryer.CreateLogger("service")

	s := &service{
		log:            log,
		localMsgRecv:   localMsgRecv,
		remoteNotifier: remoteNotifier,
	}

	s.ioNumbs = config.IOGoCoroutineCoreNums * runtime.NumCPU()
	if s.ioNumbs < 1 {
		s.ioNumbs = 1
	}
	log.Println("io协程数为:", s.ioNumbs)

	for i := 0; i < s.ioNumbs; i++ {
		s.pubRequestResultIOChans = append(s.pubRequestResultIOChans, make(chan pubNotFoundIOTask, config.IOChanSize))
		s.pubIOChans = append(s.pubIOChans, make(chan *localSubscriberBatchPub, config.IOChanSize))
		s.queryIOChans = append(s.queryIOChans, make(chan queryIOTask, config.IOChanSize))
	}
	return s
}
// Run starts the single message-processing goroutine plus s.ioNumbs IO
// goroutines, registering each on wg, then returns (its own wg slot is
// released via the deferred Done).
func (s *service) Run(ctx context.Context, wg *sync.WaitGroup) {
	defer wg.Done()

	wg.Add(1)
	go s.processLocalMsg(ctx, wg)

	wg.Add(s.ioNumbs)
	for i := 0; i < s.ioNumbs; i++ {
		go s.processIO(ctx, wg, s.pubIOChans[i], s.queryIOChans[i], s.pubRequestResultIOChans[i])
	}
}
// processIO drains one shard's three task channels until ctx is cancelled,
// performing the actual (potentially blocking) sends so that processLocalMsg
// never blocks on network IO. Send errors are logged, not propagated.
func (s *service) processIO(ctx context.Context, wg *sync.WaitGroup,
	pubIOChan chan *localSubscriberBatchPub, queryIOChan chan queryIOTask, pubRequestResultChan chan pubNotFoundIOTask) {
	defer wg.Done()
	for {
		select {
		case <-ctx.Done():
			s.log.Println("processIO收到取消事件,退出协程!")
			return
		case result := <-pubRequestResultChan:
			err := result.pubTopic.SendPubNotFoundResult(result.notFound)
			if err != nil {
				s.log.PrintlnError("processIO: SendSearchResponse错误:", err)
			}
		case pub := <-pubIOChan:
			err := pub.flush()
			if err != nil {
				s.log.PrintlnError("processIO: flush错误:", err)
			}
		case query := <-queryIOChan:
			err := query.queryPerson.SendQueryResult(query.result)
			if err != nil {
				s.log.PrintlnError("processIO: SendQueryResult错误:", err)
			}
		}
	}
}
// processLocalMsg owns topicMap: it is the only goroutine that reads or
// mutates subscription state, so the map needs no locking. It dispatches
// every kind of local message until ctx is cancelled.
func (s *service) processLocalMsg(ctx context.Context, wg *sync.WaitGroup) {
	defer wg.Done()

	topicMap := make(map[string]*message.SubCounter)

	for {
		select {
		case <-ctx.Done():
			s.log.Println("processLocalMsg收到取消事件,退出协程!")
			return
		case batchOp := <-s.localMsgRecv.LocalBatchTopicOpChan():
			// Sub and Unsub are mutually exclusive within one batch op because
			// ordering matters:
			//   subscribe A then unsubscribe A -> A ends up unsubscribed
			//   unsubscribe A then subscribe A -> A ends up subscribed
			if batchOp.Sub != nil {
				s.handleSubMsg(batchOp.Sub, topicMap)
			} else {
				s.handleUnSubMsg(batchOp.Unsub, topicMap)
			}
		case queryRequest := <-s.localMsgRecv.QueryRequestChan():
			s.handleQueryRequest(queryRequest, topicMap)
		case localPub := <-s.localMsgRecv.LocalPubMessageChan():
			s.handleLocalPub(localPub, topicMap)
		case remotePub := <-s.localMsgRecv.RemotePubMessageChan():
			s.handleRemotePub(remotePub, topicMap)
		}
	}
}
// handleSubMsg registers the subscriber for each topic in subMsg, creating
// SubCounter entries on demand. Topics whose direct-subscriber count goes
// 0 -> 1 are batched into one NotifierSubscribe call so remote peers learn
// about them.
func (s *service) handleSubMsg(subMsg *message.LocalBatchSubMsg, topicMap map[string]*message.SubCounter) {
	subscriber := subMsg.Subscriber
	var subList []string
	for _, topic := range subMsg.Topic {
		if _, ok := topicMap[topic]; !ok {
			topicMap[topic] = &message.SubCounter{
				Subs: make(map[string]message.LocalSubscriber),
			}
		}
		subCounter := topicMap[topic]

		// Ignore duplicate subscriptions from the same subscriber.
		if _, exist := subCounter.Subs[subscriber.ID()]; exist {
			continue
		}
		subCounter.Subs[subscriber.ID()] = subscriber

		if subscriber.Type() == message.LocalSubscriberDirect {
			// Only LocalSubscriberDirect subscribers can trigger NotifierSubscribe.
			subCounter.DirectCount++
			if subCounter.DirectCount == 1 {
				subList = append(subList, topic)
			}
		}
	}
	if len(subList) != 0 {
		s.remoteNotifier.NotifierSubscribe(subList)
	}
}
// handleUnSubMsg removes the subscriber from each listed topic. Topics whose
// direct-subscriber count drops to zero are batched into one
// NotifierUnsubscribe call, and empty SubCounter entries are deleted so the
// map does not leak.
func (s *service) handleUnSubMsg(unsubMsg *message.LocalBatchUnsubMsg, topicMap map[string]*message.SubCounter) {
	subscriberId := unsubMsg.SubscriberId
	var unsubList []string
	for _, topic := range unsubMsg.Topic {
		subCounter, ok := topicMap[topic]
		if !ok || subCounter.Subs == nil {
			continue
		}
		if old, exist := subCounter.Subs[subscriberId]; exist {
			delete(subCounter.Subs, subscriberId)
			if old.Type() == message.LocalSubscriberDirect {
				// Only LocalSubscriberDirect subscribers can trigger NotifierUnsubscribe.
				subCounter.DirectCount--
				if subCounter.DirectCount == 0 {
					unsubList = append(unsubList, topic)
				}
			}
			// Reclaim the entry so empty maps do not linger in memory.
			if len(subCounter.Subs) == 0 {
				delete(topicMap, topic)
			}
		}
	}
	if len(unsubList) != 0 {
		s.remoteNotifier.NotifierUnsubscribe(unsubList)
	}
}
// handleQueryRequest answers a query with the subset of requested topics
// that currently have at least one subscriber. An empty topic list is
// answered inline (echoing CustomData); otherwise the response is handed to
// the IO goroutine owning this recipient's shard so replies stay ordered.
func (s *service) handleQueryRequest(query *message.LocalQuery, topicMap map[string]*message.SubCounter) {
	if len(query.QueryRequest.TopicList) == 0 {
		err := query.Recipient.SendQueryResult(&message.QueryResponse{CustomData: query.QueryRequest.CustomData})
		if err != nil {
			s.log.PrintlnError("handleQueryRequest: SendQueryResult错误:", err)
		}
		return
	}

	var haveSubscriber []string
	for _, topic := range query.QueryRequest.TopicList {
		subCounter, ok := topicMap[topic]
		if !ok || (len(subCounter.Subs) == 0) {
			continue
		}
		haveSubscriber = append(haveSubscriber, topic)
	}

	s.queryIOChans[query.Recipient.ConcurrentId()%uint64(s.ioNumbs)] <- queryIOTask{
		queryPerson: query.Recipient,
		result:      &message.QueryResponse{CustomData: query.QueryRequest.CustomData, TopicList: haveSubscriber},
	}
}
// localSubscriberBatchPub accumulates all messages destined for one local
// subscriber so they can be flushed in a single SendMessages call.
type localSubscriberBatchPub struct {
	needSend   []*message.Message
	subscriber message.LocalSubscriber
}

// commitRequest converts a pub request item into a message and queues it.
func (l *localSubscriberBatchPub) commitRequest(request *message.PubRequest) {
	l.needSend = append(l.needSend, &message.Message{
		Topic:   request.Topic,
		Payload: request.Payload,
	})
}

// commitMessage queues an already-built message.
func (l *localSubscriberBatchPub) commitMessage(request *message.Message) {
	l.needSend = append(l.needSend, request)
}

// flush sends every queued message to the subscriber in one call.
func (l *localSubscriberBatchPub) flush() error {
	return l.subscriber.SendMessages(l.needSend)
}

// concurrentId shards this subscriber onto a stable IO goroutine.
func (l *localSubscriberBatchPub) concurrentId() uint64 {
	return l.subscriber.ConcurrentId()
}
// handleLocalPub fans a locally-published batch out to every subscriber of
// each topic. Messages for the same subscriber are batched into one
// localSubscriberBatchPub and flushed by the IO goroutine that owns that
// subscriber's shard. Topics with no subscriber trigger a NotFound reply
// when the publisher asked for one.
func (s *service) handleLocalPub(pubMsg message.LocalPub, topicMap map[string]*message.SubCounter) {
	if len(pubMsg.PubRequest.Batch) == 0 {
		return
	}

	sendMap := make(map[string]*localSubscriberBatchPub)

	for _, item := range pubMsg.PubRequest.Batch {
		subCounter, ok := topicMap[item.Topic]
		if !ok || (len(subCounter.Subs) == 0) {
			// No subscriber: notify the publisher if it supplied a NotFound payload.
			if item.NotFound != nil {
				s.pubRequestResultIOChans[pubMsg.Recipient.ConcurrentId()%uint64(s.ioNumbs)] <- pubNotFoundIOTask{
					pubTopic: pubMsg.Recipient,
					notFound: item.NotFound,
				}
			}
			continue
		}
		for _, subscriber := range subCounter.Subs {
			l, ok := sendMap[subscriber.ID()]
			if ok {
				l.commitRequest(item)
			} else {
				l = &localSubscriberBatchPub{}
				l.subscriber = subscriber
				l.commitRequest(item)
				sendMap[subscriber.ID()] = l
			}
		}
	}

	// Hand each per-subscriber batch to that subscriber's IO shard.
	for _, l := range sendMap {
		s.pubIOChans[l.concurrentId()%uint64(s.ioNumbs)] <- l
	}
}
// handleRemotePub delivers messages received from remote peers. Only
// subscribers with Type == message.LocalSubscriberDirect receive them;
// other subscriber types are skipped, which (per the original note) is
// meant to prevent publish loops back to remote nodes.
// NOTE(review): the original Chinese comment read as "skips Direct
// subscribers", the opposite of what the code does — confirm intent.
func (s *service) handleRemotePub(batch []*message.Message, topicMap map[string]*message.SubCounter) {
	if len(batch) == 0 {
		return
	}

	sendMap := make(map[string]*localSubscriberBatchPub)

	for _, item := range batch {
		subCounter, ok := topicMap[item.Topic]
		if !ok || subCounter.DirectCount == 0 {
			continue
		}
		for _, subscriber := range subCounter.Subs {
			if subscriber.Type() != message.LocalSubscriberDirect {
				continue
			}
			l, ok := sendMap[subscriber.ID()]
			if ok {
				l.commitMessage(item)
			} else {
				l = &localSubscriberBatchPub{}
				l.subscriber = subscriber
				l.commitMessage(item)
				sendMap[subscriber.ID()] = l
			}
		}
	}

	for _, l := range sendMap {
		s.pubIOChans[l.concurrentId()%uint64(s.ioNumbs)] <- l
	}
}
|
#!/usr/bin/env bash
# Regenerates Bazel BUILD files for the repo (ppc64le toolchain) and patches
# the libvirt-go vendor target, which gazelle cannot generate correctly.

# remove libvirt BUILD file to regenerate it each time
rm -f vendor/github.com/libvirt/libvirt-go/BUILD.bazel

# generate BUILD files
bazel run \
    --platforms=@io_bazel_rules_go//go/toolchain:linux_ppc64le \
    --workspace_status_command=./hack/print-workspace-status.sh \
    //:gazelle

# inject changes to libvirt BUILD file
bazel run \
    --platforms=@io_bazel_rules_go//go/toolchain:linux_ppc64le \
    --workspace_status_command=./hack/print-workspace-status.sh \
    -- @com_github_bazelbuild_buildtools//buildozer 'add cdeps //:libvirt-libs //:libvirt-headers' //vendor/github.com/libvirt/libvirt-go:go_default_library
bazel run \
    --platforms=@io_bazel_rules_go//go/toolchain:linux_ppc64le \
    --workspace_status_command=./hack/print-workspace-status.sh \
    -- @com_github_bazelbuild_buildtools//buildozer 'add copts -Ibazel-out/k8-fastbuild/genfiles' //vendor/github.com/libvirt/libvirt-go:go_default_library

# align BAZEL files to a single format
bazel run \
    --platforms=@io_bazel_rules_go//go/toolchain:linux_ppc64le \
    --workspace_status_command=./hack/print-workspace-status.sh \
    //:buildifier
|
#!/bin/bash
# Runs the powerline tmux vterm test suite against one or more tmux binaries.

. tests/shlib/common.sh
. tests/shlib/vterm.sh

enter_suite tmux final

vterm_setup

# Build a minimal PATH containing only the tools the test needs.
ln -s "$(command -v env)" "$TEST_ROOT/path"
ln -s "$(command -v cut)" "$TEST_ROOT/path"
ln -s "$ROOT/scripts/powerline-render" "$TEST_ROOT/path"
ln -s "$ROOT/scripts/powerline-config" "$TEST_ROOT/path"

# Runs the vterm tmux test with $POWERLINE_TMUX_EXE; returns success without
# testing when the binary is missing or the interpreter is PyPy.
test_tmux() {
	if test "$PYTHON_IMPLEMENTATION" = PyPy; then
		# FIXME PyPy3 segfaults for some reason, PyPy does it as well, but
		# occasionally.
		return 0
	fi
	if ! command -v "${POWERLINE_TMUX_EXE}" ; then
		return 0
	fi
	ln -sf "$(command -v "${POWERLINE_TMUX_EXE}")" "$TEST_ROOT/path/tmux"
	f="$ROOT/tests/test_in_vterm/test_tmux.py"
	if ! "${PYTHON}" "$f" ; then
		# Report the failure under the tmux version string (e.g. "2.9a").
		local test_name="$("$POWERLINE_TMUX_EXE" -V 2>&1 | cut -d' ' -f2)"
		fail "$test_name" F "Failed vterm test $f"
	fi
}

# Prefer CI-provided tmux builds when present; otherwise fall back to the
# binary named by POWERLINE_TMUX_EXE (default: tmux on PATH).
if test -z "$POWERLINE_TMUX_EXE" && test -d "$ROOT/tests/bot-ci/deps/tmux"
then
	for tmux in "$ROOT"/tests/bot-ci/deps/tmux/tmux-*/tmux ; do
		export POWERLINE_TMUX_EXE="$tmux"
		test_tmux || true
	done
else
	export POWERLINE_TMUX_EXE="${POWERLINE_TMUX_EXE:-tmux}"
	test_tmux || true
fi

vterm_shutdown

exit_suite
|
# Generated by Django 3.1.5 on 2021-01-31 21:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds an RSA signature TextField to the dashboard ``rpc`` model."""

    dependencies = [
        ('dashboard', '0014_auto_20210131_0015'),
    ]

    operations = [
        migrations.AddField(
            model_name='rpc',
            name='signature',
            # Default '' keeps existing rows valid without a data migration.
            field=models.TextField(default='', help_text='Paste RSA PKCS#1 v1.5 signature of command. Ensure final input is base64 encoded.'),
        ),
    ]
|
const https = require('https');

const searchTerm = 'Harry Potter';
// Encode the term so spaces and other reserved characters produce a valid
// query string (the raw space previously yielded a malformed URL).
const url = `https://www.googleapis.com/books/v1/volumes?q=${encodeURIComponent(searchTerm)}&printType=books`;

// Fetch matching books and print each title and primary author.
https.get(url, (response) => {
  let data = '';
  response.on('data', (chunk) => {
    data += chunk;
  });
  response.on('end', () => {
    // The API omits `items` when nothing matches; default to an empty list.
    const books = JSON.parse(data).items || [];
    books.forEach((book) => {
      // Get title and author of book
      console.log(book.volumeInfo.title);
      // Some volumes carry no authors array; avoid crashing on them.
      const authors = book.volumeInfo.authors;
      console.log('By ' + (authors && authors.length ? authors[0] : 'unknown author'));
    });
  });
});
import {
Column,
Entity,
JoinColumn,
OneToOne,
PrimaryGeneratedColumn,
} from 'typeorm';
import { Bicycle } from '../bicycle/bicycle.entity';
// Table row recording a single bicycle rental.
@Entity({ name: 'rentBicycle' })
export class RentBicycle {
  @PrimaryGeneratedColumn()
  id: number;

  // Rental time stored as bigint — presumably a Unix timestamp; confirm
  // units (seconds vs milliseconds) against the code that writes it.
  @Column({ type: 'bigint' })
  rentTime: number;

  @Column({ type: 'bigint' })
  rentDate: number;

  // Foreign key to the rented bicycle (paired with the relation below).
  @Column()
  bicycleId: number;

  @OneToOne(() => Bicycle, (bicycle) => bicycle.rentBicycle)
  @JoinColumn()
  bicycle: Bicycle;
}
|
package com.wpisen.trace.server.service.impl;
import com.wpisen.trace.agent.trace.TraceNode;
import com.wpisen.trace.server.common.TraceUtils;
import com.wpisen.trace.server.service.NodeQueryService;
import com.wpisen.trace.server.service.entity.PageList;
import com.wpisen.trace.server.service.entity.SearchRequestParam;
import org.elasticsearch.index.query.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.FacetedPage;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * Elasticsearch-backed implementation of {@link NodeQueryService}: searches,
 * lists and fetches trace nodes, stored in one index per project.
 *
 * @author <EMAIL>
 * @version 1.0
 * @date: 2017/1/19 15:36
 * @since JDK 1.7
 */
@SuppressWarnings("deprecation")
@Service
public class NodeQueryServiceImpl implements NodeQueryService {

    // Base fields fetched for node listings (excludes heavy payload fields).
    private static final String[] nodeBaseFiles = {"traceId", "rpcId", "appId", "appDetail", "nodeType", "resultState", "resultSize",
            "servicePath", "serviceName", "beginTime", "endTime", "addressIp", "fromIp", "inParam", "errorMessage"};

    @Autowired
    ElasticsearchTemplate esTemplate;

    /**
     * Searches node records matching the composite parameters.
     *
     * @param proId project id (selects the ES index)
     * @param param composite parameters; timeBegin, pageIndex and pageSize are required
     * @return one page of matching nodes, newest beginTime first
     */
    @Override
    public PageList<TraceNode> searchNodePage(Integer proId, SearchRequestParam param) {
        Assert.notNull(param.getTimeBegin());
        Assert.notNull(param.getPageIndex());
        Assert.notNull(param.getPageSize());
        AndFilterBuilder filterBuilder = FilterBuilders.andFilter();
        // Filter on the node begin-time range.
        RangeFilterBuilder timeFilte = FilterBuilders.rangeFilter("beginTime").from(param.getTimeBegin());
        if (param.getTimeEnd() != null) {
            timeFilte.to(param.getTimeEnd());
        }
        filterBuilder.add(timeFilte);
        // IP filters. TODO: consider carrying the ip inside the traceId later.
        if (StringUtils.hasText(param.getClientIp())) {
            filterBuilder.add(FilterBuilders.termFilter("fromIp", param.getClientIp()));
        }
        if (StringUtils.hasText(param.getAddressIp())) {
            filterBuilder.add(FilterBuilders.termFilter("addressIp", param.getAddressIp()));
        }
        // Node type filter.
        if (StringUtils.hasText(param.getNodeType())) {
            filterBuilder.add(FilterBuilders.termFilter("nodeType", param.getNodeType()));
        }
        QueryBuilder queryBuilder = null;
        if (StringUtils.hasText(param.getQueryWord())) {
            queryBuilder = QueryBuilders.queryStringQuery(param.getQueryWord());
        }
        SearchQuery query = new NativeSearchQuery(queryBuilder, filterBuilder);
        query.addIndices(TraceUtils.es_index_prefix + proId);
        query.addTypes("TraceNode");
        // ES pages are zero-based; the API exposed here is one-based.
        query.setPageable(new PageRequest(param.getPageIndex() - 1, param.getPageSize()));
        query.addSort(new Sort(Sort.Direction.DESC, "beginTime"));
        query.addFields(nodeBaseFiles);
        FacetedPage<TraceNode> facetedPage = esTemplate.queryForPage(query, TraceNode.class);
        PageList<TraceNode> result = new PageList<>();
        result.setPageIndex(facetedPage.getNumber() + 1);
        result.setPageSize(facetedPage.getSize());
        result.setTotalElements(facetedPage.getTotalElements());
        result.setTotalPage(facetedPage.getTotalPages());
        result.setElements(facetedPage.getContent());
        return result;
    }

    /**
     * Returns all nodes recorded for the given trace id (capped at 1000).
     *
     * @param proId   project id (selects the ES index)
     * @param traceId trace id to look up
     * @return matching nodes, base fields only
     */
    @Override
    public List<TraceNode> getNodesByTraceId(Integer proId, String traceId) {
        Assert.notNull(traceId);
        FilterBuilder filterBuilder = FilterBuilders.termFilter("traceId", traceId);
        SearchQuery query = new NativeSearchQuery(null, filterBuilder);
        query.addFields(nodeBaseFiles);
        query.addIndices(TraceUtils.es_index_prefix + proId);
        query.addTypes("TraceNode");
        query.setPageable(new PageRequest(0, 1000));
        List<TraceNode> list = esTemplate.queryForList(query, TraceNode.class);
        ArrayList<TraceNode> result = new ArrayList<>(list.size());
        result.addAll(list);
        return result;
    }

    /**
     * Fetches one trace node by its document id ({@code traceId + "_" + nodeId}).
     *
     * @param proId   project id (selects the ES index)
     * @param traceId trace id
     * @param nodeId  node (rpc) id
     * @return the trace node
     * @throws IllegalArgumentException when no node (or more than one) matches
     */
    @Override
    public TraceNode getNodeById(Integer proId, String traceId, String nodeId) {
        Assert.notNull(traceId);
        Assert.notNull(nodeId);
        SearchQuery query = new NativeSearchQuery(QueryBuilders.idsQuery().ids(traceId + "_" + nodeId));
        query.addIndices(TraceUtils.es_index_prefix + proId);
        query.addTypes("TraceNode");
        List<TraceNode> list = esTemplate.queryForList(query, TraceNode.class);
        if (list.isEmpty()) {
            throw new IllegalArgumentException(String.format("traceNode not found traceID=%s,rcpId=%s", traceId, nodeId));
        } else if (list.size() > 1) {
            throw new IllegalArgumentException(String.format("result Too much 1 traceID=%s,rcpId=%s", traceId, nodeId));
        }
        return list.get(0);
    }
}
|
/**
* Copyright 2018-2020 Dynatrace LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dynatrace.openkit.core.util;
import com.dynatrace.openkit.api.LogLevel;
import com.dynatrace.openkit.api.Logger;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import static com.dynatrace.openkit.api.LogLevel.DEBUG;
import static com.dynatrace.openkit.api.LogLevel.ERROR;
import static com.dynatrace.openkit.api.LogLevel.INFO;
import static com.dynatrace.openkit.api.LogLevel.WARN;
/**
 * Default {@link Logger} implementation that writes timestamped entries to a
 * {@link PrintStream} (System.out by default), filtered by a minimum
 * {@link LogLevel}.
 */
public class DefaultLogger implements Logger {

    private final LogLevel logLevel;
    private final PrintStream outputStream;

    private static final String LINE_SEPARATOR = System.getProperty("line.separator");

    // SimpleDateFormat keeps mutable internal state and is NOT thread safe;
    // every use must synchronize on this instance (see getUTCTime).
    private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");

    static {
        DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
    }

    /**
     * Formats the current instant as a UTC timestamp.
     *
     * Synchronized because loggers are typically invoked from multiple
     * threads and unguarded concurrent SimpleDateFormat.format() calls can
     * produce corrupted output (the previous version called it unguarded).
     */
    private static String getUTCTime() {
        synchronized (DATE_FORMAT) {
            return DATE_FORMAT.format(new Date());
        }
    }

    /** Creates a logger writing to System.out with the given threshold. */
    public DefaultLogger(LogLevel logLevel) {
        this(logLevel, System.out);
    }

    /** Package-private: lets tests capture output via a custom stream. */
    DefaultLogger(LogLevel logLevel, PrintStream outputStream) {
        this.logLevel = logLevel;
        this.outputStream = outputStream;
    }

    @Override
    public void log(LogLevel level, String message) {
        log(level, message, null);
    }

    /**
     * Writes a log entry when {@code level} passes the configured threshold,
     * appending the throwable's stack trace when one is supplied.
     */
    @Override
    public void log(LogLevel level, String message, Throwable throwable) {
        if (!level.hasSameOrGreaterPriorityThan(this.logLevel)) {
            return;
        }
        String logEntry = getUTCTime() + " " + level.name() + " [" + Thread.currentThread().getName() + "] " + message;
        if (throwable != null) {
            final StringWriter stringWriter = new StringWriter();
            final PrintWriter printWriter = new PrintWriter(stringWriter, true);
            throwable.printStackTrace(printWriter);
            final String stacktrace = stringWriter.getBuffer().toString();
            logEntry += LINE_SEPARATOR + stacktrace;
        }
        outputStream.println(logEntry);
    }

    @Override
    public void error(String message) {
        log(ERROR, message);
    }

    @Override
    public void error(String message, Throwable t) {
        log(ERROR, message, t);
    }

    @Override
    public void warning(String message) {
        log(WARN, message);
    }

    @Override
    public void info(String message) {
        log(INFO, message);
    }

    @Override
    public void debug(String message) {
        log(DEBUG, message);
    }

    @Override
    public boolean isErrorEnabled() {
        return ERROR.hasSameOrGreaterPriorityThan(logLevel);
    }

    @Override
    public boolean isWarnEnabled() {
        return WARN.hasSameOrGreaterPriorityThan(logLevel);
    }

    @Override
    public boolean isInfoEnabled() {
        return INFO.hasSameOrGreaterPriorityThan(logLevel);
    }

    @Override
    public boolean isDebugEnabled() {
        return DEBUG.hasSameOrGreaterPriorityThan(logLevel);
    }
}
|
#!/usr/bin/env bash
# Fixture configuration for exercising the README update script with a
# known set of project/package values, then running it against a test file.
export README_TEMPLATE="./templates/README.md"
export PROJECT_NAME="Testing"
# Package under test and the version matrices rendered into the README.
export PACKAGE="uvicorn"
export PACKAGE_VERSIONS="0.13.3 0.13.4 0.14.0 0.15.0"
export PYTHON_VERSIONS="3.6 3.7 3.8 3.9 3.10"
# Repository identity used for links/badges in the template.
export ORGANIZATION="TestOrganization"
export REPOSITORY="TestOrganization/TestRepository"
export REPOSITORY_SHORT="TestRepository"
./scripts/update_readme.sh ./test_readme.md
|
# Gather host facts (macOS version, memory, CPU, disk) and abort early if the
# machine does not meet the minimum build requirements.
OS_VER=$(sw_vers -productVersion)
OS_MAJ=$(echo "${OS_VER}" | cut -d'.' -f1)
OS_MIN=$(echo "${OS_VER}" | cut -d'.' -f2)
OS_PATCH=$(echo "${OS_VER}" | cut -d'.' -f3)
# Physical memory in (approximate, decimal) gigabytes.
MEM_GIG=$(bc <<< "($(sysctl -in hw.memsize) / 1024000000)")
CPU_SPEED=$(bc <<< "scale=2; ($(sysctl -in hw.cpufrequency) / 10^8) / 10")
CPU_CORE=$( sysctl -in machdep.cpu.core_count )
DISK_INSTALL=$(df -h . | tail -1 | tr -s ' ' | cut -d\  -f1 || cut -d' ' -f1)
blksize=$(df . | head -1 | awk '{print $2}' | cut -d- -f1)
gbfactor=$(( 1073741824 / blksize ))
total_blks=$(df . | tail -1 | awk '{print $2}')
avail_blks=$(df . | tail -1 | awk '{print $4}')
DISK_TOTAL=$((total_blks / gbfactor ))
DISK_AVAIL=$((avail_blks / gbfactor ))
# NOTE(review): ARCH and DISK_MIN are expected to be exported by the calling
# script — confirm against the entry-point build script.
printf "\\n\\tOS name: %s\\n" "${ARCH}"
printf "\\tOS Version: %s\\n" "${OS_VER}"
printf "\\tCPU speed: %sGhz\\n" "${CPU_SPEED}"
printf "\\tCPU cores: %s\\n" "${CPU_CORE}"
printf "\\tPhysical Memory: %s Gbytes\\n" "${MEM_GIG}"
printf "\\tDisk install: %s\\n" "${DISK_INSTALL}"
printf "\\tDisk space total: %sG\\n" "${DISK_TOTAL}"
printf "\\tDisk space available: %sG\\n\\n" "${DISK_AVAIL}"
if [ "${MEM_GIG}" -lt 7 ]; then
	echo "Your system must have 7 or more Gigabytes of physical memory installed."
	echo "Exiting now."
	exit 1
fi
# Fixed: compare the major version as well. The previous minor-only check
# ("OS_MIN < 12") wrongly rejected macOS 11+ (e.g. 11.0 has minor version 0).
if [ "${OS_MAJ}" -lt 10 ] || { [ "${OS_MAJ}" -eq 10 ] && [ "${OS_MIN}" -lt 12 ]; }; then
	echo "You must be running Mac OS 10.12.x or higher to install CELESOS."
	echo "Exiting now."
	exit 1
fi
if [ "${DISK_AVAIL}" -lt "$DISK_MIN" ]; then
	echo "You must have at least ${DISK_MIN}GB of available storage to install CELESOS."
	echo "Exiting now."
	exit 1
fi
# Verify the host tooling required for the build: xcode-select, Ruby (used by
# the Homebrew installer), and Homebrew itself (offering to install it).
printf "\\tChecking xcode-select installation\\n"
if ! XCODESELECT=$( command -v xcode-select)
then
	printf "\\n\\tXCode must be installed in order to proceed.\\n\\n"
	printf "\\tExiting now.\\n"
	exit 1
fi
printf "\\txcode-select installation found @ \\n"
printf "\\t%s \\n\\n" "${XCODESELECT}"
printf "\\tChecking Ruby installation.\\n"
if ! RUBY=$( command -v ruby)
then
	printf "\\nRuby must be installed in order to proceed.\\n\\n"
	printf "\\tExiting now.\\n"
	exit 1
fi
printf "\\tRuby installation found @ \\n"
printf "\\t%s \\n\\n" "${RUBY}"
printf "\\tChecking Home Brew installation\\n"
if ! BREW=$( command -v brew )
then
	printf "\\tHomebrew must be installed to compile CELES.OS\\n\\n"
	printf "\\tDo you wish to install Home Brew?\\n"
	# Interactive prompt: 1 = install Homebrew via its official Ruby
	# installer, 2 = abort the whole build.
	select yn in "Yes" "No"; do
		case "${yn}" in
			[Yy]* )
			# Ensure the command-line tools exist first; errors are ignored
			# because they may already be installed.
			"${XCODESELECT}" --install 2>/dev/null;
			if ! "${RUBY}" -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
			then
				echo "Unable to install homebrew at this time. Exiting now."
				exit 1;
			else
				# Re-resolve brew now that the installer has run.
				BREW=$( command -v brew )
			fi
			break;;
			[Nn]* ) echo "User aborted homebrew installation. Exiting now.";
			exit 1;;
			* ) echo "Please enter 1 for yes or 2 for no.";;
		esac
	done
fi
printf "\\tHome Brew installation found @\\n"
printf "\\t%s\\n\\n" "${BREW}"
# Walk the CSV dependency manifest, detect which build dependencies are
# missing, and (after confirmation) install them all with Homebrew.
COUNT=1
PERMISSION_GETTEXT=0
DISPLAY=""
DEP=""
printf "\\tChecking dependencies.\\n"
# The manifest is comma-separated: name,tester,testee,brewname,uri.
var_ifs="${IFS}"
IFS=","
while read -r name tester testee brewname uri
do
	printf "\\tChecking %s ... " "${name}"
	# "tester" is a test(1) operator (e.g. -x) applied to "testee".
	if [ "${tester}" "${testee}" ]; then
		printf "\\t\\t %s found\\n" "${name}"
		continue
	fi
	# resolve conflict with homebrew glibtool and apple/gnu installs of libtool
	if [ "${testee}" == "/usr/local/bin/glibtool" ]; then
		if [ "${tester}" "/usr/local/bin/libtool" ]; then
			printf "\\t\\t %s found\\n" "${name}"
			continue
		fi
	fi
	# gettext needs ownership of /usr/local/share fixed before linking.
	if [ "${brewname}" = "gettext" ]; then
		PERMISSION_GETTEXT=1
	fi
	DEP=$DEP"${brewname} "
	DISPLAY="${DISPLAY}${COUNT}. ${name}\\n\\t"
	# NOTE(review): bldred/txtrst color codes are assumed to be set by the
	# calling script — confirm.
	printf "\\t\\t %s ${bldred}NOT${txtrst} found.\\n" "${name}"
	(( COUNT++ ))
done < "${SOURCE_DIR}/scripts/celesos_build_dep"
IFS="${var_ifs}"
# Python 3 is checked separately because it is probed by execution, not path.
printf "\\tChecking Python3 ... "
if [ -z "$( python3 -c 'import sys; print(sys.version_info.major)' 2>/dev/null )" ]; then
	DEP=$DEP"python@3 "
	DISPLAY="${DISPLAY}${COUNT}. Python 3\\n\\t"
	printf "\\t\\t python3 ${bldred}NOT${txtrst} found.\\n"
	(( COUNT++ ))
else
	printf "\\t\\t Python3 found\\n"
fi
# COUNT > 1 means at least one dependency was missing.
if [ $COUNT -gt 1 ]; then
	printf "\\n\\tThe following dependencies are required to install CELESOS.\\n"
	printf "\\n\\t${DISPLAY}\\n\\n"
	echo "Do you wish to install these packages?"
	select yn in "Yes" "No"; do
		case $yn in
			[Yy]* )
			if [ $PERMISSION_GETTEXT -eq 1 ]; then
				sudo chown -R "$(whoami)" /usr/local/share
			fi
			"${XCODESELECT}" --install 2>/dev/null;
			printf "\\tUpdating Home Brew.\\n"
			if ! brew update
			then
				printf "\\tUnable to update Home Brew at this time.\\n"
				printf "\\tExiting now.\\n\\n"
				exit 1;
			fi
			printf "\\tInstalling Dependencies.\\n"
			if ! "${BREW}" install --force ${DEP}
			then
				printf "\\tHomebrew exited with the above errors.\\n"
				printf "\\tExiting now.\\n\\n"
				exit 1;
			fi
			# llvm@4 is keg-only and must stay unlinked; everything else is
			# re-linked with --force to win over pre-existing installs.
			if [[ "$DEP" == "llvm@4" ]]; then
				"${BREW}" unlink ${DEP}
			elif ! "${BREW}" unlink ${DEP} && "${BREW}" link --force ${DEP}
			then
				printf "\\tHomebrew exited with the above errors.\\n"
				printf "\\tExiting now.\\n\\n"
				exit 1;
			fi
			break;;
			[Nn]* ) echo "User aborting installation of required dependencies, Exiting now."; exit;;
			* ) echo "Please type 1 for yes or 2 for no.";;
		esac
	done
else
	printf "\\n\\tNo required Home Brew dependencies to install.\\n"
fi
# Ensure exactly Boost 1.67.0 is installed under /usr/local: offer to remove
# any other version, then install the pinned Homebrew formula.
printf "\\n\\tChecking boost library installation.\\n"
BVERSION=$( grep "#define BOOST_VERSION" "/usr/local/include/boost/version.hpp" 2>/dev/null | tail -1 | tr -s ' ' | cut -d\  -f3 )
if [ "${BVERSION}" != "106700" ]; then
	if [ ! -z "${BVERSION}" ]; then
		printf "\\tFound Boost Version %s.\\n" "${BVERSION}"
		# Fixed: the format string was "\\CELES.OS..." (missing 't'), which
		# printed a stray backslash instead of a tab.
		printf "\\tCELES.OS requires Boost version 1.67.\\n"
		printf "\\tWould you like to uninstall version %s and install Boost version 1.67.\\n" "${BVERSION}"
		select yn in "Yes" "No"; do
			case $yn in
				[Yy]* )
				# Prefer a clean Homebrew uninstall; fall back to removing
				# the headers/libraries manually for non-brew installs.
				if "${BREW}" list | grep "boost"
				then
					printf "\\tUninstalling Boost Version %s.\\n" "${BVERSION}"
					if ! "${BREW}" uninstall --force boost
					then
						printf "\\tUnable to remove boost libraries at this time. 0\\n"
						printf "\\tExiting now.\\n\\n"
						exit 1;
					fi
				else
					printf "\\tRemoving Boost Version %s.\\n" "${BVERSION}"
					if ! sudo rm -rf "/usr/local/include/boost"
					then
						printf "\\tUnable to remove boost libraries at this time. 1\\n"
						printf "\\tExiting now.\\n\\n"
						exit 1;
					fi
					if ! sudo rm -rf /usr/local/lib/libboost*
					then
						printf "\\tUnable to remove boost libraries at this time. 2\\n"
						printf "\\tExiting now.\\n\\n"
						exit 1;
					fi
				fi
				break;;
				[Nn]* ) echo "User cancelled installation of Boost libraries, Exiting now."; exit;;
				* ) echo "Please type 1 for yes or 2 for no.";;
			esac
		done
	fi
	printf "\\tInstalling boost libraries.\\n"
	# Pinned formula revision corresponding to Boost 1.67.0.
	if ! "${BREW}" install https://raw.githubusercontent.com/Homebrew/homebrew-core/f946d12e295c8a27519b73cc810d06593270a07f/Formula/boost.rb
	then
		printf "\\tUnable to install boost 1.67 libraries at this time. 0\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	# A stale build directory would reference the old Boost; force a rebuild.
	if [ -d "$BUILD_DIR" ]; then
		if ! rm -rf "$BUILD_DIR"
		then
			printf "\\tUnable to remove directory %s. Please remove this directory and run this script %s again. 0\\n" "$BUILD_DIR" "${BASH_SOURCE[0]}"
			printf "\\tExiting now.\\n\\n"
			exit 1;
		fi
	fi
	printf "\\tBoost 1.67.0 successfully installed @ /usr/local.\\n"
else
	printf "\\tBoost 1.67.0 found at /usr/local.\\n"
fi
# Build and install the MongoDB C driver (1.10.2) and C++ driver (v3.3) from
# source if no suitable static C++ driver (3.x, minor >= 3) is present.
printf "\\n\\tChecking MongoDB C++ driver installation.\\n"
MONGO_INSTALL=true
if [ -e "/usr/local/lib/libmongocxx-static.a" ]; then
	MONGO_INSTALL=false
	if ! version=$( grep "Version:" /usr/local/lib/pkgconfig/libmongocxx-static.pc | tr -s ' ' | awk '{print $2}' )
	then
		printf "\\tUnable to determine mongodb-cxx-driver version.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	maj=$( echo "${version}" | cut -d'.' -f1 )
	min=$( echo "${version}" | cut -d'.' -f2 )
	# Reinstall unless the installed version is exactly 3.3+ within major 3.
	if [ "${maj}" -gt 3 ]; then
		MONGO_INSTALL=true
	elif [ "${maj}" -eq 3 ] && [ "${min}" -lt 3 ]; then
		MONGO_INSTALL=true
	fi
fi
if [ $MONGO_INSTALL == "true" ]; then
	if ! cd "${TEMP_DIR}"
	then
		printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	# pkg-config is required by the driver's build; install and link it if absent.
	if ! pkgconfig=$( "${BREW}" list | grep pkg-config )
	then
		if ! "${BREW}" install --force pkg-config
		then
			printf "\\tHomebrew returned an error installing pkg-config.\\n"
			printf "\\tExiting now.\\n\\n"
			exit 1;
		fi
		if ! "${BREW}" unlink pkg-config && "${BREW}" link --force pkg-config
		then
			printf "\\tHomebrew returned an error linking pkgconfig.\\n"
			printf "\\tExiting now.\\n\\n"
			exit 1;
		fi
	fi
	# Download, unpack, build, and install the C driver.
	STATUS=$( curl -LO -w '%{http_code}' --connect-timeout 30 https://github.com/mongodb/mongo-c-driver/releases/download/1.10.2/mongo-c-driver-1.10.2.tar.gz )
	if [ "${STATUS}" -ne 200 ]; then
		if ! rm -f "${TEMP_DIR}/mongo-c-driver-1.10.2.tar.gz"
		then
			printf "\\tUnable to remove file %s/mongo-c-driver-1.10.2.tar.gz.\\n" "${TEMP_DIR}"
		fi
		printf "\\tUnable to download MongoDB C driver at this time.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! tar xf mongo-c-driver-1.10.2.tar.gz
	then
		printf "\\tUnable to unarchive file %s/mongo-c-driver-1.10.2.tar.gz.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! rm -f "${TEMP_DIR}/mongo-c-driver-1.10.2.tar.gz"
	then
		printf "\\tUnable to remove file mongo-c-driver-1.10.2.tar.gz.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}"/mongo-c-driver-1.10.2
	then
		printf "\\tUnable to cd into directory %s/mongo-c-driver-1.10.2.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! mkdir cmake-build
	then
		printf "\\tUnable to create directory %s/mongo-c-driver-1.10.2/cmake-build.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd cmake-build
	then
		printf "\\tUnable to enter directory %s/mongo-c-driver-1.10.2/cmake-build.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DENABLE_BSON=ON \
	-DENABLE_SSL=DARWIN -DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF -DENABLE_STATIC=ON ..
	then
		printf "\\tConfiguring MongoDB C driver has encountered the errors above.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! make -j"${CPU_CORE}"
	then
		printf "\\tError compiling MongoDB C driver.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! sudo make install
	then
		printf "\\tError installing MongoDB C driver.\\nMake sure you have sudo privileges.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}"
	then
		printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! rm -rf "${TEMP_DIR}/mongo-c-driver-1.10.2"
	then
		printf "\\tUnable to remove directory %s/mongo-c-driver-1.10.2.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	# Now clone, build, and install the C++ driver on top of the C driver.
	if ! git clone https://github.com/mongodb/mongo-cxx-driver.git --branch releases/v3.3 --depth 1
	then
		printf "\\tUnable to clone MongoDB C++ driver at this time.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}/mongo-cxx-driver/build"
	then
		printf "\\tUnable to enter directory %s/mongo-cxx-driver/build.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cmake -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local ..
	then
		printf "\\tCmake has encountered the above errors building the MongoDB C++ driver.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! make -j"${CPU_CORE}"
	then
		printf "\\tError compiling MongoDB C++ driver.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! sudo make install
	then
		printf "\\tError installing MongoDB C++ driver.\\nMake sure you have sudo privileges.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}"
	then
		printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! rm -rf "${TEMP_DIR}/mongo-cxx-driver"
	then
		# Fixed: an extra "${TEMP_DIR}" argument made printf re-apply the
		# format string a second time, garbling the error message.
		printf "\\tUnable to remove directory %s/mongo-cxx-driver.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	printf "\\tMongo C++ driver installed at /usr/local/lib/libmongocxx-static.a.\\n"
else
	printf "\\tMongo C++ driver found at /usr/local/lib/libmongocxx-static.a.\\n"
fi
# Build LLVM/Clang 4.0 with the experimental WebAssembly backend and install
# it under /usr/local/wasm, unless it is already present.
printf "\\n\\tChecking LLVM with WASM support.\\n"
if [ ! -d /usr/local/wasm/bin ]; then
	if ! cd "${TEMP_DIR}"
	then
		printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! mkdir "${TEMP_DIR}/wasm-compiler"
	then
		printf "\\tUnable to create directory %s/wasm-compiler.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}/wasm-compiler"
	then
		printf "\\tUnable to enter directory %s/wasm-compiler.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	# Shallow clones of the release_40 branches keep the download small.
	if ! git clone --depth 1 --single-branch --branch release_40 https://github.com/llvm-mirror/llvm.git
	then
		printf "\\tUnable to clone llvm repo @ https://github.com/llvm-mirror/llvm.git.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	# clang must live inside llvm/tools to be built as part of LLVM.
	if ! cd "${TEMP_DIR}/wasm-compiler/llvm/tools"
	then
		printf "\\tUnable to enter directory %s/wasm-compiler/llvm/tools.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! git clone --depth 1 --single-branch --branch release_40 https://github.com/llvm-mirror/clang.git
	then
		printf "\\tUnable to clone clang repo @ https://github.com/llvm-mirror/clang.git.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}/wasm-compiler/llvm"
	then
		printf "\\tUnable to enter directory %s/wasm-compiler/llvm.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! mkdir "${TEMP_DIR}/wasm-compiler/llvm/build"
	then
		printf "\\tUnable to create directory %s/wasm-compiler/llvm/build.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! cd "${TEMP_DIR}/wasm-compiler/llvm/build"
	then
		printf "\\tUnable to enter directory %s/wasm-compiler/llvm/build.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	# Empty LLVM_TARGETS_TO_BUILD + experimental WebAssembly target builds
	# only the WASM backend, keeping compile time down.
	if ! cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX=/usr/local/wasm \
	-DLLVM_TARGETS_TO_BUILD= -DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly \
	-DCMAKE_BUILD_TYPE=Release ../
	then
		printf "\\tError compiling LLVM/Clang with WASM support.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! sudo make -j"${CPU_CORE}" install
	then
		printf "\\tCompiling LLVM/Clang with WASM support has exited with the error above.\\n"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	if ! sudo rm -rf "${TEMP_DIR}/wasm-compiler"
	then
		printf "\\tUnable to remove directory %s/wasm-compiler.\\n" "${TEMP_DIR}"
		printf "\\tExiting now.\\n\\n"
		exit 1;
	fi
	printf "\\tSuccessfully installed LLVM/Clang with WASM support @ /usr/local/wasm/bin/.\\n"
else
	printf "\\tWASM found at /usr/local/wasm/bin/.\\n"
fi
|
<gh_stars>0
import { CodeBuildCloudWatchStateEvent } from 'aws-lambda';
import { IncomingMessage } from 'http';
import * as https from 'https';
import * as url from 'url';
/**
 * Lambda handler: forwards CodeBuild state-change events to a Slack
 * incoming webhook.
 *
 * Environment:
 * - SLACK_WEBHOOK_URL: full Slack webhook URL to POST to.
 * - STAGE: deployment stage, prefixed to the Slack message.
 */
export const handler = (event: CodeBuildCloudWatchStateEvent): void => {
  console.info('Debug event\n' + JSON.stringify(event, null, 2));
  const state = event.detail['build-status'];
  const subject = `project: ${event.detail['project-name']} \n ${event['detail-type']}: ${state}`;
  const webhookURL = url.parse(process.env.SLACK_WEBHOOK_URL as string);
  const req = https.request(
    {
      hostname: webhookURL.host,
      port: 443,
      path: webhookURL.path,
      method: 'POST',
      headers: {
        // Fixed: the header name was misspelled as 'Context-Type', so the
        // request was never actually sent as application/json.
        'Content-Type': 'application/json',
      },
    },
    (res: IncomingMessage) => {
      console.log(`STATUS: ${res.statusCode}`);
      res.on('data', (chunk) => {
        console.log(`BODY: ${chunk}`);
      });
    }
  );
  req.write(
    JSON.stringify({
      text: `${process.env.STAGE}: ${subject}`,
    })
  );
  req.end();
};
|
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Reverse A String</title>
<script>
// Returns the characters of `str` in reverse order.
function reverseString(str) {
  var newString = '';
  for (var i = str.length - 1; i >= 0; i--) {
    newString += str[i];
  }
  return newString;
}
// Reads the input field, reverses it, and displays the result.
function getResult() {
  var str = document.getElementById("string-input").value;
  // textContent (not innerHTML) so user-typed markup is never parsed as HTML.
  document.getElementById("result").textContent = reverseString(str);
}
</script>
</head>
<body>
<h1>Reverse A String</h1>
Enter a string: <input type="text" id="string-input">
<button type="button" onclick="getResult()">Submit</button>
<p id="result"></p>
</body>
</html>
<filename>src/node/orginizeData.ts
// const TESTDATA = {
// data: [
// ["my title 1", 0, 20, true],
// ["my title 2", 1, 21, false],
// ["my title 3", 2, 22, true],
// ],
// labels: ["title", "id", "value", "isGood"],
// }
/**
 * Zips each data row with the label list, producing one object per row
 * whose keys are the labels and whose values are the row entries.
 */
export function organizeData(data: Array<any[]>, labels: Array<any>) {
  return data.map((record) => {
    const entry: { [key: string]: any } = {}
    for (let index = 0; index < record.length; index++) {
      entry[labels[index]] = record[index]
    }
    return entry
  })
}
|
<gh_stars>1-10
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.client.lookup.configuration;
import java.util.Arrays;
import java.util.List;
/**
* Defines the interface between the server and client for the CaseLookupService
* Technically not a java interface, but it defines all the constants needed on both sides
* which makes the code more robust.
*/
public abstract class CaseLookupConfiguration {

    /** Constants-only holder; never instantiated. */
    private CaseLookupConfiguration() { }

    /** Endpoint used to find cases by filter criteria. */
    public static final String URL_FIND_CASES = "gwtFindCases";
    /** Endpoint used for free-text case search. */
    public static final String URL_SEARCH_CASES = "gwtSearchCases";
    // NOTE(review): "RESPONSIBILTY" is misspelled, but the constant name is
    // shared between client and server — renaming would break the contract.
    public static final String IN_RESPONSIBILTY = "MyOrTeamResponsibility";
    /** Responsibility filter value: cases assigned to the current user. */
    public static final String MY_VALUES = "MY_VALUES";
    /** Responsibility filter value: cases assigned to the user's team. */
    public static final String TEAM_VALUES = "TEAM_VALUES";
    // Input-only parameters (client -> server).
    public static final String IN_PARTY_ID_FROM = "partyIdFrom";
    public static final String IN_ROLE_TYPE_FROM = "roleTypeIdFrom";
    // Parameters used both as inputs and returned in results.
    public static final String INOUT_CUST_REQUEST_ID = "custRequestId";
    public static final String INOUT_PRIORITY = "priority";
    public static final String INOUT_STATUS_ID = "statusId";
    public static final String INOUT_CUST_REQUEST_TYPE_ID = "custRequestTypeId";
    public static final String INOUT_CUST_REQUEST_NAME = "custRequestName";
    // Output-only fields (server -> client).
    public static final String OUT_STATUS = "status";
    public static final String OUT_CUST_REQUEST_TYPE = "custRequestType";
    public static final String OUT_REASON = "reason";
    public static final String OUT_UPDATED = "updated";
    /** Complete set of fields returned by the lookup service, in column order. */
    public static final List<String> LIST_OUT_FIELDS = Arrays.asList(
        INOUT_PRIORITY,
        INOUT_CUST_REQUEST_ID,
        INOUT_STATUS_ID,
        INOUT_CUST_REQUEST_TYPE_ID,
        INOUT_CUST_REQUEST_NAME,
        OUT_STATUS,
        OUT_CUST_REQUEST_TYPE,
        OUT_REASON,
        OUT_UPDATED
    );
}
|
'use strict';

// Unit spec for the myApp.chat module: verifies the chat component's
// controller can be instantiated by Angular's component controller factory.
describe('myApp.chat module', function() {
  var componentControllerFactory;

  beforeEach(module('myApp.chat'));

  beforeEach(inject(function(_$componentController_) {
    componentControllerFactory = _$componentController_;
  }));

  describe('chat component', function() {
    it('should create chat controller', inject(function() {
      var controller = componentControllerFactory('chat', null, null);
      expect(controller).toBeDefined();
    }));
  });
});
|
import {randomBytes} from 'crypto';
/**
 * Generates a 24-character hexadecimal identifier from 12 random bytes.
 *
 * Note: crypto.randomBytes(size) called without a callback is synchronous
 * and returns a Buffer directly — the previous `await` on its result was a
 * no-op. The async signature is kept for caller compatibility.
 *
 * @returns a promise resolving to a 24-char lowercase hex string.
 */
export async function generateUniqueByte() {
  return randomBytes(12).toString('hex');
}
|
#!/bin/bash
# Removes every qubes-auto-mount helper script from the install prefix.
# Must run as root because the scripts live under /usr/local/bin.
if [[ $EUID -ne 0 ]]; then
	echo "This script must be run as root."
	exit 1
fi
installPath="/usr/local/bin"
# All scripts installed by the companion install script.
script[0]="qam-config-defaults"
script[1]="qam-config"
script[2]="qam-rc-install"
script[3]="qam-rc-uninstall"
script[4]="qam-uninstall"
script[5]="qam-startup"
script[6]="qubes-auto-mount"
script[7]="qubes-auto-unmount"
script[8]="qam-update-from-git"
# Fixed: quote the array expansion so entries are never word-split or
# glob-expanded.
for i in "${script[@]}"
do
	if [ -x "$installPath/$i" ]; then
		echo "Uninstalling $i script..."
		rm "$installPath/$i"
	fi
done
echo "All scripts uninstalled."
var fs = require('fs');
var readline = require('readline');
var google = require('googleapis');
var googleAuth = require('google-auth-library');
// If modifying these scopes, delete your previously saved credentials
// at ~/.credentials/admin-reports_v1-nodejs-quickstart.json
var SCOPES = ['https://www.googleapis.com/auth/admin.reports.audit.readonly','https://www.googleapis.com/auth/admin.reports.usage.readonly'];
// Directory and file where the OAuth token is cached between runs.
var TOKEN_DIR = (process.env.HOME || process.env.HOMEPATH ||
    process.env.USERPROFILE) + '/.credentials/';
var TOKEN_PATH = TOKEN_DIR + 'admin-reports_v1-nodejs-quickstart.json';
// Webhook channel time-to-live, in seconds. NOTE(review): kept as a string;
// the arithmetic at the bottom of the file relies on JS numeric coercion —
// confirm this is intended.
var TTL_FOR_WEB_HOOK = '3600';//'21600';
// Generate a v4 UUID (random)
var uuid = require('node-uuid');
var CURRENT_UUID = uuid.v4();
// var repeat = require('repeat');
// var Repeat = repeat;
/**
 * Runs `callback` immediately, then repeatedly every `seconds` seconds.
 * Returns the interval handle so the caller can cancel it with clearInterval.
 */
function startInterval(callback, seconds) {
  var periodMillis = seconds * 1000;
  callback();
  return setInterval(callback, periodMillis);
}
/**
 * Loads the OAuth client secret from disk, authorizes against the Google
 * Admin Reports API, and (re)registers the login-event webhook watcher.
 * Called periodically so the watch channel is renewed before its TTL expires.
 */
function callGoogleLoginWatcher() {
  // Load client secrets from a local file.
  fs.readFile('client_secret.json', function processClientSecrets(err, content) {
    if (err) {
      console.log('Error loading client secret file: ' + err);
      return;
    }
    // Authorize a client with the loaded credentials, then call the
    // Reports API.
    authorize(JSON.parse(content), watchLoginEvents);
  });
}
/**
 * Create an OAuth2 client with the given credentials, and then execute the
 * given callback function.
 *
 * Uses the token cached at TOKEN_PATH when present; otherwise starts the
 * interactive consent flow via getNewToken().
 *
 * @param {Object} credentials The authorization client credentials.
 * @param {function} callback The callback to call with the authorized client.
 */
function authorize(credentials, callback) {
  var clientSecret = credentials.installed.client_secret;
  var clientId = credentials.installed.client_id;
  var redirectUrl = credentials.installed.redirect_uris[0];
  var auth = new googleAuth();
  var oauth2Client = new auth.OAuth2(clientId, clientSecret, redirectUrl);
  // Check if we have previously stored a token.
  fs.readFile(TOKEN_PATH, function(err, token) {
    if (err) {
      // No cached token — run the interactive flow.
      getNewToken(oauth2Client, callback);
    } else {
      oauth2Client.credentials = JSON.parse(token);
      callback(oauth2Client);
    }
  });
}
/**
 * Get and store new token after prompting for user authorization, and then
 * execute the given callback with the authorized OAuth2 client.
 *
 * Prints a consent URL, reads the resulting code from stdin, exchanges it
 * for a token, and caches the token via storeToken().
 *
 * @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get token for.
 * @param {getEventsCallback} callback The callback to call with the authorized
 *     client.
 */
function getNewToken(oauth2Client, callback) {
  var authUrl = oauth2Client.generateAuthUrl({
    access_type: 'offline', // offline so a refresh token is issued
    scope: SCOPES
  });
  console.log('Authorize this app by visiting this url: ', authUrl);
  var rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  rl.question('Enter the code from that page here: ', function(code) {
    rl.close();
    oauth2Client.getToken(code, function(err, token) {
      if (err) {
        console.log('Error while trying to retrieve access token', err);
        return;
      }
      oauth2Client.credentials = token;
      storeToken(token);
      callback(oauth2Client);
    });
  });
}
/**
 * Store token to disk be used in later program executions.
 *
 * @param {Object} token The token to store to disk.
 */
function storeToken(token) {
  try {
    fs.mkdirSync(TOKEN_DIR);
  } catch (err) {
    // The credentials directory already existing is fine; anything else is fatal.
    if (err.code != 'EEXIST') {
      throw err;
    }
  }
  // Fixed: fs.writeFile without a callback throws on Node >= 10 and silently
  // dropped write errors on older versions; log success/failure explicitly.
  fs.writeFile(TOKEN_PATH, JSON.stringify(token), function(err) {
    if (err) {
      console.log('Error storing token to ' + TOKEN_PATH + ': ' + err);
      return;
    }
    console.log('Token stored to ' + TOKEN_PATH);
  });
}
/**
 * Lists the last 10 login events for the domain.
 *
 * Currently only dumps the raw API response to the console; the commented
 * block below shows the intended per-activity formatting.
 *
 * @param {google.auth.OAuth2} auth An authorized OAuth2 client.
 */
function listLoginEvents(auth) {
  var service = google.admin('reports_v1');
  service.activities.list({
    auth: auth,
    userKey:'all',              // all users in the domain
    applicationName:'login',    // login-event activity stream
    maxResults: 10,
  }, function(err, response) {
    if (err) {
      console.log('The API returned an error: ' + err);
      return;
    }
    console.log(JSON.stringify(response))
    // var activities = response.items;
    // if (activities.length == 0) {
    //   console.log('No logins found.');
    // } else {
    //   console.log('Logins:');
    //   for (var i = 0; i < activities.length; i++) {
    //     var activity = activities[i];
    //     console.log(activity.JSON);
    //   }
    // }
  });
}
/**
 * Create webhook watcher for Google login events
 *
 * Registers a push-notification channel so Google POSTs login activity to
 * the address below until the channel's TTL expires.
 *
 * @param {google.auth.OAuth2} auth An authorized OAuth2 client.
 */
function watchLoginEvents(auth) {
  var service = google.admin('reports_v1');
  //var uuid = require('node-uuid');
  // Fresh channel id for each registration. NOTE(review): this local
  // intentionally shadows the module-level CURRENT_UUID — confirm the
  // module-level one is still needed.
  var CURRENT_UUID = uuid.v4();
  console.log(Date.now() + ` Calling web_hook. uuid: ${CURRENT_UUID}`)
  var data = {
    auth: auth,
    userKey: 'all',
    //singleEvents: true,
    applicationName:'login',
    orderBy: 'startTime',
    resource: {
      id: CURRENT_UUID,
      //token: 'email='+_token.provider_email,
      // Endpoint that receives the pushed events (the express app below).
      address: 'https://nsut-dev-nodejs01.nsuok.edu/',
      type: 'web_hook',
      params: {
        ttl: TTL_FOR_WEB_HOOK
      }
    }
  };
  service.activities.watch(data, function(err, response) {
    if (err) {
      //logging.info(`watch api error ${err}`);
      console.error('The API returned an error: ' + JSON.stringify(err));
      //console.log(JSON.stringify(response));
      return;
    }
    //apiCount += 1;
    //jsonOutput = JSON.stringify(response);
    //logging.info(`JSON response ${response}`);
    console.log(Date.now() + ` now watching for logins: ${JSON.stringify(response)}`);
  });
}
// end watch logins
//Receive data from JSON POST and insert into MongoDB
var express = require('express'),
    bodyParser = require('body-parser'),
    app = express(),
    port = 8080;
var MongoClient = require('mongodb').MongoClient
var db;
var moment = require('moment');
var logger = require('morgan');
var path = require('path')
//Establish Connection
MongoClient.connect('mongodb://nsut-dev-nodejs01.nsuok.edu:27017/mydb', function (err, database) {
  if (err)
    throw err
  else
  {
    db = database;
    console.log('Connected to MongoDB');
    //Start app only after connection is ready
    app.listen(port);
  }
});
//calls google login watcher 100 seconds before TTL ends
//startInterval(callGoogleLoginWatcher,TTL_FOR_WEB_HOOK - 100)
// NOTE(review): TTL_FOR_WEB_HOOK is a string; `- 100` coerces it to a number.
startInterval(callGoogleLoginWatcher,TTL_FOR_WEB_HOOK - 100)
// create a write stream (in append mode)
//var accessLogStream = fs.createWriteStream(path.join(__dirname, 'access.log'), {flags: 'a'})
app.use(logger('combined'));
app.use(bodyParser.json())
// Receives pushed login events from Google and stores them, deduplicating on
// the event's unique qualifier.
// NOTE(review): req.body.id.uniqueQualifier will throw if a request body has
// no `id` field — confirm Google always sends one.
app.post('/', function (req, res) {
  JSON.stringify(req.body)
  // Insert JSON straight into MongoDB
  var date = moment();
  // Stamp the document with its insertion time.
  req.body["inserted_dt"] = date.toISOString();
  //console.log(date.toISOString() + ` Message Headers: ${JSON.stringify(req.headers)}`);
  //console.log(date.toISOString() + ` Message Headers: ${req.body.ipAddress}`);
  db.collection('googleLogins', function(err, collection) {
    collection.count({ "id.uniqueQualifier": req.body.id.uniqueQualifier }, function (err, count) {
      if (count>0) {
        // Duplicate event: acknowledge without inserting.
        //console.log(date.toISOString() + ` Item Already exists in mongodb, will not insert duplicate: ${JSON.stringify(req.body)}`)
        res.status(200).json('Success: true');
      }
      //insert recor
      else {
        collection.insert(req.body, function (err, result) {
          if (err) {
            res.status(500).json(`error: ${JSON.stringify(err)}`);
            console.log(date.toISOString() + ` Failed to insert into mongodb Error: ${JSON.stringify(err)}`)
          }
          else {
            res.status(200).json('Success: true');
            //console.log(date.toISOString() + ` inserted into mongodb: Result: ${JSON.stringify(result)}`)
          }
        });
      }
    });
  });
});
app.get('/', function (req, res) {
  res.send('Post only please.')
})
package models;
import javax.persistence.Entity;
import javax.persistence.Id;
import java.time.LocalDate;
/**
 * JPA entity representing a trip request that has been submitted for approval.
 */
@Entity
public class SubmittedTrip
{
    @Id private int reqTripId;   // primary key of the trip request
    private int userId;          // id of the requesting user
    private String firstName;
    private String lastName;
    private LocalDate startDate;
    private LocalDate endDate;
    private String tripPurpose;

    /**
     * No-argument constructor required by the JPA specification so the
     * persistence provider can instantiate the entity reflectively.
     */
    protected SubmittedTrip()
    {
    }

    /** Creates a fully-populated submitted trip. */
    public SubmittedTrip(int reqTripId, int userId, String firstName, String lastName, LocalDate startDate, LocalDate endDate, String tripPurpose)
    {
        this.reqTripId = reqTripId;
        this.userId = userId;
        this.firstName = firstName;
        this.lastName = lastName;
        this.startDate = startDate;
        this.endDate = endDate;
        this.tripPurpose = tripPurpose;
    }

    public int getReqTripId()
    {
        return reqTripId;
    }

    public void setReqTripId(int reqTripId)
    {
        this.reqTripId = reqTripId;
    }

    public int getUserId()
    {
        return userId;
    }

    public void setUserId(int userId)
    {
        this.userId = userId;
    }

    public String getFirstName()
    {
        return firstName;
    }

    public void setFirstName(String firstName)
    {
        this.firstName = firstName;
    }

    public String getLastName()
    {
        return lastName;
    }

    public void setLastName(String lastName)
    {
        this.lastName = lastName;
    }

    public LocalDate getStartDate()
    {
        return startDate;
    }

    public void setStartDate(LocalDate startDate)
    {
        this.startDate = startDate;
    }

    public LocalDate getEndDate()
    {
        return endDate;
    }

    public void setEndDate(LocalDate endDate)
    {
        this.endDate = endDate;
    }

    public String getTripPurpose()
    {
        return tripPurpose;
    }

    public void setTripPurpose(String tripPurpose)
    {
        this.tripPurpose = tripPurpose;
    }
}
|
<gh_stars>1-10
package io.syndesis.qe.endpoints;
import io.syndesis.qe.endpoint.Constants;
import io.syndesis.qe.endpoint.client.EndpointClient;
import io.syndesis.qe.resource.impl.PublicOauthProxy;
import io.syndesis.qe.utils.PublicApiUtils;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import lombok.extern.slf4j.Slf4j;
/**
* Abstract class for public endpoints
*/
@Slf4j
public abstract class PublicEndpoint {
    // Base path of every public endpoint; the constructor appends the
    // concrete endpoint's suffix to it.
    protected String rootEndPoint = "/public";
    // NOTE(review): static but reassigned on every construction — confirm
    // EndpointClient.getClient() is idempotent / returns a shared client.
    private static Client client;
    // Headers attached to every request built through createInvocation().
    private MultivaluedMap<String, Object> COMMON_HEADERS = new MultivaluedHashMap<>();

    /**
     * Builds the endpoint: acquires the shared REST client, populates the
     * common auth/forwarding headers, and appends the endpoint suffix to
     * the public root path.
     */
    public PublicEndpoint(String endpoint) {
        client = EndpointClient.getClient();
        COMMON_HEADERS.add("X-Forwarded-User", "pista");
        COMMON_HEADERS.add("X-Forwarded-Access-Token", "<PASSWORD>");
        COMMON_HEADERS.add("SYNDESIS-XSRF-TOKEN", "awesome");
        COMMON_HEADERS.add("Authorization", "Bearer " + PublicApiUtils.getPublicToken());
        rootEndPoint += endpoint;
    }

    /** Returns the absolute HTTPS URL for the given public endpoint path. */
    String getWholeUrl(String publicEndpointUrl) {
        return String.format("https://%s%s%s", PublicOauthProxy.PUBLIC_API_PROXY_ROUTE, Constants.API_PATH, publicEndpointUrl);
    }

    /** Prepares a JSON request invocation with the common headers applied. */
    Invocation.Builder createInvocation(String url) {
        log.info(String.format("Creating invocation for url %s", url));
        return client.target(url)
            .request(MediaType.APPLICATION_JSON)
            .headers(COMMON_HEADERS);
    }
}
|
<filename>gateway/views.py<gh_stars>0
import json
from dateutil import tz
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import LoginView, LogoutView
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.views.generic import TemplateView
from django.views.generic.detail import BaseDetailView, DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView
from django_celery_results.models import TaskResult
from django_datatables_view.base_datatable_view import BaseDatatableView
from rest_framework import status
from rest_framework.generics import RetrieveAPIView
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
from .mixins import JSONResponseMixin
from .models import Application, RequestLog, ServiceRegistry, Source, User
from .serializers import ServiceRegistrySerializer
from .service_library import check_service_auth, send_service_request
from .views_library import get_health_check_status
# Fields editable on Application create/update forms.
applications_update_fields = ['name', 'app_host', 'app_port', 'health_check_path']
# Fields editable on ServiceRegistry create/update forms.
services_registry_fields = [
    'name',
    'application',
    'description',
    'external_uri',
    'service_route',
    'plugin',
    'sources',
    'is_private',
    'method',
]
class Gateway(APIView):
    """Pass-through endpoint that routes API calls to registered services.

    Resolves a ServiceRegistry entry by external URI and HTTP method, checks
    auth and active flags, then forwards the request via the service library.
    """
    authentication_classes = ()
    renderer_classes = (JSONRenderer,)
    # NOTE(review): class-level mutable dict; it is also the bound default of
    # `bad_request`'s `request` parameter below (evaluated once at class
    # creation). Kept for backward compatibility.
    request = {}

    def bad_url(self, request):
        """
        Checks to ensure URL is correctly formatted.
        Expects path_info variable to be `api/{external uri}/{service_route}`
        """
        path = request.path_info.split('/')
        # operation() reads path[2], so at least three segments are required.
        # The previous `len(path) < 2` check let paths like `/api` through and
        # caused an IndexError downstream.
        if len(path) < 3:
            return True
        return False

    def operation(self, request):
        """Resolve the target service for `request` and forward the call."""
        # Check URL format
        if self.bad_url(request):
            return self.bad_request(request=request, msg="No URL path.")
        external_uri = request.path_info.split('/')[2]
        # Ensures that exactly one ServiceRegistry object matches the URI path and method.
        try:
            registry = ServiceRegistry.objects.get(external_uri=external_uri, method=request.method)
        except ServiceRegistry.DoesNotExist:
            return self.bad_request(request=request,
                                    msg="No service registry matching path {} and method {}."
                                    .format(external_uri, request.method))
        except ServiceRegistry.MultipleObjectsReturned:
            return self.bad_request(request=request,
                                    msg="More than one service registry matching path {} and method {}."
                                    .format(external_uri, request.method))
        # Checks authentication
        valid, msg = check_service_auth(registry, request)
        if not valid:
            return self.bad_request(service=registry, request=request, msg=msg)
        # Checks if both service and system are active
        if not registry.service_active():
            return self.bad_request(registry, request, msg="Service {} cannot be executed.".format(registry))
        res = send_service_request(registry, request)
        data = {'SUCCESS': 0}
        if res:
            data['SUCCESS'] = 1
        return Response(data=data)

    def bad_request(self, service=None, request=request, msg="Bad Request."):
        """Log the failed call and return a 400 response carrying `msg`."""
        RequestLog.objects.create(service=service, status_code=status.HTTP_400_BAD_REQUEST, request_url=request.META['REMOTE_ADDR'])
        return Response({"detail": msg}, status=status.HTTP_400_BAD_REQUEST)

    # All HTTP verbs funnel into the same operation().
    def get(self, request):
        return self.operation(request)

    def post(self, request):
        return self.operation(request)

    def put(self, request):
        return self.operation(request)

    def patch(self, request):
        return self.operation(request)

    def delete(self, request):
        return self.operation(request)
class SplashView(TemplateView):
    """Dashboard landing page: applications, services and latest failures."""
    template_name = "gateway/splash.html"

    def get_context_data(self, **kwargs):
        """Assemble the dashboard context and annotate live health checks."""
        context = super().get_context_data(**kwargs)
        applications = Application.objects.all().order_by('name')
        context['applications'] = applications
        context['services'] = ServiceRegistry.objects.exclude(application__name='Pisces')
        context['recent_errors'] = RequestLog.objects.exclude(
            task_result__status='SUCCESS').order_by('-task_result__date_done')[:5]
        # Annotate each application instance with its current health status.
        for application in applications:
            application.health_check_status = get_health_check_status(application)
        return context
@method_decorator(login_required, name='dispatch')
class ServicesAddView(CreateView):
    """Create a new ServiceRegistry entry (login required)."""
    template_name = "gateway/add.html"
    model = ServiceRegistry
    fields = services_registry_fields


class ServicesListView(ListView):
    """List all registered services."""
    template_name = "gateway/services_list.html"
    model = ServiceRegistry


class ServicesDetailView(DetailView):
    """Show one service plus its five most recent request results."""
    template_name = "gateway/services_detail.html"
    model = ServiceRegistry

    def get_context_data(self, **kwargs):
        context = super(ServicesDetailView, self).get_context_data(**kwargs)
        # Latest results first; limit to five for the detail-page widget.
        context['service_results'] = RequestLog.objects.filter(service=self.object.pk).order_by('-task_result__date_done')[:5]
        return context


class ServicesJSONView(RetrieveAPIView):
    """Serialized (JSON) representation of a single service."""
    model = ServiceRegistry
    queryset = ServiceRegistry.objects.all()
    serializer_class = ServiceRegistrySerializer


@method_decorator(login_required, name='dispatch')
class ServicesUpdateView(UpdateView):
    """Edit a service; additionally exposes the is_active flag."""
    template_name = "gateway/update.html"
    model = ServiceRegistry
    fields = services_registry_fields + ['is_active']


@method_decorator(login_required, name='dispatch')
class ServicesDeleteView(DeleteView):
    """Confirm-and-delete for a service (login required)."""
    template_name = "gateway/delete.html"
    model = ServiceRegistry
    success_url = reverse_lazy('services-list')
class ServicesTriggerView(JSONResponseMixin, BaseDetailView):
    """Manually trigger a service; responds {'SUCCESS': 0|1} as JSON."""
    model = ServiceRegistry

    def render_to_response(self, context, **response_kwargs):
        # NOTE(review): send_service_request is called without a request object
        # here, unlike Gateway.operation — confirm the helper supports that.
        result = send_service_request(self.object)
        data = {'SUCCESS': 0}
        if result:
            data['SUCCESS'] = 1
        return self.render_to_json_response(context=data, **response_kwargs)


class ServicesClearErrorsView(JSONResponseMixin, BaseDetailView):
    """Delete all FAILURE task results for one service; JSON success flag."""
    model = ServiceRegistry

    def render_to_response(self, context, **kwargs):
        try:
            TaskResult.objects.filter(status='FAILURE', request_log__service=self.object).delete()
            data = {'SUCCESS': 1}
        except Exception:
            # Best-effort cleanup: report failure instead of raising.
            data = {'SUCCESS': 0}
        return self.render_to_json_response(context=data, **kwargs)
@method_decorator(login_required, name='dispatch')
class ApplicationsAddView(CreateView):
    """Create a new Application (login required)."""
    template_name = "gateway/add.html"
    model = Application
    fields = applications_update_fields
class ApplicationsDetailView(DetailView):
    """Show a single Application with its live health-check status."""
    template_name = "gateway/applications_detail.html"
    model = Application

    def get_context_data(self, *args, **kwargs):
        # Fixed: `super(DetailView, self)` started MRO lookup past DetailView
        # itself; use zero-argument super() as the rest of this module does.
        context = super().get_context_data(*args, **kwargs)
        context['health_check_status'] = get_health_check_status(self.object)
        return context
class ApplicationsListView(ListView):
    """List all Applications, annotating each with live health-check status."""
    template_name = "gateway/applications_list.html"
    model = Application

    def get_context_data(self, *args, **kwargs):
        # Fixed: `super(ListView, self)` started MRO lookup past ListView
        # itself; use zero-argument super() as the rest of this module does.
        context = super().get_context_data(*args, **kwargs)
        for obj in context['object_list']:
            obj.health_check_status = get_health_check_status(obj)
        return context
@method_decorator(login_required, name='dispatch')
class ApplicationsUpdateView(UpdateView):
    """Edit an Application; additionally exposes is_active (login required)."""
    template_name = "gateway/update.html"
    model = Application
    fields = applications_update_fields + ['is_active']


@method_decorator(login_required, name='dispatch')
class ApplicationsDeleteView(DeleteView):
    """Delete an Application (login required)."""
    template_name = "gateway/delete.html"
    model = Application
    success_url = reverse_lazy('applications-list')


class ResultsListView(TemplateView):
    """Container page for the results table; data comes from ResultsDatatableView."""
    template_name = "gateway/results_list.html"
class ResultsDatatableView(BaseDatatableView):
    """Server-side DataTables feed for RequestLog rows."""
    model = RequestLog
    # NOTE(review): 'task_result_status' (single underscore) differs from the
    # 'task_result__*' relation lookups in the other columns — presumably a
    # property/annotation on RequestLog; verify.
    columns = ['async_result_id', 'service__name', 'task_result_status', 'task_result__result', 'task_result__date_done']
    order_columns = ['async_result_id', 'service__name', 'task_result_status', 'task_result__result', 'task_result__date_done']
    max_display_length = 500

    def get_filter_method(self):
        # Case-insensitive substring matching for column filters.
        return self.FILTER_ICONTAINS

    def get_task_result(self, result):
        """Extract a displayable result string, unwrapping exception payloads."""
        task_result = ''
        if result.task_result:
            task_result = result.task_result.result
            # Failures are stored as JSON carrying an 'exc_message' list.
            if 'exc_message' in result.task_result.result:
                task_result = str(json.loads(result.task_result.result).get('exc_message')[0])
        return task_result

    def get_status_display(self, status):
        """Render a status value as a colored label with a FontAwesome icon."""
        status = status if status else "Idle"
        # NOTE(review): any status other than Error/Idle/Success raises
        # KeyError below — confirm task_result_status is limited to these.
        statuses = {
            "Error": ['danger', 'times-circle'],
            "Idle": ['warning', 'circle'],
            "Success": ['success', 'check-circle'],
        }
        return '<span class="text-{}">{} <i class="fa fa-{}"></i></span>'.format(statuses[status][0], status, statuses[status][1])

    def prepare_results(self, qs):
        """Convert queryset rows into the JSON cell arrays DataTables expects."""
        json_data = []
        for result in qs:
            # Re-read to pick up task state changes since the queryset was built.
            result.refresh_from_db()
            async_result_id = result.async_result_id if result.async_result_id else ""
            json_data.append([
                '<a href="' + str(reverse_lazy('results-detail', kwargs={"pk": result.id})) + '">' + async_result_id + '</a>',
                '<a href="' + str(reverse_lazy('services-detail', kwargs={"pk": result.service.id})) + '">' + result.service.full_name + '</a>' if result.service else '',
                self.get_status_display(result.task_result_status),
                '<pre>' + self.get_task_result(result) + '</pre>',
                result.task_result.date_done.astimezone(tz.tzlocal()).strftime('%b %e, %Y %I:%M:%S %p') if result.task_result else '',
            ])
        return json_data
class ResultsDetailView(DetailView):
    """Show a single request log entry."""
    template_name = "gateway/results_detail.html"
    model = RequestLog


@method_decorator(login_required, name='dispatch')
class ResultsDeleteView(DeleteView):
    """Delete a request log entry (login required)."""
    template_name = "gateway/delete.html"
    model = RequestLog
    success_url = reverse_lazy('results-list')


@method_decorator(login_required, name='dispatch')
class SourcesAddView(CreateView):
    """Register a new Source with its API key (login required)."""
    template_name = "gateway/add.html"
    model = Source
    fields = ('user', 'apikey')


class SourcesDetailView(DetailView):
    """Show a single Source."""
    template_name = "gateway/sources_detail.html"
    model = Source


class SourcesListView(ListView):
    """List all Sources."""
    template_name = "gateway/sources_list.html"
    model = Source


@method_decorator(login_required, name='dispatch')
class SourcesUpdateView(UpdateView):
    """Edit a Source (login required)."""
    template_name = "gateway/update.html"
    model = Source
    fields = ('user', 'apikey')


@method_decorator(login_required, name='dispatch')
class SourcesDeleteView(DeleteView):
    """Delete a Source (login required)."""
    template_name = "gateway/delete.html"
    model = Source
    success_url = reverse_lazy('sources-list')


@method_decorator(login_required, name='dispatch')
class UsersAddView(CreateView):
    """Create a gateway user (login required)."""
    template_name = "gateway/users_add.html"
    model = User
    fields = ('username',)


class UsersDetailView(DetailView):
    """Show a single user."""
    template_name = "gateway/users_detail.html"
    model = User


class UsersListView(ListView):
    """List all users."""
    template_name = "gateway/users_list.html"
    model = User


@method_decorator(login_required, name='dispatch')
class UsersUpdateView(UpdateView):
    """Rename a user (login required)."""
    template_name = "gateway/users_update.html"
    model = User
    fields = ('username',)
    success_url = reverse_lazy('users-list')


@method_decorator(login_required, name='dispatch')
class UsersDeleteView(DeleteView):
    """Delete a user (login required)."""
    template_name = "gateway/users_delete.html"
    model = User
    success_url = reverse_lazy('users-list')


class UsersLoginView(LoginView):
    """Login page."""
    template_name = "gateway/users_login.html"


class UsersLogoutView(LogoutView):
    """Logout; redirects back to the dashboard."""
    next_page = reverse_lazy("dashboard")
|
#include <vector>
#include <string>
#include "../include/colors.hpp"
#include "../include/utils.hpp"
#include "../include/descriptors.hpp"
using std::vector;
using std::string;
using std::to_string;
// OS descriptor line: "<product version> <architecture>".
void* os(string& out)
{
    const string version = exec("sw_vers | xargs | awk '{print $2,$4}'");
    const string arch = exec("uname -m");
    out = version + " " + arch;
    return NULL;
}
// Hardware model identifier (sysctl hw.model), e.g. "MacBookPro16,1".
void* host(string& out)
{
    const string model = exec("sysctl -n hw.model");
    out = model;
    return NULL;
}

// Kernel release string (uname -r).
void* kernel(string& out)
{
    const string release = exec("uname -r");
    out = release;
    return NULL;
}
// System uptime formatted as "D day(s), H hour(s), M minute(s)".
// Fixed: removed the unused locals uptimeD/uptimeH/uptimeM.
void* uptime(string& out)
{
    struct timespec time;
    // CLOCK_MONOTONIC counts seconds since boot on macOS.
    clock_gettime(CLOCK_MONOTONIC, &time);
    int dd = time.tv_sec / 60 / 60 / 24;
    int hh = time.tv_sec / 60 / 60 % 24;
    int mm = time.tv_sec / 60 % 60;
    // Pluralize each unit independently.
    string final_uptime = "";
    final_uptime += to_string(dd) + " day" + (dd == 1 ? "" : "s") + ", ";
    final_uptime += to_string(hh) + " hour" + (hh == 1 ? "" : "s") + ", ";
    final_uptime += to_string(mm) + " minute" + (mm == 1 ? "" : "s");
    out = final_uptime;
    return NULL;
}
// Count installed Homebrew packages; "Unknown" when brew is absent.
void* packages(string& out)
{
    // `type` exits 0 when the brew command exists.
    bool has_brew = exec("type brew > /dev/null; echo $?") == "0";
    if (!has_brew)
    {
        out = "Unknown";
        // Fixed: previously fell through and overwrote "Unknown" with the
        // Cellar listing even when brew was not installed.
        return NULL;
    }
    out = exec("ls /usr/local/Cellar/* | grep ':' | wc -l | xargs") + " (brew)";
    return NULL;
}
// Login shell path from $SHELL.
void* shell(string& out)
{
    const string login_shell = exec("echo $SHELL");
    out = login_shell;
    return NULL;
}

// Primary display resolution via the `screenresolution` utility.
void* resolution(string& out)
{
    const string res = exec("screenresolution get 2>&1 | awk '/Display/{printf $6}' | awk -F 'x' '{print $1\"x\"$2}'");
    out = res;
    return NULL;
}

// The macOS desktop environment is always Aqua.
void* de(string& out)
{
    out = "Aqua";
    return NULL;
}

// Terminal emulator name from $TERM_PROGRAM.
void* terminal(string& out)
{
    const string term = exec("echo $TERM_PROGRAM");
    out = term;
    return NULL;
}
// CPU brand string; cached under /Library/Caches/macfetch to avoid
// re-querying sysctl on every run.
void* cpu(string& out)
{
    const string cached = exec("cat 2> /dev/null /Library/Caches/macfetch/cpu");
    if (!cached.empty())
    {
        out = cached;
        return NULL;
    }
    const string info = exec("sysctl -n machdep.cpu.brand_string");
    cache("cpu", info);
    out = info;
    return NULL;
}
// GPU chipset model; cached because system_profiler is slow.
void* gpu(string& out)
{
    const string cached = exec("cat 2> /dev/null /Library/Caches/macfetch/gpu");
    if (!cached.empty())
    {
        out = cached;
        return NULL;
    }
    const string info = exec("system_profiler SPDisplaysDataType 2> /dev/null | awk -F': ' '/^\\ *Chipset Model:/ {printf $2}'");
    cache("gpu", info);
    out = info;
    return NULL;
}
// Installed memory amount; cached because system_profiler is slow.
void* memory(string& out)
{
    string cached = exec("cat 2> /dev/null /Library/Caches/macfetch/memory");
    if (cached.empty())
    {
        // Fixed: the stderr redirection was missing its "2>", so "/dev/null"
        // was being passed to system_profiler as a bogus data-type argument
        // (compare the cpu/gpu commands above).
        string info = exec("system_profiler SPHardwareDataType 2> /dev/null | grep 'Memory:' | awk '{print $2\"\"$3}'");
        cache("memory", info);
        out = info;
    }
    else
    {
        out = cached;
    }
    return NULL;
}
// Battery charge percentage from pmset, e.g. "87%".
void* battery(string& out)
{
    const string charge = exec("pmset -g batt | grep -Eo \"\\d+%\"");
    out = charge;
    return NULL;
}
// Render the eight dark ANSI background colors as colored blocks.
void* dark_colors(string& out)
{
    const vector<string> palette = {BG0, BG1, BG2, BG3, BG4, BG5, BG6, BG7};
    string blocks;
    for (const string& color : palette)
    {
        blocks += color + " " + DEFAULT;
    }
    out = blocks;
    return NULL;
}
// Render the eight bright ANSI background colors as colored blocks.
void* bright_colors(string& out)
{
    const vector<string> palette = {BG8, BG9, BG10, BG11, BG12, BG13, BG14, BG15};
    string blocks;
    for (const string& color : palette)
    {
        blocks += color + " " + DEFAULT;
    }
    out = blocks;
    return NULL;
}
// Placeholder entry that yields an empty line in the output layout.
void* empty(std::string& out)
{
    out.clear();
    return NULL;
}
|
#ifndef LAYER_DIMOP_H
#define LAYER_DIMOP_H

#include "layer.h"

namespace ncnn {

// Layer applying an elementwise binary operation along one dimension.
// NOTE(review): interface mirrors BinaryOp and appears to have been derived
// from it (see the pipeline_binaryop member names) — confirm intent.
class DimOp : public Layer
{
public:
    DimOp();

    virtual int load_param(const ParamDict& pd);

    virtual int forward(const std::vector<Mat>& bottom_blobs, std::vector<Mat>& top_blobs, const Option& opt) const;

    // virtual int forward_inplace(Mat& bottom_top_blob, const Option& opt) const;

#if NCNN_VULKAN
    virtual int create_pipeline();
    virtual int destroy_pipeline();

    virtual int forward(const std::vector<VkMat>& bottom_blobs, std::vector<VkMat>& top_blobs, VkCompute& cmd, const Option& opt) const;

    virtual int forward_inplace(VkMat& bottom_top_blob, VkCompute& cmd, const Option& opt) const;
#endif // NCNN_VULKAN

    // Supported binary operations; the R* variants reverse the operand order.
    enum {
        Operation_ADD = 0,
        Operation_SUB = 1,
        Operation_MUL = 2,
        Operation_DIV = 3,
        Operation_MAX = 4,
        Operation_MIN = 5,
        Operation_POW = 6,
        Operation_RSUB = 7,
        Operation_RDIV = 8
    };

public:
    // param
    int op_type; // one of the Operation_* values
    int dim;     // dimension the operation is applied along

#if NCNN_VULKAN
    Pipeline* pipeline_binaryop;
    Pipeline* pipeline_binaryop_pack4;
#endif // NCNN_VULKAN
};

} // namespace ncnn

#endif // LAYER_DIMOP_H
|
<reponame>yupcheng/yupc-admin-cloud
package com.github.yupc.cache;
import com.github.yupc.utils.SpringUtil;
import org.springframework.data.redis.core.RedisTemplate;
import java.util.Objects;
import java.util.Optional;
import java.util.StringJoiner;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
/**
 * Abstract Redis-backed cache template with a read-through loading strategy.
 *
 * @author yupc
 * @createTime 2017-12-25 14:27
 */
public abstract class BaseRedisCache<K, V> implements Cache<V> {
    private final Supplier<RedisTemplate<K, V>> redisTemplate;

    public BaseRedisCache(Supplier<RedisTemplate<K, V>> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    /**
     * Cache key for the concrete business use case.
     * Convention:
     * [environment=${spring.redis.key-prefix}]:[system=${spring.application.name}]:[business key]
     *
     * @return the fully qualified cache key
     */
    protected abstract K key();

    /**
     * Read from the cache only; returns empty when the key is absent.
     *
     * @return cached value, or empty
     */
    public abstract Optional<V> getCache();

    /** Fallback loader used by the no-argument {@link #get()}. */
    protected abstract Supplier<V> getDefaultSupplier() ;

    protected RedisTemplate<K, V> getRedisTemplate() {
        return redisTemplate.get();
    }

    /**
     * Helper for building cache keys.
     * Convention:
     * [environment=${spring.redis.key-prefix}]:[system=${spring.application.name}]:[business key]
     *
     * @param values only the business-specific key parts
     * @return the joined key
     */
    protected String keyGenerator(CharSequence... values) {
        KeyGenerator bean = SpringUtil.getBean("cacheKeyGenerator", KeyGenerator.class);
        if (bean != null) {
            return new StringJoiner(":")
                    .add(bean.keyPrefix())
                    .add(bean.bizModular())
                    .add(String.join(":", values))
                    .toString();
        } else {
            // No generator configured: fall back to the raw business key.
            return String.join(":", values);
        }
    }

    /**
     * Read from the cache first; on miss, load via the default supplier;
     * finally return empty when neither has a value.
     *
     * @return the value, or empty
     */
    @Override
    public Optional<V> get() {
        return this.get(this.getDefaultSupplier());
    }

    @Override
    public boolean exists() {
        return this.getRedisTemplate().hasKey(this.key());
    }

    @Override
    public void clear() {
        this.getRedisTemplate().delete(this.key());
    }

    /**
     * Set the expiry for this cache entry.
     *
     * @param timeout time-to-live in seconds
     * @return whether the expiry was applied
     */
    public Boolean expire(int timeout) {
        return this.getRedisTemplate().expire(this.key(), timeout, TimeUnit.SECONDS);
    }

    /**
     * Read from the cache first; on miss, load from {@code supplier} and store.
     * NOTE(review): the monitor is class-wide (BaseRedisCache.class), so all
     * subclasses serialize their cache misses on a single lock.
     *
     * @param supplier fallback loader invoked on cache miss
     * @return the value, or empty when the supplier returned null
     */
    @Override
    public Optional<V> get(Supplier<V> supplier) {
        Optional<V> value = this.getCache();
        if (value.isPresent()) {
            return value;
        }
        // Double-checked under the lock to avoid concurrent loads from the DB.
        synchronized (BaseRedisCache.class) {
            value = this.getCache();
            if (value.isPresent()) {
                return value;
            }
            V obj = supplier.get();
            if (Objects.isNull(obj)) {
                return Optional.empty();
            }
            this.set(obj);
            return Optional.of(obj);
        }
    }
}
|
<filename>next.config.js
require('dotenv').config()
const NODE_ENV = process.env.NODE_ENV || 'development'
const ENV_NAME = NODE_ENV
const dev = NODE_ENV === 'development'
const SHOPIFY_API_SHOP_DOMAIN = process.env.SHOPIFY_API_SHOP_DOMAIN
const SHOPIFY_API_VERSION = process.env.SHOPIFY_API_VERSION
const SHOPIFY_API_STOREFRONT_ACCESS_TOKEN = process.env.SHOPIFY_API_STOREFRONT_ACCESS_TOKEN
const SHOPIFY_API_TARGET_COLLECTION_HANDLE = process.env.SHOPIFY_API_TARGET_COLLECTION_HANDLE
module.exports = {
env: {
ENV_NAME,
SHOPIFY_API_SHOP_DOMAIN,
SHOPIFY_API_VERSION,
SHOPIFY_API_STOREFRONT_ACCESS_TOKEN,
SHOPIFY_API_TARGET_COLLECTION_HANDLE,
BASE_URL: process.env.BASE_URL || '',
},
}
|
import Alamofire

internal extension DataResponse {
    /// Decode the response body into `T` using the supplied decoder.
    /// - Parameter decoder: a configured `JSONDecoder` (date/key strategies etc.).
    /// - Returns: `.success` with the decoded model, or `.failure` with the
    ///   decoding error.
    /// NOTE(review): passes `self.data` straight to the decoder — confirm it
    /// is non-optional in the Alamofire version this project pins.
    func decodeModel<T: Decodable>(with decoder: JSONDecoder) -> Result<T, Error> {
        do {
            let decodedModel = try decoder.decode(T.self, from: self.data)
            return .success(decodedModel)
        } catch {
            return .failure(error)
        }
    }
}
#!/bin/bash
# Container entrypoint: warm up Laravel caches, enable Apache URL rewriting,
# then hand off to the container CMD.
composer dump-autoload --optimize
php artisan config:cache
php artisan route:cache
php artisan optimize
a2enmod rewrite
# exec so the CMD becomes PID 1 and receives signals directly.
exec "$@"
|
import {Point} from '../../../math/geometry/point'
// import {Assert} from '../../../utils/assert'
import {AxisEdge} from './AxisEdge'
/** Groups the axis edges that share a common source point. */
export class AxisEdgesContainer {
  /** The shared source point of every edge in this container. */
  Source: Point

  /// it is not necessarely the upper point but some point above the source
  UpPoint: Point

  edges: Set<AxisEdge> = new Set<AxisEdge>()

  constructor(source: Point) {
    this.Source = source
  }

  get Edges(): Iterable<AxisEdge> {
    return this.edges
  }

  /** Register an edge and remember its target as the point above the source. */
  AddEdge(edge: AxisEdge) {
    this.UpPoint = edge.TargetPoint
    /*Assert.assert(!this.edges.has(edge))*/
    this.edges.add(edge)
  }

  /** Remove a previously registered edge. */
  RemoveAxis(edge: AxisEdge) {
    /*Assert.assert(this.edges.has(edge))*/
    this.edges.delete(edge)
  }

  /** True when no edges remain in the container. */
  IsEmpty(): boolean {
    return this.edges.size === 0
  }
}
|
import pandas as pd

# First roster of names and ages.
data_1 = {'Name': ['John', 'Paul', 'George', 'Ringo'],
          'Age': [30, 25, 27, 28]}
# Second roster with the same columns.
data_2 = {'Name': ['Julia', 'Yoko', 'Martha', 'Tina'],
          'Age': [20, 40, 22, 32]}

# Build one frame per roster, then stack them (original indices preserved).
df1, df2 = pd.DataFrame(data_1), pd.DataFrame(data_2)
df = pd.concat([df1, df2])
# Iteration 1: decode the toy corpus with the 4-feature decoder (stack 100, k-best 20).
python decoder_read4feat.py -i /usr/shared/CMPT/nlp-class/project/toy/train.cn -t /usr/shared/CMPT/nlp-class/project/toy/phrase-table/phrase_table.out -l /usr/shared/CMPT/nlp-class/project/lm/en.tiny.3g.arpa -s 100 -k 20 > output/toy_it1_s100k20.output 2> errortoy1.log
# Learn reranker weights against the English reference.
python reranker.py -r /usr/shared/CMPT/nlp-class/project/toy/train.en -s /usr/shared/CMPT/nlp-class/project/toy/train.cn -n output/toy_it1_s100k20.output > output/toy_it1_s100k20.weight 2>errortoy2.log
# Apply the learned weights to re-rank the k-best list.
python rerank.py -n output/toy_it1_s100k20.output -w output/toy_it1_s100k20.weight > output/toy_it1_s100k20weight.out 2> errortoy3.log
#python score-reranker.py -r /usr/shared/CMPT/nlp-class/project/toy/train.en < toy_it1_s100k20weight.output
# Iteration 2: decode again, this time using the learned weights.
python decoder_read4feat.py -i /usr/shared/CMPT/nlp-class/project/toy/train.cn -t /usr/shared/CMPT/nlp-class/project/toy/phrase-table/phrase_table.out -l /usr/shared/CMPT/nlp-class/project/lm/en.tiny.3g.arpa -s 100 -k 20 -w output/toy_it1_s100k20.weight > output/toy_it2_s100k20.output 2> errortoy4.log
#!/bin/bash
# Make the parent directory importable by the python submodels.
export PYTHONPATH="$(dirname "$PWD")"

# setting variables
NUM_INSTANCES=3   # number of coupled macro/micro instance pairs
cores=1           # cores per submodel; >1 switches the run command to mpirun
# INPUT_DATA_DIR="ssudan-mscale"
INPUT_DATA_DIR="ssudan-mscale-test"
RUN_PYTHON_FILE="run_mscale.py"
LOG_EXCHANGE_DATA="True"
COUPLING_TYPE="file"
WEATHER_COUPLING="False"

#-------------------------------------------------------
# parse input arguments
#-------------------------------------------------------
# cores=${cores:-1}
# Accept --NAME VALUE pairs; each must match a pre-declared variable above.
while [ $# -gt 0 ]; do
    if [ "$1" == "-h" ] || [ "$1" == "--help" ]; then
        echo -e "\nUsage:"
        # Fixed: usage printed a doubled extension ("run_file_coupling.sh.sh").
        echo -e "\t ./run_file_coupling.sh < --cores N > < --NUM_INSTANCES M >\n"
        exit 1
    fi
    if [[ $1 == *"--"* ]]; then
        param="${1/--/}"
        # ${!param} (indirect expansion) is non-empty only for known variables.
        if [ -n "${!param}" ]; then
            declare $param="$2"
        else
            echo -e "\nError !!! Input arg --$param is not valid\n"
            echo -e "you can use -h or --help option to see the valid input arguments\n"
            exit 1
        fi
    fi
    shift
done
#-------------------------------------------------------
# convert to lowercase
#-------------------------------------------------------
# Fixed typo in the function name: covert -> convert (only used below).
function convert_to_lowercase()
{
    echo $(echo "$1" | tr "[:upper:]" "[:lower:]")
}

# Normalize boolean-ish inputs so later comparisons against "true" work.
LOG_EXCHANGE_DATA=$(convert_to_lowercase $LOG_EXCHANGE_DATA)
COUPLING_TYPE=$(convert_to_lowercase $COUPLING_TYPE)
WEATHER_COUPLING=$(convert_to_lowercase $WEATHER_COUPLING)
#-------------------------------------------------------
# set run_command variable
#-------------------------------------------------------
# Use MPI when more than one core is requested, plain python3 otherwise.
function set_run_command()
{
    run_command=""
    if [ "$cores" -gt "1" ];
    then
        run_command="mpirun -n $cores python3"
    else
        run_command="python3"
    fi
}

set_run_command
#-------------------------------------------------------
# clean output directory
#-------------------------------------------------------
# Weather-coupled runs write under out/weather/<type>, plain runs under out/<type>.
if [ $WEATHER_COUPLING = "true" ];
then
    out_base="out/weather/$COUPLING_TYPE"
else
    out_base="out/$COUPLING_TYPE"
fi
rm -rf $out_base/*
for sub in coupled macro micro log_exchange_data plot_exchange_data
do
    mkdir -p $out_base/$sub
done
#-------------------------------------------------------
# return common input arguments
#-------------------------------------------------------
# Echo the argument list shared by macro and micro submodels.
# NOTE: reads the loop variable $i (instance index) from the caller's scope.
function ret_common_args()
{
    local common_args="--data_dir=$INPUT_DATA_DIR \
    --log_exchange_data $LOG_EXCHANGE_DATA \
    --instance_index $i \
    --coupling_type $COUPLING_TYPE \
    --num_instances $NUM_INSTANCES \
    --weather_coupling $WEATHER_COUPLING"
    echo $common_args
}
start_time="$(date -u +%s.%N)"

# index should be started from 0
# Launch macro and micro submodels for every instance in the background,
# then wait for all pairs to finish.
for i in $(seq 0 $(($NUM_INSTANCES-1)))
do
    common_args="$(ret_common_args)"
    $run_command $RUN_PYTHON_FILE --submodel macro $common_args &
    $run_command $RUN_PYTHON_FILE --submodel micro $common_args &
done
wait

end_time="$(date -u +%s.%N)"
# Wall-clock runtime in floating-point seconds (via bc).
elapsed="$(bc <<<"$end_time-$start_time")"
#-------------------------------------------------------
# reporting
#-------------------------------------------------------
#printf '=%.0s' {1..70}
for i in {1..70}; do echo -n =; done
echo -e "\nExecuted commands :"
for i in $(seq 0 $(($NUM_INSTANCES-1)))
do
    # Fixed: the echoed command now includes --log_exchange_data, matching
    # the arguments actually passed by ret_common_args during execution.
    common_args="--data_dir=$INPUT_DATA_DIR \
    --log_exchange_data $LOG_EXCHANGE_DATA \
    --instance_index $i \
    --coupling_type $COUPLING_TYPE \
    --num_instances $NUM_INSTANCES \
    --weather_coupling $WEATHER_COUPLING"
    echo -e "\t $run_command $RUN_PYTHON_FILE --submodel macro $common_args"
    echo -e "\t $run_command $RUN_PYTHON_FILE --submodel micro $common_args"
done

echo -e "\n\nTotal Executing Time = $elapsed seconds\n"
#!/bin/bash
#
# Copyright (c) 2021 The Flatcar Maintainers.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# CI automation common functions.

source ci-automation/ci-config.env
# Allow overriding the compressor; default to pigz (parallel gzip).
: ${PIGZ:=pigz}

# set up author and email so git does not complain when tagging
git -C . config user.name "${CI_GIT_AUTHOR}"
git -C . config user.email "${CI_GIT_EMAIL}"

# Initialize and update all git submodules of the current checkout.
function init_submodules() {
    git submodule init
    git submodule update
}
# --
# --
# Check out a given commit-ish in one of the third-party submodules.
#  $1 - submodule name (under sdk_container/src/third_party/)
#  $2 - commit-ish to check out
function update_submodule() {
    local submodule="$1"
    local commit_ish="$2"

    cd "sdk_container/src/third_party/${submodule}"
    git fetch --all --tags
    git checkout "${commit_ish}"
    cd -
}
# --

# Validate a Flatcar version string; exits with an error unless it starts
# with a known channel prefix.
function check_version_string() {
    local version="$1"

    if ! echo "${version}" | grep -qE '^(main-|alpha-|beta-|stable-|lts-)' ; then
        echo "ERROR: invalid version '${version}', must start with 'main-', 'alpha-', 'beta-', 'stable-', or 'lts-'"
        exit 1
    fi
}
# --

# Pin the coreos-overlay and portage-stable submodules to given commits.
function update_submodules() {
    local coreos_git="$1"
    local portage_git="$2"

    init_submodules
    update_submodule "coreos-overlay" "${coreos_git}"
    update_submodule "portage-stable" "${portage_git}"
}
# --

# Commit submodule/version changes, tag the version, and push the tag.
# When pushing fails because the tag exists remotely, succeed only if the
# remote tag's contents match the local tree.
function update_and_push_version() {
    local version="$1"

    # Add and commit local changes
    git add "sdk_container/src/third_party/coreos-overlay"
    git add "sdk_container/src/third_party/portage-stable"
    git add "sdk_container/.repo/manifests/version.txt"
    git commit --allow-empty -m "New version: ${version}"
    git tag -f "${version}"
    if git push origin "${version}" ; then
        return
    fi
    # Push (above) may fail because a tag already exists.
    # We check for tag presence, and for the difference
    # between local and remote, and bail
    # only if the remote / local contents differ.
    # Remove local tag, (re-)fetch remote tags
    git tag -d "${version}"
    # refresh tree, let origin overwrite local tags
    git fetch --all --tags --force
    # This will return != 0 if
    #  - the remote tag does not exist (rc: 127)
    #  - the remote tag has changes compared to the local tree (rc: 1)
    git diff --exit-code "${version}"
}
# --
# --
# Download a file from the build cache into a local directory.
#  $1 - remote path (relative to the buildcache server root)
#  $2 - destination directory (created if missing)
function copy_from_buildcache() {
    local what="$1"
    local where_to="$2"
    mkdir -p "$where_to"
    curl --verbose --fail --silent --show-error --location --retry-delay 1 --retry 60 \
        --retry-connrefused --retry-max-time 60 --connect-timeout 20 \
        --remote-name --output-dir "${where_to}" "https://${BUILDCACHE_SERVER}/${what}"
}
# --

# Print a fully non-interactive ssh command line (no prompts, no host key checks).
function gen_sshcmd() {
    echo -n "ssh -o BatchMode=yes"
    echo -n " -o StrictHostKeyChecking=no"
    echo -n " -o UserKnownHostsFile=/dev/null"
    echo " -o NumberOfPasswordPrompts=0"
}
# --

# Upload files to the build cache via rsync-over-ssh.
#  $1   - remote path suffix (appended to BUILDCACHE_PATH_PREFIX)
#  $2.. - local files to upload
function copy_to_buildcache() {
    local remote_path="${BUILDCACHE_PATH_PREFIX}/$1"
    shift

    local sshcmd="$(gen_sshcmd)"

    $sshcmd "${BUILDCACHE_USER}@${BUILDCACHE_SERVER}" \
        "mkdir -p ${remote_path}"

    rsync -Pav -e "${sshcmd}" "$@" \
        "${BUILDCACHE_USER}@${BUILDCACHE_SERVER}:${remote_path}"
}
# --

# Succeed iff the docker image name:version exists in the local daemon.
function image_exists_locally() {
    local name="$1"
    local version="$2"
    local image="${name}:${version}"
    local image_exists="$(docker images "${image}" \
        --no-trunc --format '{{.Repository}}:{{.Tag}}')"

    [ "${image}" = "${image_exists}" ]
}
# --

# Derive docker-safe image version string from vernum.
# (docker tags may not contain '+', so it is replaced with '-'.)
function vernum_to_docker_image_version() {
    local vernum="$1"
    echo "$vernum" | sed 's/[+]/-/g'
}
# --

# Return the full name (repo+name+tag) of an image. Useful for SDK images
# pulled from the registry (which have the registry pre-pended)
function docker_image_fullname() {
    local image="$1"
    local version="$2"

    docker images --no-trunc --format '{{.Repository}}:{{.Tag}}' \
        | grep -E "^(${CONTAINER_REGISTRY}/)*${image}:${version}$"
}
# --
# --
# Save a local docker image as a pigz-compressed tarball and upload it to
# the build cache under containers/<version>/.
function docker_image_to_buildcache() {
    local image="$1"
    local version="$2"

    # strip potential container registry prefix
    local tarball="$(basename "$image")-${version}.tar.gz"

    docker save "${image}":"${version}" | $PIGZ -c > "${tarball}"
    copy_to_buildcache "containers/${version}" "${tarball}"
}
# --

# Commit a running container to an image, then push it to the build cache.
function docker_commit_to_buildcache() {
    local container="$1"
    local image_name="$2"
    local image_version="$3"

    docker commit "${container}" "${image_name}:${image_version}"
    docker_image_to_buildcache "${image_name}" "${image_version}"
}
# --

# Fetch an image tarball from the build cache and load it into the local
# docker daemon; no-op when the image already exists locally.
function docker_image_from_buildcache() {
    local name="$1"
    local version="$2"
    local tgz="${name}-${version}.tar.gz"

    if image_exists_locally "${name}" "${version}" ; then
        return
    fi

    local url="https://${BUILDCACHE_SERVER}/containers/${version}/${tgz}"

    curl --verbose --fail --silent --show-error --location --retry-delay 1 --retry 60 \
        --retry-connrefused --retry-max-time 60 --connect-timeout 20 \
        --remote-name "${url}"

    cat "${tgz}" | $PIGZ -d -c | docker load
    rm "${tgz}"
}
# --

# Resolve an image: local daemon first, then the registry, and finally the
# build cache tarballs.
function docker_image_from_registry_or_buildcache() {
    local image="$1"
    local version="$2"

    if image_exists_locally "${CONTAINER_REGISTRY}/${image}" "${version}" ; then
        return
    fi

    if docker pull "${CONTAINER_REGISTRY}/${image}:${version}" ; then
        return
    fi

    docker_image_from_buildcache "${image}" "${version}"
}
# --
|
// Arithmetic mean of `nums` as a float.
// Fixed: returns 0.0f for an empty vector (the original divided by zero,
// producing NaN); also takes the vector by const reference.
float calculate_average(const std::vector<int>& nums) {
    if (nums.empty()) {
        return 0.0f;
    }
    const float total = std::accumulate(nums.begin(), nums.end(), 0.0f);
    return total / static_cast<float>(nums.size());
}
<gh_stars>100-1000
import { makeExecutableSchema } from '@graphql-tools/schema';
import { IResolvers } from '@graphql-tools/utils';
import { graphql, GraphQLSchema, print } from 'graphql';
import gql from 'graphql-tag';
import { assertSuccessfulResult } from '../../../src/graphql/execution-result';
import { weaveSchemas } from '../../../src/weave-schemas';
import { BenchmarkConfig, BenchmarkFactories } from '../support/async-bench';
// Build a woven test schema containing `size` posts, where each post's
// userName is linked (in batch mode) to a synthetic User object.
function createSchema(size: number) {
    const types = gql`
        type Query {
            posts: [Post]
            allUsers(filter: UserFilter): [User]
        }

        input UserFilter {
            name_in: [String]
        }

        type Post {
            id: ID
            userName: String
        }

        type User {
            name: String
        }
    `;

    // Synthetic data: post i belongs to "user<i>".
    const posts: any[] = [];
    for (let i = 0; i < size; i++) {
        posts.push({id: i, userName: 'user' + i});
    }

    const resolvers: IResolvers = {
        Query: {
            posts: () => posts,
            // NOTE(review): assumes args.filter.name_in is always provided —
            // true for the weaver-generated batch query used here; verify for
            // other callers.
            allUsers: (root: any, args: any, context: any) => args.filter.name_in.map((name: string) => ({name: name}))
        }
    };
    const schema = makeExecutableSchema({typeDefs: print(types), resolvers});

    // Weave: Post.userName becomes a `user` link resolved via allUsers in
    // batch mode, joined on Query.posts.
    return weaveSchemas({
        endpoints: [{
            schema,
            fieldMetadata: {
                'Post.userName': {
                    link: {
                        field: 'allUsers',
                        keyField: 'name',
                        batchMode: true,
                        linkFieldName: 'user',
                        argument: 'filter.name_in'
                    }
                },
                'Query.posts': {
                    join: {
                        linkField: 'userName'
                    }
                }
            }
        }]
    })
}
// Query exercising the woven link: each post resolves its joined user.
const query = gql`{ posts { user { name } } }`;

// Build a benchmark that weaves a schema with `size` posts and measures
// executing the join query against it.
function testJoin(size: number): BenchmarkConfig {
    let schema: GraphQLSchema;
    return {
        name: `join with ${size} objects`,
        async fn() {
            const result = await graphql(schema, print(query), {}, {}, {});
            assertSuccessfulResult(result);
        },
        async beforeAll() {
            // Schema construction is excluded from the measured fn().
            schema = await createSchema(size);
        }
    };
}
// Benchmark suite: join performance at increasing collection sizes.
export const JOIN_BENCHMARKS: BenchmarkFactories = [
    () => testJoin(10),
    () => testJoin(1000),
    () => testJoin(10000),
];
|
# Aliases
# Shorthands for Google's `repo` multi-repository tool; each alias keeps
# repo's zsh completion via compdef.
alias r='repo'
compdef _repo r=repo
# rebase all projects, auto-stashing local changes first
alias rra='repo rebase --auto-stash'
compdef _repo rra='repo rebase --auto-stash'
# sync all projects
alias rs='repo sync'
compdef _repo rs='repo sync'
# sync, then rebase with auto-stash
alias rsrra='repo sync ; repo rebase --auto-stash'
compdef _repo rsrra='repo sync ; repo rebase --auto-stash'
# upload changes for review
alias ru='repo upload'
compdef _repo ru='repo upload'
# show working-tree status across projects
alias rst='repo status'
compdef _repo rst='repo status'
|
# Build the bundled v8-profiler native addon inside pomelo-admin, then return
# to the project root. (node-waf implies a legacy Node 0.x toolchain.)
cd node_modules/pomelo/node_modules/pomelo-admin/node_modules/v8-profiler/ && node-waf configure && node-waf build && cd ../../../../
|
<gh_stars>1-10
/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.util.math;
import junit.framework.TestCase;
import org.apache.commons.math3.linear.RealMatrix;
import org.junit.Test;
/**
* @author <EMAIL>
*/
/**
 * Tests for {@code MatrixUtils}.
 * NOTE(review): mixes JUnit 3 (extends TestCase, test* names) with JUnit 4
 * {@code @Test} annotations — works, but pick one style.
 *
 * @author <EMAIL>
 */
public class MatrixUtilsTest extends TestCase {
    // 3x3 input matrix shared by both tests.
    private static final double[][] data = {
            {0.0, 1.0, 2.0},
            {1.0, 1.0, 1.0},
            {1.0, 0.5, 0.0}};
    // Expected rows after normalization: each row of `data` scaled so its
    // entries sum to 1 (row sums 3, 3 and 1.5). NOTE(review): that is
    // sum-normalization despite the "cosine" name — verify against
    // MatrixUtils.cosineNormalize's contract.
    private static final double[][] normalizedData = {
            {0.0, 0.3333333333333333, 0.6666666666666666},
            {0.3333333333333333, 0.3333333333333333, 0.3333333333333333},
            {0.6666666666666666, 0.3333333333333333, 0.0}};

    @Test
    public void testCosineNormalize() throws Exception {
        RealMatrix matrix = org.apache.commons.math3.linear.MatrixUtils.createRealMatrix(data);
        MatrixUtils.cosineNormalize(matrix);
        // RealMatrix.equals compares entries exactly; values above are the
        // exact doubles produced by dividing by the row sums.
        assertEquals(org.apache.commons.math3.linear.MatrixUtils.createRealMatrix(normalizedData), matrix);
    }

    @Test
    public void testSumOfRow() throws Exception {
        RealMatrix matrix = org.apache.commons.math3.linear.MatrixUtils.createRealMatrix(data);
        assertEquals(3.0, MatrixUtils.sumOfRow(matrix, 0));
        assertEquals(3.0, MatrixUtils.sumOfRow(matrix, 1));
        assertEquals(1.5, MatrixUtils.sumOfRow(matrix, 2));
    }
}
|
#!/usr/bin/env bash
# GitHub-Action entrypoint that compiles one or more LaTeX root files.
# Inputs arrive as positional parameters; empty strings mean "not set".
set -e
# Bold-blue informational message (escape sequences in $1 are interpreted).
info() {
echo -e "\033[1;34m$1\033[0m"
}
# Emit a GitHub Actions workflow warning annotation.
warn() {
echo "::warning :: $1"
}
# Emit a GitHub Actions error annotation and abort the action.
error() {
echo "::error :: $1"
exit 1
}
# Positional inputs, in the order the action wires them.
root_file="${1}"
glob_root_file="${2}"
working_directory="${3}"
compiler="${4}"
args="${5}"
extra_packages="${6}"
extra_system_packages="${7}"
extra_fonts="${8}"
pre_compile="${9}"
post_compile="${10}"
latexmk_shell_escape="${11}"
latexmk_use_lualatex="${12}"
latexmk_use_xelatex="${13}"
if [[ -z "$root_file" ]]; then
error "Input 'root_file' is missing."
fi
# Split the (possibly multi-line) root_file input into an array, one per line.
readarray -t root_file <<< "$root_file"
if [[ -n "$working_directory" ]]; then
if [[ ! -d "$working_directory" ]]; then
mkdir -p "$working_directory"
fi
cd "$working_directory"
fi
# Optionally expand shell globs in each root-file entry; a pattern with no
# match is kept literally (compgen fails, so the pattern itself is echoed).
if [[ -n "$glob_root_file" ]]; then
expanded_root_file=()
for pattern in "${root_file[@]}"; do
expanded="$(compgen -G "$pattern" || echo "$pattern")"
readarray -t files <<< "$expanded"
expanded_root_file+=("${files[@]}")
done
root_file=("${expanded_root_file[@]}")
fi
if [[ -z "$compiler" && -z "$args" ]]; then
warn "Input 'compiler' and 'args' are both empty. Reset them to default values."
compiler="latexmk"
args="-pdf -file-line-error -halt-on-error -interaction=nonstopmode"
fi
# Split the args string into an array on spaces.
IFS=' ' read -r -a args <<< "$args"
if [[ "$compiler" = "latexmk" ]]; then
if [[ -n "$latexmk_shell_escape" ]]; then
args+=("-shell-escape")
fi
if [[ -n "$latexmk_use_lualatex" && -n "$latexmk_use_xelatex" ]]; then
error "Input 'latexmk_use_lualatex' and 'latexmk_use_xelatex' cannot be used at the same time."
fi
if [[ -n "$latexmk_use_lualatex" ]]; then
# Drop -pdf (it selects pdflatex) before switching engines.
for i in "${!args[@]}"; do
if [[ "${args[i]}" = "-pdf" ]]; then
unset 'args[i]'
fi
done
args+=("-lualatex")
# LuaLaTeX use --flag instead of -flag for arguments.
# $VAR already starts with '-', so prefixing one more '-' yields '--flag'.
for VAR in -file-line-error -halt-on-error -shell-escape; do
for i in "${!args[@]}"; do
if [[ "${args[i]}" = "$VAR" ]]; then
args[i]="-$VAR"
fi
done
done
args=("${args[@]/#-interaction=/--interaction=}")
fi
if [[ -n "$latexmk_use_xelatex" ]]; then
for i in "${!args[@]}"; do
if [[ "${args[i]}" = "-pdf" ]]; then
unset 'args[i]'
fi
done
args+=("-xelatex")
fi
else
# Any latexmk_* input set while compiler != latexmk is a configuration error.
for VAR in "${!latexmk_@}"; do
if [[ -n "${!VAR}" ]]; then
error "Input '${VAR}' is only valid if input 'compiler' is set to 'latexmk'."
fi
done
fi
# Whitespace-separated list of Alpine packages to install before compiling.
if [[ -n "$extra_system_packages" ]]; then
for pkg in $extra_system_packages; do
info "Install $pkg by apk"
apk --no-cache add "$pkg"
done
fi
# Install user-supplied fonts (glob patterns allowed) and refresh the cache.
if [[ -n "$extra_fonts" ]]; then
readarray -t extra_fonts <<< "$extra_fonts"
expanded_extra_fonts=()
for pattern in "${extra_fonts[@]}"; do
expanded="$(compgen -G "$pattern" || echo "$pattern")"
readarray -t files <<< "$expanded"
expanded_extra_fonts+=("${files[@]}")
done
extra_fonts=("${expanded_extra_fonts[@]}")
mkdir -p "$HOME/.local/share/fonts/"
for f in "${extra_fonts[@]}"; do
if [[ -z "$f" ]]; then
continue
fi
info "Install font $f"
cp -r "$f" "$HOME/.local/share/fonts/"
done
fc-cache -fv
fi
if [[ -n "$extra_packages" ]]; then
warn "Input 'extra_packages' is deprecated. We now build LaTeX document with full TeXLive installed."
fi
if [[ -n "$pre_compile" ]]; then
info "Run pre compile commands"
eval "$pre_compile"
fi
# Compile every resolved root file with the selected compiler and args.
for f in "${root_file[@]}"; do
if [[ -z "$f" ]]; then
continue
fi
info "Compile $f"
if [[ ! -f "$f" ]]; then
error "File '$f' cannot be found from the directory '$PWD'."
fi
"$compiler" "${args[@]}" "$f"
done
if [[ -n "$post_compile" ]]; then
info "Run post compile commands"
eval "$post_compile"
fi
|
#!/bin/sh
set -e
#
# See: http://boinc.berkeley.edu/trac/wiki/AndroidBuildClient#
#
# Script to compile OpenSSL for Android
# Environment overrides: COMPILEOPENSSL, STDOUT_TARGET, OPENSSL_SRC,
# ANDROID_TC / ANDROID_TC_X86 control what is built and where output goes.
COMPILEOPENSSL="${COMPILEOPENSSL:-yes}"
STDOUT_TARGET="${STDOUT_TARGET:-/dev/stdout}"
CONFIGURE="yes"
MAKECLEAN="yes"
OPENSSL="${OPENSSL_SRC:-$HOME/src/openssl-1.0.2p}" #openSSL sources, requiered by BOINC
# Standalone x86 toolchain layout; compilers/flags target Android API 19.
export ANDROID_TC="${ANDROID_TC:-$HOME/android-tc}"
export ANDROIDTC="${ANDROID_TC_X86:-$ANDROID_TC/x86}"
export TCBINARIES="$ANDROIDTC/bin"
export TCINCLUDES="$ANDROIDTC/i686-linux-android"
export TCSYSROOT="$ANDROIDTC/sysroot"
export STDCPPTC="$TCINCLUDES/lib/libstdc++.a"
export PATH="$TCBINARIES:$TCINCLUDES/bin:$PATH"
export CC=i686-linux-android-clang
export CXX=i686-linux-android-clang++
export LD=i686-linux-android-ld
export CFLAGS="--sysroot=$TCSYSROOT -DANDROID -Wall -I$TCINCLUDES/include -O3 -fomit-frame-pointer -fPIE -D__ANDROID_API__=19"
export CXXFLAGS="--sysroot=$TCSYSROOT -DANDROID -Wall -funroll-loops -fexceptions -O3 -fomit-frame-pointer -fPIE -D__ANDROID_API__=19"
export LDFLAGS="-L$TCSYSROOT/usr/lib -L$TCINCLUDES/lib -llog -fPIE -pie -latomic -static-libstdc++"
export GDB_CFLAGS="--sysroot=$TCSYSROOT -Wall -g -I$TCINCLUDES/include"
# Prepare android toolchain and environment
./build_androidtc_x86.sh
if [ "$COMPILEOPENSSL" = "yes" ]; then
echo "===== building openssl for x86 from $OPENSSL ====="
cd "$OPENSSL"
if [ -n "$MAKECLEAN" ]; then
make clean 1>$STDOUT_TARGET 2>&1
fi
if [ -n "$CONFIGURE" ]; then
./Configure linux-generic32 no-shared no-dso -DL_ENDIAN --openssldir="$TCINCLUDES/ssl" 1>$STDOUT_TARGET
#override flags in Makefile
# Append $(CFLAGS) to the generated CFLAG= line and point INSTALLTOP at the
# toolchain prefix so 'make install_sw' lands inside $TCINCLUDES.
sed -e "s/^CFLAG=.*$/`grep -e \^CFLAG= Makefile` \$(CFLAGS)/g
s%^INSTALLTOP=.*%INSTALLTOP=$TCINCLUDES%g" Makefile > Makefile.out
mv Makefile.out Makefile
fi
make 1>$STDOUT_TARGET
# install_sw installs libraries/headers only (skips docs).
make install_sw 1>$STDOUT_TARGET
echo "===== openssl for x86 build done ====="
fi
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.features.ppa;
import ed.biodare2.backend.repo.isa_dom.dataimport.DataColumnProperties;
import ed.biodare2.backend.repo.isa_dom.dataimport.DataTrace;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author tzielins
*/
public class FakeIdExtractorTest {
public FakeIdExtractorTest() {
}
// Builds five traces with labels T1, WT, T1, T2, WT; traceRef runs A..E,
// traceNr is 1-based, dataId == traceNr and rawDataId == traceNr + 1.
protected List<DataTrace> fakeTraces() {
List<DataTrace> traces = new ArrayList<>();
DataTrace trace;
trace = new DataTrace();
trace.details = new DataColumnProperties("T1");
traces.add(trace);
trace = new DataTrace();
trace.details = new DataColumnProperties("WT");
traces.add(trace);
trace = new DataTrace();
trace.details = new DataColumnProperties("T1");
traces.add(trace);
trace = new DataTrace();
trace.details = new DataColumnProperties("T2");
traces.add(trace);
trace = new DataTrace();
trace.details = new DataColumnProperties("WT");
traces.add(trace);
String REFS = "ABCDEFG";
int nr = 1;
for (DataTrace t : traces) {
t.traceRef = REFS.substring(nr-1,nr);
t.traceNr = nr++;
t.dataId = t.traceNr;
t.rawDataId = t.traceNr+1;
}
return traces;
}
// An extractor built from an empty list must reject lookups for any trace.
@Test
public void throwsExceptionOnUnitializedList() {
FakeIdExtractor instance = new FakeIdExtractor(Collections.emptyList());
DataTrace trace1;
trace1 = new DataTrace();
trace1.traceNr = 1;
trace1.dataId = trace1.traceNr;
trace1.traceRef = "A";
trace1.details = new DataColumnProperties("T1");
try {
long id1 = instance.getBioId(trace1);
fail("IllegalArgumentException expected");
} catch (IllegalArgumentException e) {};
}
// Ids are a function of the data label: distinct labels map to distinct
// ids, equal labels (traces 0 and 2, or 1 and 4) map to equal ids.
@Test
public void givesDifferentIdsForDifferentTracesLabels() {
List<DataTrace> traces = fakeTraces();
DataTrace trace1,trace2;
trace1 = traces.get(0);
trace2 = traces.get(1);
assertNotEquals(trace1.details.dataLabel, trace2.details.dataLabel);
FakeIdExtractor instance = new FakeIdExtractor(traces);
long id1 = instance.getBioId(trace1);
long id2 = instance.getBioId(trace2);
assertNotEquals(id1, id2);
assertEquals(trace1.details.dataLabel, traces.get(2).details.dataLabel);
id2 = instance.getBioId(traces.get(2));
assertEquals(id1,id2);
id1 = instance.getCondId(trace1);
id2 = instance.getCondId(trace2);
assertNotEquals(id1, id2);
assertEquals(trace2.details.dataLabel, traces.get(4).details.dataLabel);
id1 = instance.getCondId(traces.get(4));
assertEquals(id1,id2);
}
// Id assignment is deterministic: two extractors over the same input
// agree on every bio and condition id.
@Test
public void instancesBasedOnSameInputGivesTheSameIds() {
List<DataTrace> traces = fakeTraces();
FakeIdExtractor instance1 = new FakeIdExtractor(traces);
FakeIdExtractor instance2 = new FakeIdExtractor(traces);
for (DataTrace trace : traces) {
assertEquals(instance1.getBioId(trace),instance2.getBioId(trace));
assertEquals(instance1.getCondId(trace),instance2.getCondId(trace));
}
}
// Round trip: getBioLabel/getCondLabel invert getBioId/getCondId, even
// across separately constructed extractor instances.
@Test
public void idsMapsBackToLabels() {
List<DataTrace> traces = fakeTraces();
FakeIdExtractor instance1 = new FakeIdExtractor(traces);
FakeIdExtractor instance2 = new FakeIdExtractor(traces);
for (DataTrace trace : traces) {
assertEquals(trace.details.dataLabel,instance2.getBioLabel(instance1.getBioId(trace)));
assertEquals(trace.details.dataLabel,instance2.getCondLabel(instance1.getCondId(trace)));
}
}
// The human-readable data ref is "<traceNr>. [<traceRef>]" looked up by dataId.
@Test
public void dataRefBasedOnTraceNrAndItsRefByDataId() {
List<DataTrace> traces = fakeTraces();
FakeIdExtractor instance1 = new FakeIdExtractor(traces);
String exp = "2. [B]";
String res = instance1.getDataRef(traces.get(1).dataId);
assertEquals(exp,res);
}
}
|
/*
Copyright 2016 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generators
import (
"testing"
)
// TestRemoveLastDir checks that removeLastDir strips the final directory
// component from a path and reports which component was removed.
func TestRemoveLastDir(t *testing.T) {
	cases := map[string]struct{ newPath, removedDir string }{
		"a/b/c": {"a/c", "b"},
	}
	for input, want := range cases {
		gotPath, gotRemoved := removeLastDir(input)
		if gotPath != want.newPath {
			t.Errorf("%v: wanted %v, got %v", input, want.newPath, gotPath)
		}
		if gotRemoved != want.removedDir {
			t.Errorf("%v: wanted %v, got %v", input, want.removedDir, gotRemoved)
		}
	}
}
|
#!/bin/bash
# Submit the standard set of AMS ISS / Monte-Carlo production jobs to
# HTCondor at CERN. Each entry below is a job configuration name handed to
# the shared submit_condor.sh wrapper; edit the list to add/remove jobs.
jobconfs=(
  jobconf.cern.iss.B1130.pass7
  jobconf.cern.mc.ap.pl1.l1.021000.B1220
  jobconf.cern.mc.d.pl1.l1.021000.B1128
  jobconf.cern.mc.pr.0550.B1220
  jobconf.cern.mc.pr.flux.1050.B1220
  jobconf.cern.mc.pr.flux.504000.B1220
  jobconf.cern.mc.pr.pl1.l1.021000.B1220
  jobconf.cern.mc.pr.l1o9.flux.2016000.B1220
  jobconf.cern.mc.pr.l19.10016000.B1220
)

# Submit each configuration in order (same order as the original script).
for conf in "${jobconfs[@]}"; do
  sh "${AMSProjJobs}/CERN/submit_condor.sh" RUN "$conf"
done
|
#include <vector>
// Sums every cell of a 2-D terrain grid, treating each cell's value as its
// contribution over a unit cell area (the original multiplied by 1).
//
// The grid may be ragged (rows of different lengths) or empty; both yield
// the obvious partial/zero sum. The parameter stays a non-const reference
// to preserve the original signature, but the grid is never modified.
// NOTE(review): int accumulation can overflow for very large/high grids —
// callers presumably keep totals within int range; confirm if that changes.
int calculateTerrainArea(std::vector<std::vector<int>>& terrain) {
    int totalArea = 0;
    // Range-for avoids the signed/unsigned index bookkeeping of the
    // original nested size_t loops.
    for (const auto& row : terrain) {
        for (int cell : row) {
            totalArea += cell;
        }
    }
    return totalArea;
}
#!/bin/sh
# Build a Windows (win32) one-file executable of openocd_svd under Wine and
# zip it. $1 is the version string embedded in the archive name.
# Clean previous build artifacts first.
rm -rf build dist *.spec *.zip
wine C:/Python36-32/Scripts/pyinstaller.exe --onefile ../app/openocd_svd.py
# -j stores the exe without its dist/ path prefix.
zip -j openocd_svd_v$1_win32.zip dist/openocd_svd.exe
#!/bin/sh
# Run Class Central's weekly cron tasks for the given environment.
# Usage: weekly_cron.sh <env>
# Resolve the directory containing this script, then move to the project
# root one level up. Paths and the env argument are quoted so the script
# survives directories with spaces (the original left them unquoted).
DIR="$( cd "$( dirname "$0" )" && pwd )"
cd "$DIR"
cd ..
env=$1
echo "Running Class Central weekly cron for $env environment"
# Generate follow counts
echo "Generate follow counts"
php app/console classcentral:follows:calculatecount --env="$env"
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# The heredoc below is rendered by the shell, so ${PLACEHOLDER}, ${WIDTH},
# ${HEIGHT} and ${PREFIX} are substituted from the environment at deploy time.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app!!!!!
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
// Run only when the Ecwid storefront API object is present on the page.
if (typeof Ecwid === 'object') {
  // Once the Ecwid JS API finishes loading, tag the document root so
  // Ecwid-specific CSS selectors can target storefront pages.
  Ecwid.OnAPILoaded.add(function (page) {
    jQuery('html').attr('id', 'ecwid_html');
  });
}
<filename>src/main/java/it/qbteam/persistence/repository/OrganizationAccessRepository.java
package it.qbteam.persistence.repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import it.qbteam.model.OrganizationAccess;
@Repository
// Spring Data repository for OrganizationAccess records; the JPQL queries
// below all scope results to a single organization.
public interface OrganizationAccessRepository extends CrudRepository<OrganizationAccess, Long> {
// Accesses authenticated against a given org auth server, within one organization.
@Query("from OrganizationAccess where orgAuthServerId=:serverId and organizationId=:orgId")
Iterable<OrganizationAccess> findByOrgAuthServerIdAndOrganizationId(@Param("serverId") String orgAuthServerId, @Param("orgId") Long organizationId);
// Accesses matching a specific exit token, within one organization.
@Query("from OrganizationAccess where exitToken=:token and organizationId=:orgId")
Iterable<OrganizationAccess> findByExitTokenAndOrganizationId(@Param("token") String exitToken, @Param("orgId") Long organizationId);
// All accesses recorded for one organization.
@Query("from OrganizationAccess where organizationId=:orgId")
Iterable<OrganizationAccess> findByOrganizationId(@Param("orgId") Long organizationId);
}
|
from django.db import models
##################################################################
#MUS
class Artist(models.Model):
    """A music artist, identified by a unique name."""
    name = models.CharField(max_length=255, null=True, unique=True)
    def __str__(self):
        return self.name
class Song(models.Model):
    """A song with its artist, cover image, audio file and MIDI file."""
    name = models.CharField(max_length=255, null=True)
    artist = models.ForeignKey(Artist, on_delete=models.CASCADE)
    image = models.ImageField()
    # NOTE(review): free-form JSON payload; the intended schema is undecided
    # in the original — confirm what callers store here.
    data = models.JSONField()
    # Audio and MIDI uploads go to media subdirectories songs/ and midi/.
    song_file = models.FileField(upload_to='songs/')
    midi_file = models.FileField(upload_to='midi/')
    def __str__(self):
        return "%s %s" % (self.name, self.artist)
#!/bin/bash
# Generate Java keystores/truststores for the TA3 broker, server and $1
# client instances, cross-importing certificates so every party trusts the
# broker (and the broker trusts everyone). Test-only credentials ("SPARtest").
keytool -genkey -keypass SPARtest -storepass SPARtest -dname "cn=TA3 Broker, ou=SPAR, o=MIT Lincoln Laboratory, l=Lexington, st=MA, c=US" -alias ta3_broker -keyalg RSA -keystore stores/ta3_broker.ks
keytool -genkey -keypass SPARtest -storepass SPARtest -dname "cn=TA3 Server, ou=SPAR, o=MIT Lincoln Laboratory, l=Lexington, st=MA, c=US" -alias ta3_server -keyalg RSA -keystore stores/ta3_server.ks
# Export public certs, then import each into the other party's truststore.
keytool -export -storepass SPARtest -alias ta3_broker -keystore stores/ta3_broker.ks -file certs/ta3_broker_cert
keytool -export -storepass SPARtest -alias ta3_server -keystore stores/ta3_server.ks -file certs/ta3_server_cert
keytool -import -storepass SPARtest -noprompt -alias ta3_server -keystore stores/ta3_broker.ts -file certs/ta3_server_cert
keytool -import -storepass SPARtest -noprompt -alias ta3_broker -keystore stores/ta3_server.ts -file certs/ta3_broker_cert
# One keystore per client; each client trusts the broker and vice versa.
for ((i=0; i < $1; i++))
do
keytool -genkey -keypass SPARtest -storepass SPARtest -dname "cn=TA3 Broker, ou=SPAR, o=MIT Lincoln Laboratory, l=Lexington, st=MA, c=US" -alias ta3_client$i -keyalg RSA -keystore stores/ta3_client$i.ks
keytool -export -storepass SPARtest -alias ta3_client$i -keystore stores/ta3_client$i.ks -file certs/ta3_client${i}_cert
keytool -import -storepass SPARtest -noprompt -alias ta3_client$i -keystore stores/ta3_broker.ts -file certs/ta3_client${i}_cert
keytool -import -storepass SPARtest -noprompt -alias ta3_broker -keystore stores/ta3_client$i.ts -file certs/ta3_broker_cert
done
|
#!/bin/bash
# Installer for the lazy-delegate app: installs system/python dependencies,
# clones (or refreshes) the git repo, and prepares a virtualenv.
# Usage: install.sh <sudo-capable-user>
AUTHOR="osrn"
APPNAME="lazy-delegate"
APPHOME="$HOME/$APPNAME"
VENV="$APPHOME/.venv"
GITREPO="https://github.com/$AUTHOR/$APPNAME.git"
GITBRANCH="main"
# Regular Colors
CBlack='\033[0;30m' # Black
CRed='\033[0;31m' # Red
CGreen='\033[0;32m' # Green
CYellow='\033[0;33m' # Yellow
CBlue='\033[0;34m' # Blue
CPurple='\033[0;35m' # Purple
CCyan='\033[0;36m' # Cyan
CWhite='\033[0;37m' # White
NC='\033[0m' # Text Reset
clear
# Validate the argument: it must name an existing user in the sudo group.
SUDO_USER=$1
if [ -z "$SUDO_USER" ]
then
echo -e "${CRed}Error: this script must be called with a sudo user as argument${NC}"
echo usage: $0 user
exit 1
fi
if ! id -u $SUDO_USER &>/dev/null
then
echo -e "${CRed}Error: user $SUDO_USER does not exist${NC}"
exit 1
fi
if [ -z "$(id -Gn $SUDO_USER | grep sudo)" ]
then
echo -e "${CRed}Error: $SUDO_USER must have sudo privilage${NC}"
exit 1
fi
echo
echo installing system dependencies
echo ==============================
# Run apt-get as the sudo user via su; the user is prompted twice (su + sudo).
echo -e "${CRed}You will be asked for the SUDOER's password twice; first time for su, and second time for sudo in su environment${NC}"
echo Please enter the password for $SUDO_USER
su - $SUDO_USER -c "echo Please enter the password for $SUDO_USER again
sudo -S echo 'installing...'
sudo apt-get -y install python3 python3-pip python3-dev python3-venv
echo '...done'
"
exit_code=$?
if [ "$exit_code" -ne 0 ]; then
echo -e "${CRed}Error: incorrect password or user $SUDO_USER has no password${NC}"
exit 1
fi
echo installing latest pip and venv for user
echo =======================================
python3 -m pip install --user --upgrade pip
python3 -m pip install --user virtualenv
echo '...done'
echo
echo downloading package from git repo
echo =================================
cd ~
# If an install already exists, optionally stop/unregister the pm2 job and
# wipe the app directory before cloning again.
if [ -d $APPHOME ]; then
read -p "$(echo -e "${CRed}existing installation found, shall I wipe it? [y/N]>${NC}") " r
case $r in
y|Y)
echo 'stopping jobs...'
pm2 stop $APPNAME
echo 'unregistering jobs with pm2...'
pm2 delete $APPNAME
echo 'removing package...'
rm -rf $APPHOME
;;
*) echo -e "did not wipe existing installation";;
esac
fi
# Clone fresh; if the clone fails (directory kept above), hard-reset the
# local repo to the remote branch instead.
if (git clone -b $GITBRANCH $GITREPO) then
echo "package retrieved from GIT"
cd $APPHOME
else
echo "local repo found! resetting to remote..."
cd $APPHOME
git reset --hard
git fetch --all
git checkout $GITBRANCH
git pull
fi
echo '...done'
echo
echo creating virtual environment
echo ============================
# Reuse or recreate .venv under the app directory, at the user's choice.
if [ -d $VENV ]; then
read -p "remove existing virtual environment ? [y/N]> " r
case $r in
y|Y)
rm -rf $VENV
python3 -m venv .venv
;;
*) echo -e "existing virtual environment preserved";;
esac
else
python3 -m venv .venv
fi
echo '...done'
echo
echo installing python dependencies
echo ==============================
. $VENV/bin/activate
if [ -n "$CPATH" ]; then
# Workaround for Solar vers > 3.2.0-next.0 setting CPATH
# causing psycopg2 compilation error for missing header files
OLDCPATH=$CPATH
export CPATH="/usr/include"
fi
cd $APPHOME
# wheel and psycopg2 needs to be installed seperately
pip3 install wheel
pip3 install -r requirements.txt
deactivate
echo '...done'
# Restore the caller's CPATH if it was overridden above.
# BUG FIX: the original tested $SAVEDCPATH, a variable that is never set
# anywhere in this script, so CPATH was never restored after the psycopg2
# workaround. The saved value lives in $OLDCPATH.
if [ -n "$OLDCPATH" ]; then
export CPATH=$OLDCPATH
fi
# Print a post-install summary: how to install pm2, configure the app,
# start it, and make both the app and pm2 survive reboots.
echo -e ${CGreen}
echo '====================='
echo 'installation complete'
echo '====================='
echo -e ${NC}
echo '>>> next steps:'
echo '==============='
echo 'This script requires pm2, which is possibly already installed;'
echo 'but otherwise you can install it with:'
echo -e ${CBlue}' npm install pm2@latest [-g]'
echo -e ${NC}' or'${CBlue}
echo ' yarn [global] add pm2'
echo -e ${NC}
echo 'First, clone the sample config provided and modify as you see fit'
echo -e ${CBlue}' cd '$APPHOME
echo ' cp src/config/config.sample src/config/config'
echo ' (edit) src/config/config'
echo -e ${NC}'All config parameters are explained in README.md'
echo
echo 'Next do;'
echo -e ${CBlue}' cd '$APPHOME
echo ' pm2 start package.json && pm2 logs '$APPNAME
echo -e ${NC}
echo 'to start the app at boot with pm2;'
echo -e ${CBlue}' cd && pm2 save'
echo -e ${NC}
echo 'to start pm2 at boot, you have two options:'
echo 'opt 1/ user with sudo privilege'
echo -e ${CBlue}' pm2 startup'
echo -e ${NC}'and follow the instructions'
echo
echo 'opt 2/ user like solar with no sudo privilege - NOTE: this is already done by solar core > 3.2.0 install'
echo -e ${CBlue}' @reboot /bin/bash -lc "source /home/solar/.solar/.env; /home/solar/.solar/.pnpm/bin/pm2 resurrect"'
echo -e ${NC}
|
<!DOCTYPE html>
<!-- Minimal page with a single-field form that POSTs "text" to /submit_text. -->
<html>
<head>
<title>Submission form</title>
</head>
<body>
<form action="/submit_text" method="POST">
<label>Input Text: </label>
<!-- "required" blocks submission of an empty value client-side. -->
<input type="text" name="text" required/>
<input type="submit" value="Submit"/>
</form>
</body>
</html>
One possible solution would be to use a delegated-authorization framework such as OAuth (typically paired with OpenID Connect when user authentication is also needed). OAuth is an open standard for authorization that provides a secure way for users to grant an application access to their resources without having to share their login credentials. It also helps protect against attacks such as cross-site request forgery and session hijacking by relying on short-lived, token-based credentials instead of passwords.
/// Encrypts a string by shifting each character's byte value up by 5
/// (a simple Caesar-style shift over the 0..=255 byte range).
///
/// Input is assumed to be ASCII: `chr as u8` truncates code points above
/// U+00FF. Bytes above 250 now wrap around modulo 256 — the original
/// `chr as u8 + 5` overflowed, which panics in debug builds.
fn encrypt_string(string: &str) -> String {
    string
        .chars()
        // wrapping_add avoids the debug-build overflow panic for bytes > 250.
        .map(|chr| (chr as u8).wrapping_add(5) as char)
        .collect()
}
fn main() {
let input_string = "Hello World";
//calling the encrypt string function
let encrypted_string = encrypt_string(&input_string);
println!("Encrypted String is {:?}", encrypted_string);
} |
#!/bin/bash
# Copyright 2017 The Openstack-Helm Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
set -xe
export OS_CLOUD=openstack_helm
# Public (external) network parameters; all overridable from the environment.
: ${OSH_EXT_NET_NAME:="public"}
: ${OSH_EXT_NET_VLAN:="27"}
: ${OSH_EXT_SUBNET_NAME:="public-subnet"}
: ${OSH_EXT_SUBNET:="10.23.27.0/24"}
: ${OSH_EXT_GATEWAY:="10.23.27.1"}
: ${OSH_EXT_SUBNET_POOL_START:="10.23.27.11"}
: ${OSH_EXT_SUBNET_POOL_END:="10.23.27.99"}
# Create the provider network + subnet via a Heat stack and wait for it.
tools/openstack stack create --wait \
--parameter network_name=${OSH_EXT_NET_NAME} \
--parameter physical_network_name=${OSH_EXT_NET_NAME} \
--parameter physical_network_vlan=${OSH_EXT_NET_VLAN} \
--parameter subnet_name=${OSH_EXT_SUBNET_NAME} \
--parameter subnet_cidr=${OSH_EXT_SUBNET} \
--parameter subnet_gateway=${OSH_EXT_GATEWAY} \
--parameter subnet_pool_start=${OSH_EXT_SUBNET_POOL_START} \
--parameter subnet_pool_end=${OSH_EXT_SUBNET_POOL_END} \
-t /target/tools/files/heat-public-net-deployment.yaml \
heat-public-net-deployment
: ${OSH_VM_KEY_STACK:="heat-vm-key"}
: ${OSH_PRIVATE_SUBNET:="10.0.0.0/24"}
# NOTE(portdirect): We do this fancy, and seemingly pointless, footwork to get
# the full image name for the cirros Image without having to be explicit.
IMAGE_NAME=$(tools/openstack image show -f value -c name \
$(tools/openstack image list -f csv | awk -F ',' '{ print $2 "," $1 }' | \
grep "^\"Cirros" | head -1 | awk -F ',' '{ print $2 }' | tr -d '"'))
# Generate a fresh passphrase-less SSH keypair and register it with Nova.
rm -rf ${OSH_VM_KEY_STACK}*
ssh-keygen -t rsa -N '' -f $OSH_VM_KEY_STACK
chmod 600 $OSH_VM_KEY_STACK
# Setup SSH Keypair in Nova
tools/openstack keypair create --public-key \
/target/"${OSH_VM_KEY_STACK}.pub" \
${OSH_VM_KEY_STACK}
: ${OSH_EXT_DNS:="8.8.8.8"}
# Boot a basic VM (with floating IP) through a second Heat stack.
tools/openstack stack create --wait \
--parameter public_net=${OSH_EXT_NET_NAME} \
--parameter image="${IMAGE_NAME}" \
--parameter ssh_key=${OSH_VM_KEY_STACK} \
--parameter cidr=${OSH_PRIVATE_SUBNET} \
--parameter dns_nameserver=${OSH_EXT_DNS} \
-t /target/tools/files/heat-basic-vm-deployment.yaml \
heat-basic-vm-deployment
# Record the VM's floating IP for the connectivity checks below.
FLOATING_IP=$(tools/openstack stack output show \
heat-basic-vm-deployment \
floating_ip \
-f value -c output_value)
# Wait until TCP port 22 on host $1 accepts connections.
#   $1 - host or IP to probe
#   $2 - optional timeout in seconds (default: 300)
# Exits the whole script with status 1 if the port does not open in time.
function wait_for_ssh_port {
  # Default wait timeout is 300 seconds
  set +x
  end=$(date +%s)
  # Quote the arguments (the original left $1/$2 unquoted, which breaks
  # under word splitting) and use -n rather than ! -z.
  if [ -n "${2:-}" ]; then
    end=$((end + $2))
  else
    end=$((end + 300))
  fi
  while true; do
    # Use Nmap as its the same on Ubuntu and RHEL family distros
    nmap -Pn -p22 "$1" | awk '$1 ~ /22/ {print $2}' | grep -q 'open' && \
      break || true
    sleep 1
    now=$(date +%s)
    # exit 1, not the non-portable "exit -1" of the original.
    [ "$now" -gt "$end" ] && echo "Could not connect to $1 port 22 in time" && exit 1
  done
  set -x
}
# Block until the VM's SSH port is reachable, then run connectivity checks.
wait_for_ssh_port $FLOATING_IP
# SSH into the VM and check it can reach the outside world
# Refresh known_hosts so a recycled floating IP does not trip host-key checks.
touch ~/.ssh/known_hosts
ssh-keygen -R "$FLOATING_IP"
ssh-keyscan "$FLOATING_IP" >> ~/.ssh/known_hosts
ssh -i ${OSH_VM_KEY_STACK} cirros@${FLOATING_IP} ping -q -c 1 -W 2 ${OSH_EXT_GATEWAY}
# Check the VM can reach the metadata server
ssh -i ${OSH_VM_KEY_STACK} cirros@${FLOATING_IP} curl --verbose --connect-timeout 5 169.254.169.254
# Check to see if cinder has been deployed, if it has then perform a volume attach.
if tools/openstack service list -f value -c Type | grep -q "^volume"; then
INSTANCE_ID=$(tools/openstack stack output show \
heat-basic-vm-deployment \
instance_uuid \
-f value -c output_value)
# Get the devices that are present on the instance
DEVS_PRE_ATTACH=$(mktemp)
ssh -i ${OSH_VM_KEY_STACK} cirros@${FLOATING_IP} lsblk > ${DEVS_PRE_ATTACH}
# Create and attach a block device to the instance
tools/openstack stack create --wait \
--parameter instance_uuid=${INSTANCE_ID} \
-t /target/tools/files/heat-vm-volume-attach.yaml \
heat-vm-volume-attach
# Get the devices that are present on the instance
DEVS_POST_ATTACH=$(mktemp)
ssh -i ${OSH_VM_KEY_STACK} cirros@${FLOATING_IP} lsblk > ${DEVS_POST_ATTACH}
# Check that we have the expected number of extra devices on the instance post attach
# comm -13 lists lines unique to the post-attach snapshot; exactly one is expected.
if ! [ "$(comm -13 ${DEVS_PRE_ATTACH} ${DEVS_POST_ATTACH} | wc -l)" -eq "1" ]; then
echo "Volume not successfully attached"
exit 1
fi
fi
|
#!/usr/bin/env -S bash -x
# Merge every polyrepo repository into the monorepo, tagging each merge point.
# Expects set_environment.sh to define: monorepodir, polyrepo, repositories[],
# and optionally dry (a command prefix such as "echo" for dry runs).
# Configure the environment.
source set_environment.sh
# Change to monorepo directory (quoted: the path may contain spaces).
pushd "$monorepodir"
# Iterate over the polyrepo. Add a remote for each repo in the polyrepo. Fetch
# all contents from each repo. Then merge the contents into the HEAD of the
# monorepo. Finally, add a tag to denote the commits where the old repos have
# been merged into the monorepo.
# Note: $dry is deliberately unquoted so an empty value disappears entirely;
# the array expansion and per-repo arguments are quoted (the original's
# unquoted ${repositories[@]} word-split repo names containing spaces).
for r in "${repositories[@]}"; do
  $dry git remote add "$r" "$polyrepo/$r"
  $dry git fetch "$r"
  $dry git merge --allow-unrelated-histories -m "Merging monorepo/$r/Monobase into monorepo" "monorepo/$r/Monobase" > /dev/null
  $dry git tag -m "Merged $r into monorepo" "monorepo/$r/merged"
done
|
<gh_stars>100-1000
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.process.workitem.core.util;
import java.io.IOException;
import java.io.OutputStream;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
import org.stringtemplate.v4.ST;
@SupportedAnnotationTypes("org.jbpm.process.workitem.core.util.Wid")
public class WidProcessor extends AbstractProcessor {
public WidProcessor() {
super();
}
private Map<String, List<Wid>> processingResults;
private boolean resetResults = true;
public boolean process(final Set<? extends TypeElement> annotations,
final RoundEnvironment roundEnv) {
if (resetResults) {
processingResults = new HashMap<>();
}
for (Element element : roundEnv.getElementsAnnotatedWith(Wid.class)) {
if (element instanceof TypeElement) {
TypeElement typeElement = (TypeElement) element;
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
MessageFormat.format("Wid Processor : processing class {0}.",
typeElement.asType().toString()));
processingResults.put(typeElement.asType().toString(),
new ArrayList<>());
if (typeElement.getInterfaces() != null && typeElement.getInterfaces().size() > 0) {
for (TypeMirror mirror : typeElement.getInterfaces()) {
if (mirror.getAnnotation(Wid.class) != null) {
processingResults.get(typeElement.asType().toString()).add(mirror.getAnnotation(Wid.class));
}
}
}
processingResults.get(typeElement.asType().toString()).add(typeElement.getAnnotation(Wid.class));
}
}
return postProcessWorkItemDefinition();
}
public boolean postProcessWorkItemDefinition() {
if (processingResults == null || processingResults.size() < 1) {
return false;
}
try {
Map<String, WidInfo> wrappedResults = new HashMap<>();
for (String key : processingResults.keySet()) {
wrappedResults.put(key,
new WidInfo(processingResults.get(key)));
}
String widName = "WorkDefinitions";
if (processingEnv.getOptions().containsKey("widName")) {
widName = processingEnv.getOptions().get("widName");
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Unable to find option \"widName\", using default (WorkDefinitions)");
}
boolean generateTemplates = false;
if (processingEnv.getOptions().containsKey("generateTemplates")) {
generateTemplates = Boolean.parseBoolean(processingEnv.getOptions().get("generateTemplates"));
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Unable to find option \"generateTemplates\", using default (false).");
}
if (generateTemplates) {
if (processingEnv.getOptions().containsKey("templateResources")) {
List<String> templateResourceList = Arrays.asList(processingEnv.getOptions().get("templateResources").split(","));
for (String templateResource : templateResourceList) {
String templateInfo[] = templateResource.split(":");
writeStream(getFileObject("",
templateInfo[0]),
getTemplateData(templateInfo[1],
wrappedResults));
}
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Unable to find option \"templateResources\", using default (none).");
}
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Not generating templates.");
}
boolean generateWids = false;
if (processingEnv.getOptions().containsKey("generateWids")) {
generateWids = Boolean.parseBoolean(processingEnv.getOptions().get("generateWids"));
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Unable to find option \"generateWids\", using default (false).");
}
if (generateWids) {
if (processingEnv.getOptions().containsKey("widsResources")) {
String widInfo[] = processingEnv.getOptions().get("widsResources").split(":");
generateWids(widInfo, wrappedResults);
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Unable to find option \"widsResources\", using default (none).");
}
} else {
processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE,
"Not generating wids.");
}
} catch (Exception e) {
processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR,
MessageFormat.format("Error post-processing workitem annotations: {0}.",
e.getMessage()));
}
return true;
}
public void generateWids(String[] widInfo, Map<String, WidInfo> wrappedResults) throws IOException {
// generate "all" wid
writeStream(getFileObject("",
widInfo[0]),
getTemplateData(widInfo[1],
wrappedResults));
// generate handler wids
Set<String> resultKeys = wrappedResults.keySet();
for(String key : resultKeys) {
Map<String, WidInfo> keyMap = new HashMap<>();
keyMap.put(key, wrappedResults.get(key));
writeStream(getFileObject("",
wrappedResults.get(key).getName() + ".wid"),
getTemplateData(widInfo[1],
keyMap));
}
}
/**
 * Renders a StringTemplate resource against the workitem info map.
 *
 * @param templateResource classpath name of the template to load
 * @param widInfoMap       workitem info keyed by handler name
 * @return the rendered template as bytes
 * @throws IOException if the template resource cannot be read
 */
public byte[] getTemplateData(String templateResource,
                              Map<String, WidInfo> widInfoMap) throws IOException {
    // '$'-delimited placeholders (both start and end delimiter).
    final ST template = new ST(getTemplateResourceFileAsString(templateResource), '$', '$');
    template.add("widInfo", widInfoMap);
    // Literal braces are injected as attributes so templates can emit them
    // without fighting the delimiter syntax.
    template.add("openbracket", "{");
    template.add("closebracket", "}");
    return template.render().getBytes();
}
/**
 * Writes the given bytes to the file object's output stream.
 *
 * Fix: the original opened the stream and closed it only on the success
 * path, leaking the stream (and possibly the file handle) if write()
 * threw. try-with-resources guarantees the close.
 *
 * @param fileObject target resource to write
 * @param data       bytes to write
 * @throws IOException if opening or writing fails
 */
public void writeStream(FileObject fileObject,
                        byte[] data) throws IOException {
    try (OutputStream stream = fileObject.openOutputStream()) {
        stream.write(data);
    }
}
/**
 * Creates a writable resource in the annotation processor's source-output
 * location (e.g. generated-sources), in the given package.
 *
 * @param pkg  package for the resource ("" for the default package)
 * @param name file name of the resource
 * @return the created FileObject
 * @throws IOException if the resource cannot be created
 */
public FileObject getFileObject(String pkg,
String name) throws IOException {
return processingEnv.getFiler().createResource(StandardLocation.SOURCE_OUTPUT,
pkg,
name);
}
// Controls whether accumulated processing results are cleared between rounds.
public void setResetResults(boolean resetResults) {
this.resetResults = resetResults;
}
// Test-only accessor: exposes the raw per-handler Wid lists accumulated by
// the processor.
public Map<String, List<Wid>> getProcessingResults() {
return processingResults;
}
// Test-only mutator: replaces the accumulated per-handler Wid lists.
public void setProcessingResults(Map<String, List<Wid>> processingResults) {
this.processingResults = processingResults;
}
/**
 * Loads a template resource from the processor's class-output location and
 * returns its full character content.
 *
 * @param resourceFileName classpath-relative template file name
 * @return the template text
 * @throws IOException if the resource cannot be located or read
 */
public String getTemplateResourceFileAsString(String resourceFileName) throws IOException {
    // Templates live package-less on the CLASS_OUTPUT location.
    final FileObject resource = processingEnv.getFiler()
            .getResource(StandardLocation.CLASS_OUTPUT, "", resourceFileName);
    return resource.getCharContent(true).toString();
}
// Test-only accessor: exposes the injected ProcessingEnvironment.
public ProcessingEnvironment getProcessingEnvironment() {
return this.processingEnv;
}
} |
class Employee:
    """An employee who can purchase items from a store."""

    def __init__(self, name):
        self.name = name
        self.items = []  # items this employee has purchased

    def purchase_item(self, store, item):
        """Buy ``item`` from ``store``.

        Fix: the original appended the item to the employee *before*
        removing it from the store, so a failed removal (e.g. the item is
        not in stock and ``list.remove`` raises ValueError) left the
        employee holding an item the store never released. Removing from
        the store first keeps both sides consistent on failure.
        """
        store.remove_item(item)
        self.items.append(item)
class Store:
    """A store holding a mutable collection of items."""

    def __init__(self, items):
        # Keep a reference to the caller-supplied list (shared, not copied),
        # matching the original aliasing behaviour.
        self.items = items

    def remove_item(self, item):
        """Remove the first occurrence of ``item``; ValueError if absent."""
        del self.items[self.items.index(item)]
// Assuming the existence of appropriate database connection and class definitions

// Retrieve the main collection name and required indexes
$mainCollectionName = ProjectModelMongoMapper::instance()->getCollectionName();
$mainIndexes = ProjectModelMongoMapper::instance()->INDEXES_REQUIRED;

// Calculate the indexes that need to be created in the main collection
$mainIndexesToCreate = MongoStore::getIndexesNotSetInCollection(SF_DATABASE, $mainCollectionName, $mainIndexes);

// Fix: the counter was incremented without ever being initialized, which
// raises an undefined-variable notice and makes the reported total unreliable.
$numberOfIndexesCreated = 0;

// Create the missing indexes in the main collection
foreach ($mainIndexesToCreate as $index) {
    $result = MongoStore::createIndex(SF_DATABASE, $mainCollectionName, $index);
    if ($result) {
        $numberOfIndexesCreated++;
    }
}

// Generate the message indicating the task completion
$message = "Ensure DB Indexes\n";
$message .= "\n------------- Main Database:\n";
$message .= "Number of indexes created: " . $numberOfIndexesCreated;
// Assuming the usage of $message for further logging or reporting
#!/bin/bash
#
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# Builds, lints, audits and tests the Rust agent, then stages the binaries
# (and PDBs on Windows) into artifacts/agent-$(uname).
set -ex

exists() {
    [ -e "$1" ]
}

# only set RUSTC_WRAPPER if sccache exists
# Fix: run the probe silently instead of dumping the help text into the log.
if sccache --help > /dev/null 2>&1; then
    RUSTC_WRAPPER="$(command -v sccache)"
    export RUSTC_WRAPPER
fi

# only set CARGO_INCREMENTAL on non-release builds
#
# This speeds up build time, but makes the resulting binaries slightly slower.
# https://doc.rust-lang.org/cargo/reference/profiles.html?highlight=incremental#incremental
if [ -n "${GITHUB_REF}" ]; then
    TAG_VERSION="${GITHUB_REF#refs/tags/}"
    # If stripping the tag prefix changed nothing, this ref is not a tag,
    # i.e. not a release build.
    # Fix: quoted operands and portable '=' instead of unquoted '=='.
    if [ "${TAG_VERSION}" = "${GITHUB_REF}" ]; then
        export CARGO_INCREMENTAL=1
    fi
fi

mkdir -p "artifacts/agent-$(uname)"

cd src/agent

# unless we're doing incremental builds, start clean during CI
# Fix: -z with quoting replaces the fragile unquoted "X$var == X" idiom.
if [ -z "${CARGO_INCREMENTAL}" ]; then
    cargo clean
fi

cargo fmt -- --check

# RUSTSEC-2020-0016: a dependency net2 (pulled in from tokio) is deprecated
# RUSTSEC-2020-0036: a dependency failure (pulled from proc-maps) is deprecated
# RUSTSEC-2019-0036: a dependency failure (pulled from proc-maps) has type confusion vulnerability
# RUSTSEC-2021-0065: a dependency anymap is no longer maintained
cargo audit --deny warnings --deny unmaintained --deny unsound --deny yanked --ignore RUSTSEC-2020-0016 --ignore RUSTSEC-2020-0036 --ignore RUSTSEC-2019-0036 --ignore RUSTSEC-2021-0065
cargo-license -j > data/licenses.json
cargo build --release --locked
cargo clippy --release -- -D warnings
# export RUST_LOG=trace
export RUST_BACKTRACE=full
cargo test --release --workspace

# TODO: re-enable integration tests.
# cargo test --release --manifest-path ./onefuzz-agent/Cargo.toml --features integration_test -- --nocapture

# TODO: once Salvo is integrated, this can get deleted
cargo build --release --manifest-path ./onefuzz-telemetry/Cargo.toml --all-features

cp target/release/onefuzz-agent* "../../artifacts/agent-$(uname)"
cp target/release/onefuzz-supervisor* "../../artifacts/agent-$(uname)"

if exists target/release/*.pdb; then
    for file in target/release/*.pdb; do
        cp "${file}" "../../artifacts/agent-$(uname)"
    done
fi
import nltk
from nltk.sentiment import SentimentIntensityAnalyzer
_SIA = None  # lazily-created, shared analyzer (construction loads the VADER lexicon)

def get_sentiment_score(text):
    """Return VADER's compound polarity score for ``text``.

    The compound score is a normalized value in [-1.0, 1.0]; negative
    means negative sentiment, positive means positive sentiment.

    Fix: the original constructed a new SentimentIntensityAnalyzer on
    every call, re-loading the lexicon each time. The analyzer is
    stateless across calls, so one shared instance suffices.
    """
    global _SIA
    if _SIA is None:
        _SIA = SentimentIntensityAnalyzer()
    return _SIA.polarity_scores(text)['compound']
# Example usage: sentences exercising contrast, slang, emoticons, emoji
# and negation handling. Expected compound scores are noted inline.
text1 = "The plot was good, but the characters are uncompelling and the dialog is not great."
text2 = "Today SUX!"
text3 = "Today only kinda sux! But I'll get by, lol"
text4 = "Make sure you :) or :D today!"
text5 = "Catch utf-8 emoji such as 💘 and 💋 and 😁"
text6 = "Not bad at all"

# Scores, in order: -0.2263, -0.5461, 0.5242, 0.8633, 0.875, 0.431
for _sample in (text1, text2, text3, text4, text5, text6):
    print(get_sentiment_score(_sample))
package kata.java;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Optional;
import java.util.stream.IntStream;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
 * Unit tests for LinkedArrayDeque, a deque apparently backed by linked
 * fixed-size array segments. The "MoreThanSegment" tests push 20/80
 * elements, which presumably exceeds one segment / one segment chain —
 * TODO confirm the segment capacity against the implementation.
 */
public class LinkedArrayDequeTest {
private LinkedArrayDeque deque;
// Fresh deque per test.
@Before
public void setUp() throws Exception {
deque = new LinkedArrayDeque();
}
// An empty deque reports Optional.empty() rather than throwing.
@Test
public void createEmptyDeque() throws Exception {
assertThat(deque.removeFront(), is(Optional.empty()));
}
@Test
public void addFrontOneItem() throws Exception {
deque.addFront(10);
assertThat(deque.removeFront(), is(Optional.of(10)));
assertThat(deque.removeFront(), is(Optional.empty()));
}
// Front insertion is LIFO with respect to removeFront.
@Test
public void addFrontManyItems() throws Exception {
deque.addFront(10);
deque.addFront(20);
deque.addFront(30);
assertThat(deque.removeFront(), is(Optional.of(30)));
assertThat(deque.removeFront(), is(Optional.of(20)));
assertThat(deque.removeFront(), is(Optional.of(10)));
assertThat(deque.removeFront(), is(Optional.empty()));
}
// 20 items: crosses a single segment boundary; expect 19..0 back out.
@Test
public void addFrontItemsMoreThanSegmentCapacity() throws Exception {
IntStream.range(0, 20).forEach(deque::addFront);
IntStream.iterate(19, i -> i - 1).limit(20).forEach(i -> assertThat(deque.removeFront(), is(Optional.of(i))));
assertThat(deque.removeFront(), is(Optional.empty()));
}
// 80 items: spans multiple segments; expect 79..0 back out.
@Test
public void addFrontItemsMoreThanOneSegment() throws Exception {
IntStream.range(0, 80).forEach(deque::addFront);
IntStream.iterate(79, i -> --i).limit(80).forEach(i -> assertThat(deque.removeFront(), is(Optional.of(i))));
assertThat(deque.removeFront(), is(Optional.empty()));
}
// removeBack drains in insertion order when items were added at the front.
@Test
public void removeManyBack() throws Exception {
deque.addFront(10);
deque.addFront(20);
deque.addFront(30);
assertThat(deque.removeBack(), is(Optional.of(10)));
assertThat(deque.removeBack(), is(Optional.of(20)));
assertThat(deque.removeBack(), is(Optional.of(30)));
assertThat(deque.removeBack(), is(Optional.empty()));
}
@Test
public void removeBackMoreThanSegment() throws Exception {
IntStream.range(0, 20).forEach(deque::addFront);
IntStream.range(0, 20).forEach(i -> assertThat(deque.removeBack(), is(Optional.of(i))));
assertThat(deque.removeBack(), is(Optional.empty()));
}
@Test
public void removeBackMoreThanOneSegment() throws Exception {
IntStream.range(0, 80).forEach(deque::addFront);
IntStream.range(0, 80).forEach(i -> assertThat(deque.removeBack(), is(Optional.of(i))));
assertThat(deque.removeBack(), is(Optional.empty()));
}
// NOTE(review): ignored — addBack is presumably not implemented yet; confirm
// before enabling.
@Test@Ignore
public void addBackMany() throws Exception {
deque.addBack(10);
deque.addBack(20);
deque.addBack(30);
assertThat(deque.removeBack(), is(Optional.of(30)));
assertThat(deque.removeBack(), is(Optional.of(20)));
assertThat(deque.removeBack(), is(Optional.of(10)));
assertThat(deque.removeBack(), is(Optional.empty()));
}
}
|
#!/bin/bash
# Install the linkerd CLI and deploy the control plane to the current cluster.
# Fix: fail fast on any error, unset variable, or pipeline failure — the
# original silently continued after a failed download or install step.
set -euo pipefail

# SECURITY NOTE(review): piping a remote script straight into `sh` executes
# unreviewed code; consider pinning a release and verifying its checksum.
curl -sL https://run.linkerd.io/install | sh

export PATH="$PATH:$HOME/.linkerd2/bin"

# Verify cluster prerequisites, then render and apply the install manifests.
linkerd check --pre && linkerd install | kubectl apply -f -

# Final health check; non-zero exit signals a failed deployment.
linkerd check || exit 1
|
package com.eliteams.quick4j.demo.dao;
import com.eliteams.quick4j.core.generic.GenericDao;
import com.eliteams.quick4j.demo.model.DemoModel;
/**
 * DAO for {@code DemoModel} entities keyed by {@code Long} ids.
 * All CRUD operations are inherited from {@code GenericDao}; no
 * model-specific queries are declared here.
 *
 * Created by ghu on 1/23/2017.
 */
public interface DemoDao extends GenericDao<DemoModel,Long> {
}
|
#!/usr/bin/env bash
# Normalize a file name: lowercase it and replace spaces with underscores,
# renaming on disk only when the name actually changes.
correct_file_name ()
{
    sanitized=$(echo "$1" | tr '[:upper:]' '[:lower:]' | sed 's/ /_/g')
    if [ "$1" != "$sanitized" ]; then
        # -T treats the destination as a file, never descending into a
        # same-named directory.
        mv -T "$1" "$sanitized"
    fi
}
# Recursively walk "$1", normalizing every non-hidden entry one directory
# level at a time.
find_files ()
{
    find "$1" -maxdepth 1 \( ! -regex '.*/\..*' \) | while read -r file
    do
        echo "$file"
        correct_file_name "$file"
        # Bug fix: the original recursed into "$file", but correct_file_name
        # may just have renamed it, so that path no longer exists and whole
        # subtrees were silently skipped. Recompute the corrected name (same
        # transformation) and recurse into that instead.
        renamed=$(echo "$file" | sed -e 's/ /_/g' | tr '[:upper:]' '[:lower:]')
        if [ "$1" != "$file" ] && [ -d "$renamed" ]; then
            find_files "$renamed"
        fi
    done
}
# Entry point: validate the single path argument and dispatch to the
# directory walker or the single-file renamer.
main() {
    if [ $# -eq 0 ]; then
        echo "Must provide a path!"
        exit 1
    fi

    if [ "$1" = '.' ] || [ -d "$1" ]; then
        # Directories (including the current one) are walked recursively.
        find_files "$1"
    elif [ -f "$1" ]; then
        # A plain file is renamed in place.
        correct_file_name "$1"
    else
        echo "$1 is not a valid path!"
    fi
}

main "$@"
|
<reponame>Ziezi/Programming-Principles-and-Practice-Using-C-by-Bjarne-Stroustrup-
/*
TITLE Singly Linked List Chapter20Exercise14.cpp
"<NAME> "C++ Programming: Principles and Practice.""
COMMENT
Objective: Define a singly-linked list, `slist`,
in the style of `std::list`.
Which operations from `List` should be kept?
All operations remian.
Iterator is forward only.
Input: -
Output: -
Author: <NAME>
Date: 22. 02. 2017
*/
#include <iostream>
#include "Chapter20Exercise14.h"
// Exercises the custom singly-linked list (SList) end to end: object size,
// push/pop at both ends, iterator insert/erase, element access, and the
// high() algorithm from the exercise header.
int main()
{
    try
    {
        SList<int> l;
        SList<int>* l_ptr = nullptr;
        // Compare the footprint of the list object vs a raw pointer.
        std::cout <<"list size: "<< sizeof(l) <<'\n';
        std::cout <<"pointer size: "<< sizeof(l_ptr) <<'\n';

        std::cout <<"\npush_front(): \n";
        l.push_front(1);
        l.print();
        std::cout <<'\n';
        l.push_front(3);
        l.print();
        std::cout <<'\n';
        l.push_front(2);
        l.print();

        std::cout <<"\npush_back(): \n";
        l.push_back(19);
        l.print();
        std::cout <<'\n';
        l.push_back(59);
        l.print();
        std::cout <<'\n';
        l.push_back(12);
        l.print();

        std::cout <<"\ninsert() at begin(): \n";
        SList<int>::iterator p = l.begin();
        p = l.insert(p, 5);
        std::cout <<"size: " << l.size() <<'\n';
        l.print();

        std::cout <<"\nerase() at begin(): \n";
        SList<int>::iterator b = l.begin();
        b = l.erase(b);
        l.print();
        std::cout <<'\n';

        std::cout <<"front(): \n";
        std::cout << l.front();
        std::cout <<'\n';
        std::cout <<"back(): \n";
        std::cout << l.back();
        std::cout <<'\n';
        l.print();
        std::cout <<'\n';

        std::cout <<"\npop_back(): \n";
        l.pop_back();
        l.print();
        std::cout <<'\n';
        std::cout <<"\npop_front(): \n";
        l.pop_front();
        l.print();
        std::cout <<'\n';

        l.push_front(100);
        l.push_back(191);
        l.print();
        std::cout <<'\n';

        // high() presumably returns an iterator to the largest element, or
        // end() when the range is empty — confirm in Chapter20Exercise14.h.
        SList<int>::iterator p2 = high(l.begin(), l.end());
        if (p2 != l.end())
        {
            std::cout <<"High = " << *p2 <<'\n';
        }
    }
    catch (const std::exception& e)  // fix: catch by const reference
    {
        std::cerr << e.what();
    }
    getchar();  // keep the console window open before exiting
}
|
import React from 'react';
import { observer } from 'mobx-react';
import Chance from 'chance';
import Application from '../frontEndComponents/Application';
const chance = new Chance();
// Build an array of `num` random person names for demo/fake listing data.
const createFakeAgents = num =>
  Array.from({ length: num }, () => chance.name());
// Container for the rental Application form: owns form state, the
// full-vs-credit-check toggle, and the Stripe payment modal flow.
@observer
class ApplicationContainer extends React.Component {
  state = {
    listingAgents: createFakeAgents(30), // demo data, not from props
    formValues: null, // captured application form values, set on submit
    isFullApplication: true,
    paymentFormSubmitInProgress: false,
    paymentModalVisible: false,
    cardElementComplete: false, // Stripe CardElement completeness flag
  };
  // Imperative handle to the payment form, provided via getPaymentFormApi.
  paymentFormApi = null;
  // Stash the application values and open the payment modal.
  onSubmitApplication = formValues => {
    this.setState({
      formValues,
      paymentModalVisible: true,
    });
  };
  // Validate the payment form, then tokenize the card with Stripe.
  // Guards: card element must be complete and no submit already in flight.
  // NOTE(review): leftover console.log debugging below; the stripeToken is
  // created but never sent anywhere, and paymentFormSubmitInProgress is
  // never reset to false — confirm whether this flow is unfinished.
  onSubmitPaymentForm = e => {
    if (e.preventDefault) e.preventDefault();
    this.paymentFormApi.validateFields(async (err, values) => {
      if (!this.state.cardElementComplete) return;
      if (this.state.paymentFormSubmitInProgress) return;
      if (!err) {
        this.setState({ paymentFormSubmitInProgress: true });
        console.log(values);
        const {
          applicantCountry,
          cardOwnerName,
          paymentAddress,
          paymentAddressCity,
          paymentAddressState,
        } = values;
        console.log(this.state.formValues);
        const stripeToken = await this.props.stripe.createToken({
          name: cardOwnerName,
          address_line1: paymentAddress,
          address_city: paymentAddressCity,
          address_state: paymentAddressState,
          address_country: applicantCountry,
        });
        console.log(stripeToken);
      }
    });
  };
  // Toggle helpers: setState only when the mode actually changes.
  setFullApplication = () => {
    if (!this.state.isFullApplication) {
      this.setState({ isFullApplication: true });
    }
  };
  setCreditCheckApplication = () => {
    if (this.state.isFullApplication) {
      this.setState({ isFullApplication: false });
    }
  };
  handleClosePaymentModal = () => {
    this.setState({
      paymentModalVisible: false,
    });
  };
  // Mirrors Stripe CardElement's `complete` flag into state.
  cardElementOnChange = ({ complete }) => {
    this.setState({
      cardElementComplete: complete,
    });
  };
  // Callback ref-style hook used by the child to expose its form API.
  getPaymentFormApi = paymentFormApi => {
    this.paymentFormApi = paymentFormApi;
  };
  render() {
    // NOTE(review): listingID and submit are destructured but never used,
    // and listingAgents from props is shadowed by state below — confirm
    // whether props were meant to drive the Application.
    const { listingID, listingAgents, submit } = this.props;
    return (
      <Application
        listingAgents={this.state.listingAgents}
        onSubmit={this.onSubmitApplication}
        onSubmitPaymentForm={this.onSubmitPaymentForm}
        isFullApplication={this.state.isFullApplication}
        setFullApplication={this.setFullApplication}
        setCreditCheckApplication={this.setCreditCheckApplication}
        paymentFormSubmitInProgress={this.state.paymentFormSubmitInProgress}
        paymentModalVisible={this.state.paymentModalVisible}
        cardElementOnChange={this.cardElementOnChange}
        cardElementComplete={this.state.cardElementComplete}
        handleClosePaymentModal={this.handleClosePaymentModal}
        getPaymentFormApi={this.getPaymentFormApi}
      />
    );
  }
}
|
package com.github.chen0040.leetcode.day18.medium;
/**
* Created by xschen on 13/8/2017.
*
* link: https://leetcode.com/problems/beautiful-arrangement/description/
*/
/**
 * LeetCode 526 — Beautiful Arrangement.
 * Counts permutations p of 1..N where, for every position i (1-based),
 * either i divides p[i] or p[i] divides i.
 */
public class BeautifulArrangement {
    public class Solution {
        private int count;

        public int countArrangement(int N) {
            count = 0;
            search(N, 1, new boolean[N + 1]);
            return count;
        }

        // Backtracking: try every unused number at position `pos`; a number
        // fits when it divides pos or pos divides it.
        private void search(int n, int pos, boolean[] used) {
            if (pos > n) {
                count++;
                return;
            }
            for (int candidate = 1; candidate <= n; candidate++) {
                if (used[candidate]) continue;
                if (pos % candidate != 0 && candidate % pos != 0) continue;
                used[candidate] = true;
                search(n, pos + 1, used);
                used[candidate] = false;
            }
        }
    }
}
|
#!/bin/bash

# build the python distribution

set -e
set -x

# Resolve the repository root relative to this script.
# Fix: `dirname $0` was unquoted, breaking on script paths containing
# spaces; the pushd argument was unquoted for the same reason.
FWDIR="$(cd "$(dirname "$0")"/..; pwd)"
cd "$FWDIR"

pushd "${FWDIR}/python"
python setup.py sdist
popd
|
<gh_stars>1-10
#include "XlibBackend.h"

#include <string>
// Construct an Xlib-backed screen surface of the given size.
// display_name selects the X display; presumably forwarded to
// XOpenDisplay, where nullptr means "use $DISPLAY" — confirm in
// XlibBackendPriv.
XlibBackend::XlibBackend(int width, int height, char *display_name)
: ScreenBackend("xlib", width, height)
, xlibBackendPriv(width, height, display_name)
{}
// Create the Cairo surface bound to the X window. The assert guards
// against double-creation: callers must not invoke this twice without
// destroying the previous surface.
void XlibBackend::createSurface()
{
assert(!surface);
this->surface = xlibBackendPriv.cairo_surface_create(width, height);
}
// Resize the underlying X window first, then update the cached width on
// the base class so the window and the backend's bookkeeping stay in sync.
void XlibBackend::setWidth(int width)
{
xlibBackendPriv.ResizeWindow(width, this->height);
ScreenBackend::setWidth(width);
}
// Mirror of setWidth: resize the X window, then update the cached height.
void XlibBackend::setHeight(int height)
{
xlibBackendPriv.ResizeWindow(this->width, height);
ScreenBackend::setHeight(height);
}
// Persistent handle keeping the JS constructor alive across GC cycles.
Nan::Persistent<FunctionTemplate> XlibBackend::constructor;

// Register the XlibBackend class on `target` (the module exports):
// build the constructor template, reserve one internal field for the
// wrapped C++ object, attach the shared ScreenBackend methods, and
// export it as "XlibBackend".
void XlibBackend::Initialize(Local<Object> target)
{
Nan::HandleScope scope;
Local<FunctionTemplate> ctor = Nan::New<FunctionTemplate>(XlibBackend::New);
XlibBackend::constructor.Reset(ctor);
ctor->InstanceTemplate()->SetInternalFieldCount(1);
ctor->SetClassName(Nan::New<String>("XlibBackend").ToLocalChecked());
ScreenBackend::Initialize(ctor);
Nan::Set(target,
Nan::New<String>("XlibBackend").ToLocalChecked(),
Nan::GetFunction(ctor).ToLocalChecked()).Check();
}
// JS constructor: new XlibBackend(width?, height?, displayName?).
// Non-number sizes default to 0; a missing display name is passed as
// nullptr.
NAN_METHOD(XlibBackend::New)
{
  int width = 0;
  int height = 0;
  if (info[0]->IsNumber()) width = Nan::To<uint32_t>(info[0]).FromMaybe(0);
  if (info[1]->IsNumber()) height = Nan::To<uint32_t>(info[1]).FromMaybe(0);

  // Bug fix: the original did `display_name = *Nan::Utf8String(info[2])`,
  // keeping a pointer into a temporary that is destroyed at the end of the
  // statement — the backend constructor then read freed memory. Copy the
  // text into a std::string that outlives construction instead.
  // NOTE(review): if XlibBackendPriv stores the pointer rather than using
  // it immediately (e.g. in XOpenDisplay), it must take its own copy.
  std::string display_name;
  bool has_display_name = false;
  if (info[2]->IsString()) {
    has_display_name = true;
    display_name = *Nan::Utf8String(info[2]);
  }

  XlibBackend *backend = new XlibBackend(
      width, height,
      has_display_name ? const_cast<char *>(display_name.c_str()) : nullptr);
  backend->Wrap(info.This());
  info.GetReturnValue().Set(info.This());
}
|
package com.trikzon.armor_visibility.client.forge;
import com.trikzon.armor_visibility.ArmorVisibility;
import com.trikzon.armor_visibility.client.ArmorVisibilityClient;
import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext;
/**
 * Forge client entry point: boots the common and client-side mod logic and
 * hooks client setup onto the mod event bus.
 */
public class ArmorVisibilityClientForge {
public ArmorVisibilityClientForge() {
// Common initialization first, then client-only initialization.
ArmorVisibility.initialize();
ArmorVisibilityClient.initialize();
// NOTE(review): PlatformClientImpl is not imported in this file —
// presumably it resolves via the same package; confirm it compiles.
FMLJavaModLoadingContext.get().getModEventBus().addListener(PlatformClientImpl::onClientSetup);
}
}
|
#!/bin/bash
# Generate man pages for the doriancoin binaries with help2man, stripping
# the git-tag suffix from the detected version.

TOPDIR=${TOPDIR:-$(git rev-parse --show-toplevel)}
SRCDIR=${SRCDIR:-$TOPDIR/src}
MANDIR=${MANDIR:-$TOPDIR/doc/man}

DORIANCOIND=${DORIANCOIND:-$SRCDIR/doriancoind}
DORIANCOINCLI=${DORIANCOINCLI:-$SRCDIR/doriancoin-cli}
DORIANCOINTX=${DORIANCOINTX:-$SRCDIR/doriancoin-tx}
DORIANCOINQT=${DORIANCOINQT:-$SRCDIR/qt/doriancoin-qt}

# Fix: quote the binary path — unquoted it breaks on paths with spaces.
[ ! -x "$DORIANCOIND" ] && echo "$DORIANCOIND not found or not executable." && exit 1

# The autodetected version git tag can screw up manpage output a little bit
DRCVER=($("$DORIANCOINCLI" --version | head -n1 | awk -F'[ -]' '{ print $6, $7 }'))

# Create a footer file with copyright content.
# This gets autodetected fine for doriancoind if --version-string is not set,
# but has different outcomes for doriancoin-qt and doriancoin-cli.
echo "[COPYRIGHT]" > footer.h2m
"$DORIANCOIND" --version | sed -n '1!p' >> footer.h2m

# Intentionally unquoted: each variable expands to a single path here and
# this preserves the original word-splitting behaviour of the loop list.
for cmd in $DORIANCOIND $DORIANCOINCLI $DORIANCOINTX $DORIANCOINQT; do
  cmdname="${cmd##*/}"
  help2man -N --version-string="${DRCVER[0]}" --include=footer.h2m -o "${MANDIR}/${cmdname}.1" "${cmd}"
  sed -i "s/\\\-${DRCVER[1]}//g" "${MANDIR}/${cmdname}.1"
done

rm -f footer.h2m
|
package edu.washington.cse.instrumentation.analysis.utils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.DumperOptions.FlowStyle;
import org.yaml.snakeyaml.Yaml;
import soot.G;
import soot.PackManager;
import soot.SceneTransformer;
import soot.SootMethod;
import soot.Transform;
import soot.Unit;
import soot.jimple.InvokeExpr;
import soot.jimple.Stmt;
import soot.jimple.toolkits.callgraph.reflection.CallGraphBuilderBridge;
import soot.jimple.toolkits.callgraph.reflection.ComposableReflectionHandlers;
import soot.jimple.toolkits.callgraph.reflection.PluggableReflectionHandler;
import soot.jimple.toolkits.callgraph.reflection.TypeStateReflectionHandler;
import soot.jimple.toolkits.ide.icfg.JimpleBasedInterproceduralCFG;
import soot.jimple.toolkits.pointer.MemoryEfficientRasUnion;
import soot.jimple.toolkits.pointer.Union;
import soot.jimple.toolkits.pointer.UnionFactory;
import edu.washington.cse.instrumentation.analysis.resource.YamlResourceResolver;
/**
 * Whole-program Soot pass that resolves resource-access call sites and dumps
 * them (method, accessed resources, disambiguating tag) to a YAML cache file.
 * Run via {@code soot.Main} with the {@code wjtp.rr-cache} transform enabled
 * and {@code input}/{@code output} phase options set.
 */
public class ResourceStringGenerator {
/**
 * Reflection handler that defers instantiating its delegate until first
 * use, so the delegate's constructor runs only if reflection handling is
 * actually needed.
 */
public static class DeferredHandler implements PluggableReflectionHandler {
private final Class<? extends PluggableReflectionHandler> impl;
private PluggableReflectionHandler delegate;
public DeferredHandler(final Class<? extends PluggableReflectionHandler> impl) {
this.impl = impl;
}
// Lazily instantiate the delegate via its no-arg constructor.
// NOTE(review): instantiation failure exits the JVM without any message,
// swallowing the exception — consider logging before System.exit.
private PluggableReflectionHandler getDelegate() {
if(this.delegate == null) {
try {
return this.delegate = this.impl.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
System.exit(1);
return null;
}
} else {
return this.delegate;
}
}
// The remaining overrides simply forward to the lazily-created delegate.
@Override
public boolean handleForNameCall(final SootMethod container, final Stmt s,
final CallGraphBuilderBridge bridge) {
return getDelegate().handleForNameCall(container, s, bridge);
}
@Override
public boolean handleNewInstanceCall(final SootMethod container, final Stmt s,
final CallGraphBuilderBridge bridge) {
return getDelegate().handleNewInstanceCall(container, s, bridge);
}
@Override
public boolean handleInvokeCall(final SootMethod container, final Stmt s,
final CallGraphBuilderBridge bridge) {
return getDelegate().handleInvokeCall(container, s, bridge);
}
@Override
public boolean handleConstructorNewInstanceCall(final SootMethod container,
final Stmt s, final CallGraphBuilderBridge bridge) {
return getDelegate().handleConstructorNewInstanceCall(container, s, bridge);
}
@Override
public void handleNewMethod(final SootMethod m, final CallGraphBuilderBridge bridge) {
getDelegate().handleNewMethod(m, bridge);
}
}
// True when no other unit in m's body prints identically to u — i.e. the
// unit's string form alone identifies it within the method.
public static boolean isUniqueWithin(final Unit u, final SootMethod m) {
for(final Unit u1 : m.getActiveBody().getUnits()) {
if(u == u1) {
continue;
}
if(u.toString().equals(u1.toString())) {
return false;
}
}
return true;
}
// Returns u's ordinal among the units of m that share its string form
// (0 for the first occurrence). Throws if u is not in m's body.
public static int getUnitTag(final Unit u, final SootMethod m) {
int i = 0;
for(final Unit u1 : m.getActiveBody().getUnits()) {
if(u1 == u) {
return i;
}
if(u1.toString().equals(u.toString())) {
i++;
}
}
throw new RuntimeException();
}
public static void main(final String[] args) {
// Register the whole-jimple transform that performs the actual analysis.
PackManager.v().getPack("wjtp").add(new Transform("wjtp.rr-cache", new SceneTransformer() {
@Override
protected void internalTransform(final String phaseName, final Map<String, String> options) {
final YamlResourceResolver yrr = new YamlResourceResolver(options.get("input"));
final JimpleBasedInterproceduralCFG icfg = new JimpleBasedInterproceduralCFG();
final List<Map<String, Object>> resolve = new ArrayList<>();
// First YAML entry: the signatures of all resource-access methods.
{
final List<String> repr = new ArrayList<>();
for(final SootMethod acc : yrr.getResourceAccessMethods()) {
repr.add(acc.getSignature());
}
final Map<String, Object> entry = new HashMap<>();
entry.put("access-sigs", repr);
resolve.add(entry);
}
// One entry per resolved call site of each resource-access method.
for(final SootMethod access : yrr.getResourceAccessMethods()) {
for(final Unit u : icfg.getCallersOf(access)) {
final Stmt s = (Stmt) u;
final InvokeExpr ie = s.getInvokeExpr();
if(!yrr.isResourceAccess(ie, s)) {
continue;
}
final Set<String> l = yrr.getAccessedResources(ie, u);
final Map<String, Object> entry = new HashMap<>();
final SootMethod methodOf = icfg.getMethodOf(u);
entry.put("method", methodOf.getSignature());
// A null resource set is recorded as-is (meaning "unresolved",
// presumably — confirm against YamlResourceResolver).
if(l == null) {
entry.put("res", l);
} else {
final ArrayList<String> rList = new ArrayList<>();
for(final String r : l) {
rList.add(r);
}
entry.put("res", rList);
}
// Disambiguate call sites with identical string forms: unique
// units need no tag, duplicates carry their occurrence index.
if(isUniqueWithin(u, methodOf)) {
entry.put("unique", true);
} else {
entry.put("tag", getUnitTag(u, methodOf));
}
entry.put("unit", u.toString());
resolve.add(entry);
}
}
// Dump everything as block-style YAML with unlimited line width.
final DumperOptions dOptions = new DumperOptions();
dOptions.setDefaultFlowStyle(FlowStyle.BLOCK);
dOptions.setWidth(Integer.MAX_VALUE);
final Yaml y = new Yaml(dOptions);
// NOTE(review): a FileNotFoundException is silently swallowed here,
// producing no output file and no diagnostic.
try(PrintWriter pw = new PrintWriter(new File(options.get("output")))) {
y.dump(resolve, pw);
} catch (final FileNotFoundException e) { }
// Terminate after the transform; Soot's remaining phases are not needed.
System.exit(0);
}
}) {
{
setDeclaredOptions("enabled input output");
}
});
// Soot requires a Union factory for points-to side-effect analysis.
G.v().Union_factory = new UnionFactory() {
@Override
public Union newUnion() {
return new MemoryEfficientRasUnion();
}
};
ComposableReflectionHandlers.v().addHandler(new DeferredHandler(TypeStateReflectionHandler.class));
soot.Main.main(args);
}
}
|
#!/bin/bash
# Regenerate the Jetty Dockerfiles: for each version directory (e.g.
# "9.2-jre7") pick the newest matching release from Maven metadata and
# rewrite the FROM and ENV JETTY_VERSION lines.
set -ueo pipefail
cd "$(dirname "$(readlink -f "$BASH_SOURCE")")"
# Default to every subdirectory when no explicit paths are given.
paths=( "$@" )
if [ ${#paths[@]} -eq 0 ]; then
paths=( */ )
fi
paths=( "${paths[@]%/}" )
MAVEN_METADATA_URL='https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-distribution/maven-metadata.xml'
# All published versions, newest first (intentional word splitting).
available=( $( curl -sSL "$MAVEN_METADATA_URL" | grep -Eo '<(version)>[^<]*</\1>' | awk -F'[<>]' '{ print $3 }' | sort -Vr ) )
for path in "${paths[@]}"; do
version="${path%%-*}" # "9.2"
suffix="${path#*-}" # "jre7"
baseImage='openjdk'
case "$suffix" in
jre*|jdk*)
baseImage+=":${suffix:3}-${suffix:0:3}" # ":7-jre"
;;
esac
# Bucket candidates by maturity: milestone (.M), release candidate
# (.RC), or full release (.v timestamp).
milestones=()
releaseCandidates=()
fullReleases=()
for candidate in "${available[@]}"; do
if [[ "$candidate" == "$version".* ]]; then
if [[ "$candidate" == *.M* ]]; then
milestones+=("$candidate")
elif [[ "$candidate" == *.RC* ]]; then
releaseCandidates+=("$candidate")
elif [[ "$candidate" == *.v* ]]; then
fullReleases+=("$candidate")
fi
fi
done
# Prefer full release > RC > milestone; "$array" yields element 0,
# which is the newest thanks to the sort -Vr above.
fullVersion=
if [ -n "${fullReleases-}" ]; then
fullVersion="$fullReleases"
elif [ -n "${releaseCandidates-}" ]; then
fullVersion="$releaseCandidates"
elif [ -n "${milestones-}" ]; then
fullVersion="$milestones"
fi
if [ -z "$fullVersion" ]; then
echo >&2 "Unable to find Jetty package for $path"
exit 1
fi
# Refresh both the default and alpine variants, where present.
for variant in alpine ''; do
[ -d "$path/$variant" ] || continue
(
set -x
cp docker-entrypoint.sh generate-jetty-start.sh "$path/$variant"
sed -ri '
s/^(FROM) .*/\1 '"$baseImage${variant:+-$variant}"'/;
s/^(ENV JETTY_VERSION) .*/\1 '"$fullVersion"'/;
' "$path/$variant/Dockerfile"
)
done
done
|
// Returns the nth Fibonacci number, 1-indexed over 0, 1, 1, 2, 3, ...
// (getFib(1) === 0, getFib(2) === 1).
//
// Fix: the original recursed unboundedly for n < 1 (the memo array could
// never reach length n), crashing with a stack overflow. Invalid inputs
// now return undefined, matching the original's implicit "no result"
// value. The memo-and-recurse helper is replaced by a plain loop.
const getFib = (n) => {
  if (!Number.isInteger(n) || n < 1) return undefined;
  if (n === 1) return 0;
  if (n === 2) return 1;
  const arr = [0, 1];
  while (arr.length < n) {
    arr.push(arr[arr.length - 1] + arr[arr.length - 2]);
  }
  return arr[n - 1];
};

const res = getFib(2);
console.log(res);
//Optimized solution
function getNthFib(n) {
// Write your code here.
const arr = [0, 1];
let res;
let count = 3;
while (count <= n) {
res = arr[0] + arr[1];
arr[0] = arr[1];
arr[1] = res;
count++;
}
return n > 1 ? arr[1] : arr[0];
}
|
<gh_stars>0
import React from "react";
// Generated-style EUI icon: fixed 16x16 "KQL value" glyph. Spread props let
// callers override attributes (className, aria-*, etc.) on the root <svg>.
const EuiIconKqlValue = props => <svg width={16} height={16} viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" {...props}><path d="M8 4a5 5 0 1 1 0 8 5 5 0 1 1 0-8zm-.75.692a4 4 0 1 0 0 6.615A4.981 4.981 0 0 1 6 8c0-1.268.472-2.426 1.25-3.308zM11.348 11l2.078-5.637h-.739l-1.656 4.727h-.062L9.313 5.363h-.739L10.652 11h.696z" /></svg>;
export const icon = EuiIconKqlValue;
require 'spec_helper'
require 'my-gem'
describe MyGem do
  # `it` without a block marks this example as pending, so the suite
  # reports it as "not yet implemented" instead of silently passing.
  it 'requires additional testing'
end
|
import { assertEquals } from 'https://deno.land/std/testing/asserts.ts';
import { init } from './mod.ts';
// Test fixture: async generator yielding 1, 2, 3, 4 in order.
async function* asyncIterable() {
  for (const value of [1, 2, 3, 4]) {
    yield value;
  }
}
// init() should yield every element of the async iterable except the last,
// mirroring Haskell's `init`.
Deno.test('init() [1, 2, 3, 4]', async () => {
const actual: number[] = [];
const expected = [1, 2, 3];
for await (const _ of init(asyncIterable())) {
actual.push(_);
}
assertEquals(actual, expected);
});
|
"use strict";

// Generated icon-data module (Material "pages" two-tone): a serialized SVG
// tree — viewBox plus child path nodes — consumed by an icon renderer.
// Do not hand-edit the path data.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_pages_twotone = void 0;
var ic_pages_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M7 7l4 1V5H5v6h3zm1 6H5v6h6v-3l-4 1zm9 4l-4-1v3h6v-6h-3zm-4-9l4-1-1 4h3V5h-6z",
"opacity": ".3"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM5 5h6v3L7 7l1 4H5V5zm6 14H5v-6h3l-1 4 4-1v3zm-1.63-4.37l.91-2.63-.91-2.63 2.63.91 2.63-.91-.91 2.63.91 2.63-2.63-.91-2.63.91zM19 19h-6v-3l4 1-1-4h3v6zm0-8h-3l1-4-4 1V5h6v6z"
},
"children": []
}]
};
exports.ic_pages_twotone = ic_pages_twotone;
<reponame>anthonyndunguwanja/Anthony-Ndungu-bootcamp-17
import json
import urllib2
# open the url and the screen name
# (The screen name is the screen name of the user for whom to return results for)
# NOTE(review): Python 2 code (print statement, urllib2) against Twitter's
# long-retired v1 API — this will not run on Python 3 and the endpoint is
# dead. Kept as-is for reference.
def get_data():
    # Timeline for screen_name=python from the legacy, unauthenticated v1 API.
    url = "http://api.twitter.com/1/statuses/user_timeline.json?screen_name=python"
    # Parse the JSON HTTP response into a Python object (the original
    # comment incorrectly described this as dumping to a string).
    data = json.load(urllib2.urlopen(url))
    # print the result
    print data
get_data()
|
import requests
def upload_xml_data(xml_data: str, api_endpoint: str) -> str:
    """POST ``xml_data`` to ``api_endpoint`` and describe the HTTP status.

    Returns a short human-readable string for the known status codes
    (201, 400, 401) and a generic message for anything else.
    """
    response = requests.post(
        api_endpoint,
        data=xml_data,
        headers={'Content-Type': 'application/xml'},
    )
    # Table-driven status mapping instead of an if/elif chain.
    status_messages = {
        201: "Success - 201",
        400: "Bad Request - 400",
        401: "Unauthorized - 401",
    }
    return status_messages.get(response.status_code, "Unknown Response Code")
import React from "react";
import ContentLoader from "react-content-loader";
// Loading placeholder for the profile table: a fixed 1000x550 skeleton with
// a header row and eight data rows, each row being three text bars plus two
// circular action buttons, separated by hairline rects. Coordinates are
// hand-tuned to the real table's layout — edit with care.
export default function SkeletonProfile() {
  const Skeleton = (props) => (
    <ContentLoader
      width={1000}
      height={550}
      style={{ width: "100%", height: "100%" }}
      viewBox="0 0 1000 550"
      backgroundColor="#eaeced"
      foregroundColor="#ffffff"
      {...props}
    >
      <rect x="51" y="45" rx="3" ry="3" width="906" height="17" />
      <circle cx="879" cy="123" r="11" />
      <circle cx="914" cy="123" r="11" />
      <rect x="104" y="115" rx="3" ry="3" width="141" height="15" />
      <rect x="305" y="114" rx="3" ry="3" width="299" height="15" />
      <rect x="661" y="114" rx="3" ry="3" width="141" height="15" />
      <rect x="55" y="155" rx="3" ry="3" width="897" height="2" />
      <circle cx="880" cy="184" r="11" />
      <circle cx="915" cy="184" r="11" />
      <rect x="105" y="176" rx="3" ry="3" width="141" height="15" />
      <rect x="306" y="175" rx="3" ry="3" width="299" height="15" />
      <rect x="662" y="175" rx="3" ry="3" width="141" height="15" />
      <rect x="56" y="216" rx="3" ry="3" width="897" height="2" />
      <circle cx="881" cy="242" r="11" />
      <circle cx="916" cy="242" r="11" />
      <rect x="106" y="234" rx="3" ry="3" width="141" height="15" />
      <rect x="307" y="233" rx="3" ry="3" width="299" height="15" />
      <rect x="663" y="233" rx="3" ry="3" width="141" height="15" />
      <rect x="57" y="274" rx="3" ry="3" width="897" height="2" />
      <circle cx="882" cy="303" r="11" />
      <circle cx="917" cy="303" r="11" />
      <rect x="107" y="295" rx="3" ry="3" width="141" height="15" />
      <rect x="308" y="294" rx="3" ry="3" width="299" height="15" />
      <rect x="664" y="294" rx="3" ry="3" width="141" height="15" />
      <rect x="58" y="335" rx="3" ry="3" width="897" height="2" />
      <circle cx="881" cy="363" r="11" />
      <circle cx="916" cy="363" r="11" />
      <rect x="106" y="355" rx="3" ry="3" width="141" height="15" />
      <rect x="307" y="354" rx="3" ry="3" width="299" height="15" />
      <rect x="663" y="354" rx="3" ry="3" width="141" height="15" />
      <rect x="57" y="395" rx="3" ry="3" width="897" height="2" />
      <circle cx="882" cy="424" r="11" />
      <circle cx="917" cy="424" r="11" />
      <rect x="107" y="416" rx="3" ry="3" width="141" height="15" />
      <rect x="308" y="415" rx="3" ry="3" width="299" height="15" />
      <rect x="664" y="415" rx="3" ry="3" width="141" height="15" />
      <rect x="55" y="453" rx="3" ry="3" width="897" height="2" />
      <rect x="51" y="49" rx="3" ry="3" width="2" height="465" />
      <rect x="955" y="49" rx="3" ry="3" width="2" height="465" />
      <circle cx="882" cy="484" r="11" />
      <circle cx="917" cy="484" r="11" />
      <rect x="107" y="476" rx="3" ry="3" width="141" height="15" />
      <rect x="308" y="475" rx="3" ry="3" width="299" height="15" />
      <rect x="664" y="475" rx="3" ry="3" width="141" height="15" />
      <rect x="55" y="513" rx="3" ry="3" width="897" height="2" />
      <rect x="52" y="80" rx="3" ry="3" width="906" height="17" />
      <rect x="53" y="57" rx="3" ry="3" width="68" height="33" />
      <rect x="222" y="54" rx="3" ry="3" width="149" height="33" />
      <rect x="544" y="55" rx="3" ry="3" width="137" height="33" />
      <rect x="782" y="56" rx="3" ry="3" width="72" height="33" />
      <rect x="933" y="54" rx="3" ry="3" width="24" height="33" />
    </ContentLoader>
  );
  return (
    <div className="panel-body">
      <Skeleton />
    </div>
  );
}
|
package com.samus.freya.helper;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.pdf.PdfDocument;
import android.os.Bundle;
import android.os.ParcelFileDescriptor;
import android.print.PageRange;
import android.print.PrintAttributes;
import android.print.PrintDocumentAdapter;
import android.print.PrintDocumentInfo;
import android.print.pdf.PrintedPdfDocument;
import android.text.TextPaint;
import android.util.SparseArray;
import android.view.View;
import com.samus.freya.model.Contact;
import com.samus.freya.model.ContactService;
import com.samus.freya.model.Day;
import com.samus.freya.model.Month;
import com.samus.freya.model.Service;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
/**
* Created by samus on 09.01.2017.
*/
public class ViewPrintAdapter extends PrintDocumentAdapter {
private PrintedPdfDocument mDocument;
private Context mContext;
private Month month;
private DBHelper dbHelper;
private SparseArray<Contact> contacts;
private List<Contact> contactsOrder;
private SparseArray<Float> req, hours;
private SparseArray<Day> days;
private SparseArray<Service> services;
private int maxContacts = 15; // max allowed contacts per page
private int numPages;
public ViewPrintAdapter(Context context, int month_id) {
mContext = context;
dbHelper = new DBHelper(context);
month = dbHelper.getMonth(month_id);
req = new SparseArray<>();
contacts = dbHelper.getAllContactsForMonth(month_id, req);
days = dbHelper.getAllDaysForMonth(month_id);
services = dbHelper.getAllServicesForMonth(month_id);
contactsOrder = asList(contacts);
Collections.sort(contactsOrder, new Comparator<Contact>() {
@Override
public int compare(Contact contact, Contact t1) {
if (contact.getName().split(" ").length==2 && t1.getName().split(" ").length==2)
return contact.getName().split(" ")[1].compareTo(t1.getName().split(" ")[1]);
else
return contact.getName().compareTo(t1.getName());
}
});
numPages = (contactsOrder.size()-1)/maxContacts+1;
if (contacts.size() == 0) numPages=0; // no contacts
}
private <C> List<C> asList(SparseArray<C> sparseArray) {
if (sparseArray == null) return null;
List<C> arrayList = new ArrayList<>(sparseArray.size());
for (int i = 0; i < sparseArray.size(); i++)
arrayList.add(sparseArray.valueAt(i));
return arrayList;
}
@Override
public void onLayout(PrintAttributes oldAttributes, PrintAttributes newAttributes,
android.os.CancellationSignal cancellationSignal,
LayoutResultCallback callback, Bundle extras) {
PrintAttributes pdfPrintAttrs = new PrintAttributes.Builder().
setColorMode(PrintAttributes.COLOR_MODE_MONOCHROME).
setMediaSize(PrintAttributes.MediaSize.ISO_A4.asLandscape()).
setResolution(new PrintAttributes.Resolution("zooey", Activity.PRINT_SERVICE, 300, 300)).
setMinMargins(PrintAttributes.Margins.NO_MARGINS).
build();
mDocument = new PrintedPdfDocument(mContext, pdfPrintAttrs);
if (cancellationSignal.isCanceled()) {
callback.onLayoutCancelled();
return;
}
PrintDocumentInfo.Builder builder = new PrintDocumentInfo
.Builder("print_output.pdf")
.setContentType(PrintDocumentInfo.CONTENT_TYPE_DOCUMENT)
.setPageCount(numPages);
PrintDocumentInfo info = builder.build();
callback.onLayoutFinished(info, true);
}
@Override
public void onWrite(PageRange[] pages, ParcelFileDescriptor destination,
android.os.CancellationSignal cancellationSignal,
WriteResultCallback callback) {
int leftgap = 30;
int rightgap = leftgap;
int topgap = 120;
int bottomgap = 20;
int padding = 6;
TextPaint textPaint = new TextPaint();
textPaint.setColor(Color.BLACK);
Paint paint = new Paint();
paint.setColor(Color.BLACK);
paint.setStrokeWidth(1);
Rect rect = new Rect();
textPaint.getTextBounds("SDADSAS", 0, 5, rect);
int textheight = rect.height();
hours = new SparseArray<>();
for (int i=0; i<contacts.size(); i++) hours.append(contacts.keyAt(i), 0f);
for (int i = 0; i < numPages; i++) {
// Start the page
PdfDocument.Page page = mDocument.startPage(i);
textPaint.setTextSize(12f);
// get the page canvas and measure it.
Canvas pageCanvas = page.getCanvas();
int width = pageCanvas.getWidth();
int height = pageCanvas.getHeight();
int gap = (height-topgap-bottomgap)/maxContacts;
int maxNameWidth = 0; // TODO set a max and use sname if too long or smaller font or two lines
int contactsLeft = 0;
// Name section
pageCanvas.drawLine(leftgap, topgap-gap/2-textheight/2, width-rightgap, topgap-gap/2-textheight/2, paint);
for (int l=0; l<maxContacts && i*maxContacts+l<contacts.size(); l++) {
Contact contact = contactsOrder.get(i*maxContacts+l);
String field = contact.getName() + ", " + contact.getWh();
paint.getTextBounds(field, 0, field.length(), rect);
if (rect.width() > maxNameWidth) maxNameWidth = rect.width();
pageCanvas.drawText(contact.getName() + ", " + contact.getWh(),
leftgap+padding, topgap+l*gap, textPaint);
pageCanvas.drawLine(leftgap, topgap+l*gap+gap/2-textheight/2, width-rightgap, topgap+l*gap+gap/2-textheight/2, paint);
contactsLeft = l;
}
pageCanvas.drawLine(leftgap, topgap-gap/2-textheight/2, leftgap, topgap+contactsLeft*gap+gap/2-textheight/2, paint); // left line
pageCanvas.drawLine(width-rightgap, topgap-gap/2-textheight/2, width-rightgap, topgap+contactsLeft*gap+gap/2-textheight/2, paint); // right line
int curX = leftgap + maxNameWidth + 2*padding+2; // +2 is from width of line
int curY = topgap-gap/2-textheight/2-2*textheight-2*padding;
pageCanvas.drawLine(curX, curY, curX, topgap+contactsLeft*gap+gap/2-textheight/2, paint); // after name line
// Req section
paint.getTextBounds("100.0/100.0",0,11,rect);
maxNameWidth = rect.width()+3;
//pageCanvas.drawLine(width-rightgap-maxNameWidth-2*padding, curY, width-rightgap-maxNameWidth-2*padding, topgap+contactsLeft*gap+gap/2-textheight/2, paint); // left of req line
// Day section
int numDays = days.size();
String[] dayNames = new String[] { "So", "Mo", "Di", "Mi",
"Do", "Fr", "Sa" };
Calendar cal = new GregorianCalendar(month.getYear(), month.getMonth()-1, 1);
int shift = cal.get(Calendar.DAY_OF_WEEK) + 6;
int horizontal_gap = Math.round((1.0f*width-rightgap-maxNameWidth-2*padding-curX)/numDays);
int dif = (horizontal_gap*numDays-(width-rightgap-maxNameWidth-2*padding-curX));
pageCanvas.drawLine(curX, curY, curX+numDays*horizontal_gap, curY, paint); // over date line
for (int l=0; l<numDays; l++) {
Day day = days.get(l);
if (day.getDate() > 9) pageCanvas.drawText(String.valueOf(day.getDate()), curX+l*horizontal_gap+padding/2, curY+padding/2+textheight, textPaint); // day number
else pageCanvas.drawText(String.valueOf(day.getDate()), curX+l*horizontal_gap+padding, curY+padding/2+textheight, textPaint); // day number
textPaint.getTextBounds(dayNames[(l+shift)%7], 0, dayNames[(l+shift)%7].length(), rect);
int dateWidth = rect.width();
pageCanvas.drawText(dayNames[(l+shift)%7], curX+l*horizontal_gap+(horizontal_gap-dateWidth)/2, curY+padding+2*textheight, textPaint); // day name
pageCanvas.drawLine(curX+(l+1)*horizontal_gap, curY, curX+(l+1)*horizontal_gap, topgap+contactsLeft*gap+gap/2-textheight/2, paint); // inner lines
}
// Service Section
int x0 = curX; // with horizonral_gap
int y0 = topgap; // with gap
for (int l=0; l<numDays; l++) {
Day day = days.get(l);
List<ContactService> css = dbHelper.getAllContactsForDay(day.getId());
for (ContactService cs: css) {
int conInd = contactsOrder.indexOf(contacts.get(cs.contact_id));
if (conInd >= (i+1)*maxContacts || conInd < i*maxContacts) continue;
String servText = services.get(cs.service_id).getDesc();
textPaint.getTextBounds(servText, 0, servText.length(), rect);
int servWidth = rect.width();
pageCanvas.drawText(servText, x0+l*horizontal_gap+(horizontal_gap-servWidth)/2, y0+(conInd%maxContacts)*gap, textPaint);
if (services.get(cs.service_id).getSpe()) hours.append(cs.contact_id, hours.get(cs.contact_id) + contacts.get(cs.contact_id).getWh()/5);
else hours.append(cs.contact_id, hours.get(cs.contact_id) + services.get(cs.service_id).getVal());
}
}
// additional req section
for (int l=0; l<maxContacts && i*maxContacts+l<contacts.size(); l++) {
float have = hours.get(contactsOrder.get(l).getId());
float need = req.get(contactsOrder.get(l).getId());
String reqText = String.format(Locale.US, "%.1f", have) + "/" + String.format(Locale.US, "%.1f", need);
pageCanvas.drawText(reqText, width-rightgap-maxNameWidth-padding+dif, topgap+l*gap, textPaint);
}
textPaint.setTextSize(21f); // Title
String monText = month.toString();
textPaint.getTextBounds(monText, 0, monText.length(), rect);
pageCanvas.drawText(monText, width/2-rect.width()/2, topgap/2, textPaint);
mDocument.finishPage(page);
}
try {
mDocument.writeTo(new FileOutputStream(
destination.getFileDescriptor()));
} catch (IOException e) {
callback.onWriteFailed(e.toString());
return;
} finally {
mDocument.close();
mDocument = null;
}
callback.onWriteFinished(new PageRange[]{new PageRange(0, numPages)});
}
} |
package org.jeecgframework.core.common.dao;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.jeecgframework.web.system.pojo.base.TSDepart;
import org.jeecgframework.web.system.pojo.base.TSUser;
import org.jeecgframework.core.common.model.common.UploadFile;
import org.jeecgframework.core.common.model.json.ComboTree;
import org.jeecgframework.core.common.model.json.ImportFile;
import org.jeecgframework.core.common.model.json.TreeGrid;
import org.jeecgframework.core.extend.template.Template;
import org.jeecgframework.tag.vo.easyui.ComboTreeModel;
import org.jeecgframework.tag.vo.easyui.TreeGridModel;
public interface ICommonDao extends IGenericBaseCommonDao{
/**
 * Initializes (resets) the admin account password.
 * @param user the user whose password is reset
 * @param newPwd the new password to set — assumed to be encoded by the implementation; TODO confirm
 */
public void pwdInit(TSUser user,String newPwd);
/**
 * Checks whether the user exists.
 * @param user user carrying the id and name to look up
 * @return the matching user; presumably {@code null} when not found — verify against the implementation
 */
public TSUser getUserByUserIdAndUserNameExits(TSUser user);
//public String getUserRole(TSUser user);
/**
 * Uploads a file.
 * @param uploadFile descriptor of the file to upload
 * @return implementation-defined upload result
 */
public <T> T uploadFile(UploadFile uploadFile);
/**
 * Downloads a file or renders it for inline preview.
 * @param uploadFile descriptor of the file to serve
 * @return the response the file content was written to
 */
public HttpServletResponse viewOrDownloadFile(UploadFile uploadFile);
// Builds the data-source map for the given template.
public Map<Object,Object> getDataSourceMap(Template template);
/**
 * Generates an XML file.
 * @param importFile descriptor holding the full path of the XML file
 * @return the response the XML was written to
 */
public HttpServletResponse createXml(ImportFile importFile);
/**
 * Parses an XML file.
 * @param fileName full path of the XML file
 */
public void parserXml(String fileName);
// Builds an EasyUI combo tree from the department list.
public List<ComboTree> comTree(List<TSDepart> all,ComboTree comboTree);
/**
 * Generates combo-tree JSON from a model.
 * @param all all available objects
 * @param comboTreeModel model describing the id/text/children fields
 * @param in objects that are already owned/selected
 * @return the combo tree nodes
 */
public List<ComboTree> ComboTree(List all,ComboTreeModel comboTreeModel,List in);
// Builds an EasyUI tree grid from the object list.
public List<TreeGrid> treegrid(List all,TreeGridModel treeGridModel);
}
|
<filename>src/js/panels/home/base.js<gh_stars>0
import React from 'react';
import {connect} from 'react-redux';
import {Div, Panel, Group, Button, PanelHeader} from "@vkontakte/vkui";
import {closePopout, openPopout} from '../../store/router/actions';
import * as VK from "../../services/VK";
import * as API from "../../services/API";
import {getURLParams} from "../../services/_functions";
import {SnackbarAlert} from "../../services/renderers";
class HomePanelBase extends React.Component {
state = {
snackbar: null
};
requestAccess = async () => {
let URLParams = getURLParams();
if (!URLParams['vk_group_id']) {
this.setState({
snackbar:
SnackbarAlert(
2, "" +
"Откройте сервис из-под группы",
() => this.setState({snackbar: null})
)
});
return;
}
let CommunityAuthToken = await VK.getCommunityAuthToken(['app_widget'], URLParams['vk_group_id']);
if (!CommunityAuthToken) {
this.setState({
snackbar:
SnackbarAlert(
2, "" +
"Предоставьте права приложению",
() => this.setState({snackbar: null})
)
});
return;
}
let saveAccessToken = await API.saveCommunityAuthToken(
URLParams['vk_group_id'],
URLParams['vk_user_id'],
CommunityAuthToken
);
if (saveAccessToken.error) {
this.setState({
snackbar:
SnackbarAlert(
2, "" +
"Возникла ошибка при сохранении токена",
() => this.setState({snackbar: null})
)
});
} else {
this.setState({
snackbar:
SnackbarAlert(
1, "" +
"Токен сообщества сохранен",
() => this.setState({snackbar: null})
)
});
}
};
requestAllowMessages = async () => {
let URLParams = getURLParams();
let allowMessages = await VK.getAppAllowMessagesFromGroup(URLParams['vk_user_id']);
};
render() {
const {id} = this.props;
return (
<Panel id={id}>
<PanelHeader>ПромоДруг</PanelHeader>
<Group>
<Div>
<Button size="l" stretched={true} onClick={this.requestAccess}>Предоставить доступ к виджету</Button>
</Div>
</Group>
<Group>
<Div>
<Button size="l" stretched={true} onClick={this.requestAllowMessages}>Предоставить доступ на получение сервисных уведомлений</Button>
</Div>
</Group>
{this.state.snackbar}
</Panel>
);
}
}
// Redux actions exposed as props. NOTE(review): neither action is referenced
// inside this panel's render/handlers — presumably kept for popout navigation;
// verify against sibling panels before removing.
const mapDispatchToProps = {
openPopout,
closePopout,
};
export default connect(null, mapDispatchToProps)(HomePanelBase); |
<gh_stars>1-10
package de.ids_mannheim.korap.rewrite;
import com.fasterxml.jackson.databind.JsonNode;
import de.ids_mannheim.korap.config.KustvaktConfiguration;
import de.ids_mannheim.korap.exceptions.KustvaktException;
import de.ids_mannheim.korap.user.User;
/**
* @author hanl
* @date 30/06/2015
*/
public interface RewriteTask {
/**
 * Unspecified query rewrite that gets injected the entire root
 * node during preprocessing.
 */
interface RewriteQuery extends RewriteTask {
/**
 * Rewrites the query representation before it is processed.
 *
 * @param node
 *            JSON node in a KoralNode wrapper
 * @param config
 *            {@link KustvaktConfiguration} singleton instance
 *            used to read default configuration parameters
 * @param user
 *            injected by the rewrite handler if available. Might
 *            cause a {@link NullPointerException} if not
 *            checked properly
 * @return the rewritten node
 * @throws KustvaktException if the rewrite cannot be applied
 */
KoralNode rewriteQuery (KoralNode node, KustvaktConfiguration config,
User user) throws KustvaktException;
}
/**
 * Post-processor targeted at result sets of queries.
 * {@link RewriteResult} tasks run
 * after all {@link IterableRewritePath} tasks have been processed.
 */
interface RewriteResult extends RewriteTask {
// Rewrites the query result node; runs after query-side rewrites.
JsonNode rewriteResult (KoralNode node) throws KustvaktException;
}
/**
 * Nodes subject to rewrites at a fixed JSON-pointer location.
 * JSON-pointer based rewrites are processed after iterable
 * rewrites.
 * Deletion via KoralNode is not allowed. Supports pre- and
 * post-processing.
 */
interface RewriteNodeAt extends RewriteQuery, RewriteResult {
// JSON pointer of the node this rewrite applies to.
String at ();
}
/**
 * Terminal object nodes that are subject to rewrites through node
 * iteration
 * (both object and array node iteration are supported).
 */
interface IterableRewritePath extends RewriteQuery, RewriteResult {
// JSON path of the iterable node this rewrite applies to.
String path ();
}
/**
 * Koral token nodes that are subject to rewrites.
 * Be aware that node rewrites are processed before query
 * rewrites; thus a query rewrite may override previous node
 * rewrites. A {@link RewriteKoralToken} rewrite DOES NOT support the
 * deletion of the respective node.
 */
interface RewriteKoralToken extends RewriteQuery {}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.