text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Install the C/C++ build toolchain non-interactively (-y) and quietly (-q).
apt-get install -yq build-essential pkg-config
|
/* global $ */
var io = require("socket.io-client");
var client = require("./src/game-client");
var config = require("./config.json");
// Connect to the game server and switch the page into play mode.
// `flag` truthy selects the spectator ("god") renderer; falsy selects the
// normal player renderer. On failure the error message is shown instead.
function run(flag) {
  // Literal require() paths are kept so bundlers can statically resolve them.
  client.renderer = flag ? require("./src/mode/god") : require("./src/mode/player");
  var playerName = $("#name").val();
  client.connectGame("//" + location.host, playerName, function(success, msg) {
    if (!success) {
      $("#error").text(msg);
      return;
    }
    $("#main-ui").fadeIn(1000);
    $("#begin, #wasted").fadeOut(1000);
  }, flag);
}
// Page bootstrap: verify WebSocket support, then do one ping/pong round
// trip with the server before enabling the start/spectate controls.
$(document).ready(function() {
  var err = $("#error");
  if (!window.WebSocket) {
    err.text("Your browser does not support WebSockets!");
    return;
  }
  err.text("Loading... Please wait"); //TODO: show loading screen
  (function() {
    // Short-lived probe connection, websocket transport only (no upgrade).
    var socket = io(`//${location.host}`, {
      forceNew: true,
      upgrade: false,
      transports: ["websocket"]
    });
    socket.on("connect", function() {
      socket.emit("pings");
    });
    // Server answered the probe: drop it and wire up the UI entry points.
    socket.on("pongs", function() {
      socket.disconnect();
      err.text("All done, have fun!");
      $("#name").keypress(function(evt) {
        if (evt.which === 13) run(); // Enter key starts the game
      });
      $(".start").removeAttr("disabled").click(function(evt) {
        run();
      });
      $(".spectate").removeAttr("disabled").click(function(evt) {
        run(true); // spectator mode
      });
    });
    socket.on("connect_error", function() {
      err.text("Cannot connect with server. This probably is due to misconfigured proxy server. (Try using a different browser)");
    });
  })();
});
//Event listeners
// Keyboard controls: arrow keys and WASD steer the player.
// Headings: 0 = up, 1 = right, 2 = down, 3 = left.
$(document).keydown(function(e) {
  var headingByKey = {
    38: 0, 87: 0, // UP / W
    39: 1, 68: 1, // RIGHT / D
    40: 2, 83: 2, // DOWN / S
    37: 3, 65: 3  // LEFT / A
  };
  var newHeading = headingByKey[e.which];
  if (newHeading === undefined) return; // ignore all other keys
  client.changeHeading(newHeading);
  //e.preventDefault();
});
// Prevent default touch scrolling so swipes can act as game controls.
$(document).on("touchmove", function(e) {
  e.preventDefault();
});
// Swipe controls: compare the touch start and end points and pick the
// dominant axis/direction as the new heading (0=up,1=right,2=down,3=left).
$(document).on("touchstart", function (e1) {
  var x1 = e1.targetTouches[0].pageX;
  var y1 = e1.targetTouches[0].pageY;
  $(document).one("touchend", function (e2) {
    var x2 = e2.changedTouches[0].pageX;
    var y2 = e2.changedTouches[0].pageY;
    var deltaX = x2 - x1;
    var deltaY = y2 - y1;
    var newHeading = -1;
    if (deltaY < 0 && Math.abs(deltaY) > Math.abs(deltaX)) newHeading = 0;      // up
    else if (deltaX > 0 && Math.abs(deltaY) < deltaX) newHeading = 1;           // right
    else if (deltaY > 0 && Math.abs(deltaX) < deltaY) newHeading = 2;           // down
    else if (deltaX < 0 && Math.abs(deltaX) > Math.abs(deltaY)) newHeading = 3; // left
    // Bug fix: a plain tap (no dominant direction) used to send -1 to the
    // server; the keyboard handler never emits -1, so ignore it here too.
    if (newHeading !== -1) client.changeHeading(newHeading);
  });
});
// "Menu" button: leave the current game and return to the start screen.
$(".menu").click(function() {
  client.disconnect();
  $("#main-ui, #wasted").fadeOut(1000);
  $("#begin").fadeIn(1000);
});
// "Toggle" button: show or hide the settings panel.
$(".toggle").click(function() {
  $("#settings").slideToggle();
});
|
#!/usr/bin/env bash
# Boots a disposable tutorial stack: a fresh PostgreSQL cluster (unix
# socket only), a PostgREST API and an nginx ingress, then waits — or runs
# the command given as arguments — and tears everything down on exit.
set -euo pipefail
tutorialdir="$(realpath "${TUTORIAL_DIR:-$(pwd)}")"
rundir="$(mktemp -d)"/run
export INGRESS_PORT=8080
export API_PORT=3000
export API_URI="http://localhost:$API_PORT"
export PGDATA="$rundir"/db
export PGHOST="$rundir"/dbsocket
export PGUSER=postgres
export PGDATABASE=postgres
export DB_URI="postgresql://$PGDATABASE?host=$PGHOST&user=$PGUSER"
export SHAKEDOWN_URL="http://localhost:$INGRESS_PORT"
# Clear run dir from previous runs.
rm -rf "$rundir"
# DATABASE
dblog="$rundir/db.log"
dbsetuplog="$rundir/dbsetup.log"
# Append a setup-progress line to the setup log.
# NOTE(review): `dblog` is both this function and the server-log path
# variable above; bash keeps those namespaces separate, but renaming one
# of them would be clearer.
dblog() {
  echo "$1" >> "$dbsetuplog"
}
mkdir -p "$rundir"/{db,dbsocket}
dblog "Initializing database cluster..."
PGTZ=UTC initdb --no-locale --encoding=UTF8 --nosync -U "$PGUSER" --auth=trust >> "$dbsetuplog"
dblog "Starting the database cluster..."
# No TCP listener: connections go through the unix socket in $PGHOST.
pg_ctl -l "$dblog" start -o "-F -c listen_addresses=\"\" -k $PGHOST" >> "$dbsetuplog"
dblog "Loading application from $tutorialdir..."
psql -v ON_ERROR_STOP=1 -f "$tutorialdir/app.sql" >> "$dbsetuplog"
stopDb() {
  pg_ctl stop >> "$dbsetuplog"
}
# On any exit: stop the DB, kill background children, remove the run dir.
trap 'stopDb; pkill -P $$; rm -rf $rundir' exit
# POSTGREST API
postgrest "$tutorialdir"/postgrest.conf >> "$rundir/api.log" &
# NGINX INGRESS
mkdir -p "$rundir"/nginx/conf
envsubst < "$tutorialdir"/nginx.conf > "$rundir"/nginx/conf/nginx.conf
nginx -p "$rundir"/nginx 2> "$rundir"/nginx/err.log &
# MAIN
echo "Waiting for application to be ready..."
# Poll the ingress until it answers HTTP 200.
while [ "$(curl -o /dev/null -s -w "%{response_code}" "http://localhost:8080/")" != "200" ]; do
  sleep 0.1;
done
if [ "$#" -lt 1 ]; then
  echo "Application is ready. Press Ctrl-C to exit."
  wait
else
  echo "Application is ready. Running command \"$*\"..."
  # We don't `exec` the command in order to keep our exit trap.
  "$@"
fi
|
<reponame>sparber/CComprehensiveDatatypes
package tree.declarations;
import tree.DefaultTreeNode;
import tree.expressions.TAssignmentExpression;
import tree.symbols.TSBraceLeft;
import tree.symbols.TSBraceRight;
import tree.symbols.TSComma;
/**
 * Parse-tree node for a C initializer: either a brace-enclosed initializer
 * list (optionally with a trailing comma) or a single assignment expression.
 */
public class TInitializer extends DefaultTreeNode {
    /** Copy constructor. */
    public TInitializer(TInitializer node) {
        super(node);
    }
    /** Initializer of the form "{ inits }". */
    public TInitializer(TSBraceLeft brace_left, TInitializerList inits, TSBraceRight brace_right) {
        addChild(brace_left);
        addChild(inits);
        addChild(brace_right);
    }
    /** Initializer of the form "{ inits , }" (trailing-comma variant). */
    public TInitializer(TSBraceLeft brace_left, TInitializerList inits, TSComma comma, TSBraceRight brace_right) {
        addChild(brace_left);
        addChild(inits);
        addChild(comma);
        addChild(brace_right);
    }
    /** Initializer consisting of a single assignment expression. */
    public TInitializer(TAssignmentExpression aexpr) {
        addChild(aexpr);
    }
}
|
#!/bin/bash
# Build SoftEther VPN from source and package the resulting binaries.
# Expects setenv_build_softether.sh to define FILE_SOFTETHER_SRC_TAR_GZ,
# URL_SOFTETHER_RELEASE, SOFTETHER_VERSION and FILE_SOFTETHER_BIN_TAR_GZ.
source "${HOME}/.bashrc.rdbox-hq"
source setenv_build_softether.sh
#
pushd . > /dev/null
#
echo "[INFO] Install some packages"
sudo apt update
sudo apt install -y apt-utils net-tools
sudo apt install -y libreadline-dev libreadline5 \
  openssl libssl-dev libncurses5-dev zlib1g-dev \
  bridge-utils traceroute dnsutils \
  make gcc git ssh vim
#
# Fix: quote all expansions so paths/URLs containing spaces or shell
# metacharacters cannot be word-split or glob-expanded.
mkdir -p "${HOME}/git"
cd "${HOME}/git"
curl -L --output "${FILE_SOFTETHER_SRC_TAR_GZ}" "${URL_SOFTETHER_RELEASE}"
tar xvfz "${FILE_SOFTETHER_SRC_TAR_GZ}"
#
umask 022
cd "${SOFTETHER_VERSION}"
./configure
make
#
# Package the built vpn* binaries (./vpn* glob left unquoted on purpose).
cd bin
tar cfz "${HOME}/git/${FILE_SOFTETHER_BIN_TAR_GZ}" ./vpn*
#
popd > /dev/null
#
|
import re
def process_wiki_page_preview(content: str) -> str:
    """Return `content` with every http(s) URL wrapped in an HTML anchor tag."""
    # Regular expression to find URLs in the content
    url_pattern = r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'

    def linkify(match):
        url = match.group(0)
        return f"<a href='{url}'>{url}</a>"

    return re.sub(url_pattern, linkify, content)
import cv2
import numpy as np
# Live color-segmentation demo: shows the camera frame plus grayscale,
# blue-masked and red-masked views until ESC is pressed.
cap = cv2.VideoCapture(1)  # camera index 1 — presumably an external webcam; confirm
cap.set(3, 640)  # frame width
cap.set(4, 480)  # frame height

# Color ranges in HSV: H[0,179], S[0,255], V[0,255].
# Hoisted out of the loop — they never change between frames.
lower_blue = np.array([90, 50, 50])
upper_blue = np.array([130, 255, 255])
# Red wraps around hue 0, so it needs two ranges.
lower_red1 = np.array([0, 50, 50])
upper_red1 = np.array([10, 255, 255])
lower_red2 = np.array([170, 50, 50])
upper_red2 = np.array([180, 255, 255])

while True:
    # Take each frame; bug fix: the original crashed in cvtColor when the
    # camera returned no image — now we exit the loop cleanly instead.
    ok, frame = cap.read()
    if not ok:
        break
    # Convert BGR to HSV
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # Combine both red hue bands into a single mask.
    mask0 = cv2.inRange(hsv, lower_red1, upper_red1)
    mask1 = cv2.inRange(hsv, lower_red2, upper_red2)
    red_mask = mask0 + mask1
    gray = cv2.cvtColor(frame.copy(), cv2.COLOR_BGR2GRAY)
    # Threshold the HSV image to get only blue colors
    blue_mask = cv2.inRange(hsv, lower_blue, upper_blue)
    # Bitwise-AND mask and original image
    blue_res = cv2.bitwise_and(frame, frame, mask=blue_mask)
    red_res = cv2.bitwise_and(frame, frame, mask=red_mask)
    cv2.imshow('frame', frame)
    cv2.imshow('gray', gray)
    cv2.imshow('blue', blue_res)
    cv2.imshow('red', red_res)
    k = cv2.waitKey(5) & 0xFF
    if k == 27:  # ESC quits
        break

cap.release()  # bug fix: release the capture device before tearing down windows
cv2.destroyAllWindows()
<gh_stars>1-10
package model
import (
"time"
)
// Account is a GORM model holding one login credential for a channel.
type Account struct {
	ID        uint `gorm:"primary_key"`
	CreatedAt time.Time
	UpdatedAt time.Time
	// Indexed deletion timestamp; nil while the row is live (gorm v1
	// treats a non-nil DeletedAt as a soft delete).
	DeletedAt *time.Time `sql:"index"`
	ChannelID int    `json:"channel"`
	Account   string `json:"account"`
	// NOTE(review): this json tag looks like redacted placeholder text —
	// confirm the intended tag (e.g. "password") before relying on JSON output.
	PassWord string `json:"<PASSWORD>_<PASSWORD>"`
}
// AccountModel provides Account persistence through the embedded CommonModel.
type AccountModel struct {
	CommonModel
}

// GetAccountModel returns an AccountModel bound to the package-level DB handle.
func GetAccountModel() *AccountModel {
	return &AccountModel{CommonModel{db: commonDb}}
}
|
#!/bin/bash
# Launch a Parity Ethereum node on the Ropsten test network with a set of
# pre-unlocked accounts; the unlock password is read from the file below.
PASSWORD=/home/ubuntu/.eth-accounts/password
# NOTE: the backslash-continued account list must contain no spaces,
# otherwise --unlock receives a truncated list.
parity \
	--author 0xc73bA89c340a5839473b2486fbf3af479Aa8e620 \
	--unlock 0xc73bA89c340a5839473b2486fbf3af479Aa8e620,\
0xfb3eb19cf16d4aF3B9e190e80A2f0C48ab4f1188,\
0x3DeEe88A871433a5709c93eF63Eb04F4eACE0ac5,\
0x448f8EC95B31A64436222c013C668E7634984C79,\
0x33eC7255FD0acF37e48868FB75e3DBdd12Fcbdf4,\
0x37fBb918808d2c505Bc7274e9Df771D537A3989f,\
0x165A5b95298B2F03bb2a2276Ea91e066D25f8573,\
0xC809d1AA78D55Fe99782A8Fa278425DAB8871c58,\
0x26b83c6c87c25721218289587abC0c032ba7A12e \
	--password=$PASSWORD \
	--mode active \
	--force-ui \
	--geth \
	--ui-interface 0.0.0.0 \
	--jsonrpc-interface '0.0.0.0' --jsonrpc-hosts all \
	--unsafe-expose \
	--chain ropsten
<reponame>Tatsinnit/hackathon-aks-upgrade-doctor<gh_stars>0
package rules
import (
"context"
"fmt"
"io"
"github.com/gosuri/uiprogress"
)
// engineImpl is the default Engine implementation; progress output is
// written to Stdout.
type engineImpl struct {
	Stdout io.Writer
}

// NewEngine creates a new engine.
func NewEngine(stdout io.Writer) Engine {
	return &engineImpl{
		Stdout: stdout,
	}
}

// Compile-time assertion that engineImpl satisfies Engine.
var _ Engine = &engineImpl{}
// CheckRulesSet runs every rule in rs against the cluster while rendering
// a terminal progress bar. A rule whose execution errors is recorded as a
// Failed check result instead of aborting the remaining rules. Returns
// nil, nil for an empty rule set.
func (e *engineImpl) CheckRulesSet(
	ctx context.Context,
	clusterCtx ClusterContext,
	rs RulesSet,
) ([]*CheckResult, error) {
	if len(rs) < 1 {
		return nil, nil
	}
	progress := uiprogress.New()
	bar := progress.AddBar(len(rs))
	bar.PrependFunc(func(b *uiprogress.Bar) string {
		return fmt.Sprintf("checking %d/%d", b.Current(), b.Total)
	})
	progress.Start()
	defer progress.Stop()
	var checkResults []*CheckResult
	for _, rule := range rs {
		results, err := rule.GetCheckResults(ctx, clusterCtx)
		if err != nil {
			// Convert the execution error into a Failed result so the
			// remaining rules still run.
			results = []*CheckResult{{
				RuleID:      rule.RuleID(),
				Category:    Failed,
				Description: fmt.Sprintf("check result failed: %s", err.Error()),
			}}
		}
		checkResults = append(checkResults, results...)
		bar.Incr()
	}
	return checkResults, nil
}
|
#!/bin/sh
# Licensed Materials - Property of IBM
# 5648-F10 (C) Copyright International Business Machines Corp. 2005
# All Rights Reserved
# US Government Users Restricted Rights - Use, duplication or disclosure
# restricted by GSA ADP Schedule Contract with IBM Corp.
# Resolve this script's absolute path whether it was invoked with an
# absolute or a relative path.
case "$0" in
/*) fullpath=$0;;
*) fullpath=`pwd`/$0;;
esac
# Strip the script name and normalize "/./", "/../" and "//" segments.
installsourcepath=`echo "$fullpath" | sed "s,/\./,/,g; s,/[^/][^/]*/\.\./,/,g; s,//,/,g; s,/[^/]*$,,"`
# fixup symlink if possible
[ -f /bin/pwd ] && installsourcepath=`cd $installsourcepath 2>/dev/null && /bin/pwd`
# Configure LaunchPad for the unit-test content/skin, then start the
# browser launcher (defaults to Mozilla; override via the first argument).
export LaunchPadContentDirectory=content/unittest/
export LaunchPadSkinDirectory=skins/unittest/
export LaunchPadStartPage=utframeset.html
export LaunchPadLogFilter=SE
$installsourcepath/${1:-Mozilla}.sh
<nav>
<ul>
<li><a href="#">Home</a></li>
<li><a href="#">About Us</a></li>
<li><a href="#">Services</a></li>
<li><a href="#">Contact Us</a></li>
</ul>
</nav> |
# Write your solution here
def create_tuple(x: int, y: int, z: int):
    """Return a tuple of (smallest, largest, sum) of the three arguments."""
    # Renamed from `list`, which shadowed the built-in type.
    values = [x, y, z]
    return (min(values), max(values), sum(values))

if __name__ == "__main__":
    print(create_tuple(5, 3, -1))
# Make lib/ loadable so the version constant can be required pre-install.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "ruby_money/version"

# Gem packaging metadata for ruby_money.
Gem::Specification.new do |spec|
  spec.name = "ruby_money"
  spec.version = Money::VERSION
  # NOTE(review): author/email are redacted placeholders — restore the real
  # values before publishing.
  spec.authors = ["<NAME>"]
  spec.email = ["<EMAIL>"]
  spec.summary = %q{Simple handling of money and currencies.}
  spec.description = %q{Make operations and exchange between existent currencies using Money objects.}
  spec.homepage = "http://rubygems.org/gems/natonnelier/money"
  spec.license = "MIT"
  # Ship every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
end
|
import pytest
import uvicore
from uvicore.support.dumper import dump, dd
@pytest.mark.asyncio
async def test_select_one(app1):
    """find(1) with include('tags') returns post 1 together with all its tags."""
    from app1.models.post import Post
    post = await Post.query().include('tags').find(1)
    dump(post)
    assert post.slug == 'test-post1'
    assert [
        'linux',
        'mac',
        'bsd',
        'test1',
        'test2'
    ] == [x.name for x in post.tags]
@pytest.mark.asyncio
async def test_select_many(app1):
    """get() with include('tags') returns all posts, each with its own tags."""
    from app1.models.post import Post
    posts = await Post.query().include('tags').get()
    dump(posts)
    assert len(posts) == 7
    assert [
        'linux',
        'mac',
        'bsd',
        'test1',
        'test2'
    ] == [x.name for x in posts[0].tags]
    assert [
        'linux',
        'bsd',
    ] == [x.name for x in posts[1].tags]
    assert [
        'linux',
        'bsd',
        'laravel'
    ] == [x.name for x in posts[6].tags]
@pytest.mark.asyncio
async def test_where(app1):
    """A where() on a child relation filters parents only, not the children."""
    from app1.models.post import Post
    # Remember these children level wheres only filter the parent (posts)
    # but all tags for those parents are still shown. Use .filter() to filter children.
    posts = await Post.query().include('tags').where('tags.name', 'linux').get()
    dump(posts)
    # Should filter parent
    assert [
        'test-post1',
        'test-post2',
        'test-post7',
    ] == [x.slug for x in posts]
    # But not any children
    assert len(posts[0].tags) == 5
    assert len(posts[1].tags) == 2
    assert len(posts[2].tags) == 3
@pytest.mark.asyncio
async def test_where_through_one_to_many(app1):
    """A where() through one-to-many into many-to-many filters the root parent only."""
    #from uvicore.auth.models.user import User
    from app1.models.user import User
    users = await User.query().include(
        'posts',       # The One-To-Many the Many-To-Many is going through
        'posts.tags',  # Many-To-Many
    ).where('posts.tags.name', 'linux').get()
    dump(users)
    # Should limit by just 2 users
    assert [
        '<EMAIL>',
        '<EMAIL>',
    ] == [x.email for x in users]
    # But should not filter any children
    assert len(users[0].posts) == 2
    assert len(users[1].posts) == 1
    assert len(users[0].posts[0].tags) == 5
    assert len(users[0].posts[1].tags) == 2
    assert len(users[1].posts[0].tags) == 3
@pytest.mark.asyncio
async def test_filter(app1):
    """filter() narrows children records only; every parent is still returned."""
    from app1.models.post import Post
    # Filters limit the children records only, never parents.
    # So this still shows ALL posts, but for each post, only shows tags that = linux
    posts = await Post.query().include('tags').filter('tags.name', 'linux').get()
    dump(posts)
    assert len(posts) == 7
    assert len(posts[0].tags) == 1
    assert len(posts[1].tags) == 1
    assert len(posts[6].tags) == 1
@pytest.mark.asyncio
async def test_or_filter(app1):
    """or_filter() narrows children to records matching any of the conditions."""
    from app1.models.post import Post
    # Filters limit the children records only, never parents.
    # So this still shows ALL posts, but for each post, only shows tags that = linux OR mac
    posts = await Post.query().include('tags').or_filter([
        ('tags.name', 'linux'),
        ('tags.name', 'mac'),
    ]).get()
    dump(posts)
    assert len(posts) == 7
    assert len(posts[0].tags) == 2
    assert len(posts[1].tags) == 1
    assert len(posts[6].tags) == 1
|
<reponame>bbennett7/sinatra-bucketlist-project
class Experience < ActiveRecord::Base
  belongs_to :user
  belongs_to :location

  # URL-friendly identifier: lower-cased, trimmed name with disallowed
  # characters replaced by hyphens.
  def slug
    self.name.downcase.strip.gsub(/[^0-9a-z\-\b]/, "-")
  end

  # All experiences whose computed slug equals the given slug.
  def self.find_all_by_slug(slug)
    Experience.select { |experience| experience.slug == slug }
  end
end
|
<reponame>eengineergz/Lambda<gh_stars>0
import React from 'react';
import { axiosWithAuth } from '../utils/axiosWithAuth';
import DisplayAPI from './DisplayAPI';
import ContactForm from './ContactForm';
// Shows a list of friends fetched from the local API plus a contact form.
// The hard-coded seed entries render until the fetch resolves.
class FriendsList extends React.Component {
  state = {
    FriendsList: [
      // NOTE(review): seed entries carry no `id`, so the initial render's
      // list keys are undefined — confirm whether seeds should have ids.
      {
        name: 'Joe',
        age: 24,
        email: '<EMAIL>'
      },
      {
        name: 'Erica',
        age: 38,
        email: '<EMAIL>'
      }
    ]
  };

  // Kick off the API fetch as soon as the component mounts.
  componentDidMount() {
    this.getData();
  }

  // Fetch friends with an authenticated axios instance and replace state;
  // errors are only logged (the seed data stays on screen).
  getData = () => {
    axiosWithAuth()
      .get('http://localhost:5000/api/friends')
      .then(res => {
        console.log(res.data);
        this.setState({
          FriendsList: res.data
        });
      })
      .catch(err => console.log(err));
  };

  render() {
    return (
      <div className="gas-prices">
        {this.props.fetchingData && (
          <div className="key spinner">
            <p>Loading Data</p>
          </div>
        )}
        {console.log('FriendsList.js Friends Response = ' + this.state.FriendsList)}
        <ContactForm />
        {this.state.FriendsList.map(friend => (
          <DisplayAPI
            key={friend.id}
            name={friend.name}
            age={friend.age}
            email={friend.email}
            id={friend.id}
          />
        ))}
      </div>
    );
  }
}
export default FriendsList;
|
<reponame>wenfang6/XSQL
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.druid
import org.joda.time.{DateTime, Interval}
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
/**
 * A Druid "timeseries" native query over `source` for the given interval,
 * aggregated at `granularity`, with optional post-aggregations and filter.
 */
case class TimeSeriesQuery(
    source: String,
    interval: Interval,
    descending: String = "false",
    granularity: Granularity,
    aggregate: Seq[Aggregation],
    postAggregate: Seq[PostAggregation] = Nil,
    filter: QueryFilter = QueryFilter.All) {

  // Granularity as JSON: simple granularities are plain name strings;
  // period/duration granularities use their own JSON forms.
  val g: JValue = granularity match {
    case SimpleGranularity(name) => name
    case p: PeriodGranularity => p.toJson
    case d: DurationGranularity => d.toJson
  }

  // Serialise the full query to the Druid native-query JSON body.
  def toJson: JValue = {
    JObject(
      "queryType" -> "timeseries",
      "dataSource" -> source,
      "descending" -> descending,
      "granularity" -> g,
      "aggregations" -> aggregate.map(_.toJson),
      "postAggregations" -> postAggregate.map(_.toJson),
      "intervals" -> Time.intervalToString(interval),
      "filter" -> filter.toJson)
  }
}
// Parsed response: one (timestamp, result-values) pair per time bucket.
case class TimeSeriesResponse(data: Seq[(DateTime, Map[String, Any])])

object TimeSeriesResponse {
  implicit val formats = org.json4s.DefaultFormats

  // Parse Druid's JSON array of {timestamp, result} rows.
  // NOTE(review): JNothing yields TimeSeriesResponse(null), not an empty
  // Seq — callers must null-check `data`; confirm this is intended.
  def parse(js: JValue): TimeSeriesResponse = {
    js match {
      case JArray(results) =>
        val data = results.map { r =>
          val time = Time.parse((r \ "timestamp").extract[String])
          val values = (r \ "result").asInstanceOf[JObject].values
          time -> values
        }
        TimeSeriesResponse(data)
      case JNothing =>
        TimeSeriesResponse(null)
      case err @ _ =>
        throw new IllegalArgumentException("Invalid time series response: " + err)
    }
  }
}
|
<filename>openshift/openshift/src/main/java/org/arquillian/cube/openshift/impl/fabric8/model/F8DeploymentConfig.java
/*
* JBoss, Home of Professional Open Source
* Copyright 2016 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.arquillian.cube.openshift.impl.fabric8.model;
import java.util.Map;
import org.arquillian.cube.openshift.api.model.DeploymentConfig;
/**
* F8DeploymentConfig
* <p/>
* Wraps a F8 deployment config object.
*
* @author <NAME>
*/
public class F8DeploymentConfig implements DeploymentConfig {
    /** The wrapped Fabric8 (v3_1) deployment config; all getters delegate to it. */
    private final io.fabric8.openshift.api.model.v3_1.DeploymentConfig delegate;

    /**
     * Create a new F8DeploymentConfig.
     */
    public F8DeploymentConfig(final io.fabric8.openshift.api.model.v3_1.DeploymentConfig delegate) {
        this.delegate = delegate;
    }

    /** @return the deployment config's metadata name. */
    @Override
    public String getName() {
        return delegate.getMetadata().getName();
    }

    /** @return the desired replica count from the spec. */
    @Override
    public Integer getReplicas() {
        return delegate.getSpec().getReplicas();
    }

    /** @return the pod selector labels from the spec. */
    @Override
    public Map<String, String> getSelector() {
        return delegate.getSpec().getSelector();
    }

    @Override
    public String toString() {
        return String.format("DeploymentConfig[name=%s,replicas=%s,selector=%s]", getName(), getReplicas(), getSelector());
    }
}
|
package com.spark
import com.typesafe.config.{Config, ConfigFactory}
// Application-wide configuration values.
// NOTE(review): the `conf` parameter is currently unused — every setting
// below is a hard-coded default; confirm whether Config lookup was intended.
final class Settings(conf: Option[Config] = None) extends Serializable {
  val AppName = "zentiment-miner"
  // hadoop config
  val sparkMaster = "local"
  // spark streaming context granularity in milliseconds
  val granularity = 1000
}
|
<filename>src/animation/tracks/types.ts<gh_stars>100-1000
import type { FrameInfo } from '../Animator';
import type Pose from '../../armature/Pose';
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Interpolation mode for a track: one of the numeric values of ELerp.
export type Lerp = typeof ELerp[ keyof typeof ELerp ];
// Available interpolation kinds (const-asserted so the values stay literal).
export const ELerp = {
    Step   : 0,
    Linear : 1,
    Cubic  : 2,
} as const;
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Contract for an animation track bound to a single bone.
export interface ITrack{
    name           : string;
    timeStampIndex : number;       // which timestamp array this track samples
    values         : Float32Array; // packed keyframe values
    boneIndex      : number;       // target bone in the armature
    fnLerp         : fnInterp<any> // active interpolation function
    apply( pose: Pose, fi: FrameInfo ): this;
    setInterpolation( i: Lerp ): this;
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Interpolates a track at the given frame, writing into (and returning) `out`.
export type fnInterp<T> = ( track: ITrack, fi: FrameInfo, out: T ) => T;
<filename>task_opts_linux_test.go
/*
Copyright The containerd Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package containerd
import (
"context"
"testing"
"github.com/containerd/containerd/runtime/linux/runctypes"
)
// TestWithNoNewKeyringAddsNoNewKeyringToOptions verifies that applying
// WithNoNewKeyring to empty task info sets CreateOptions.NoNewKeyring.
func TestWithNoNewKeyringAddsNoNewKeyringToOptions(t *testing.T) {
	var taskInfo TaskInfo
	var ctx context.Context
	var client Client
	err := WithNoNewKeyring(ctx, &client, &taskInfo)
	if err != nil {
		t.Fatal(err)
	}
	opts := taskInfo.Options.(*runctypes.CreateOptions)
	if !opts.NoNewKeyring {
		t.Fatal("NoNewKeyring set on WithNoNewKeyring")
	}
}
// TestWithNoNewKeyringDoesNotOverwriteOtherOptions verifies that
// WithNoNewKeyring preserves pre-existing CreateOptions fields
// (here NoPivotRoot) instead of replacing the whole options struct.
func TestWithNoNewKeyringDoesNotOverwriteOtherOptions(t *testing.T) {
	var taskInfo TaskInfo
	var ctx context.Context
	var client Client
	taskInfo.Options = &runctypes.CreateOptions{NoPivotRoot: true}
	err := WithNoNewKeyring(ctx, &client, &taskInfo)
	if err != nil {
		t.Fatal(err)
	}
	opts := taskInfo.Options.(*runctypes.CreateOptions)
	if !opts.NoPivotRoot {
		t.Fatal("WithNoNewKeyring overwrote other options")
	}
}
|
<filename>Examples/CustomMachine/rbm.py
# Copyright 2019 The Simons Foundation, Inc. - All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import netket
import numpy as _np
__all__ = ["PyRbm"]
class PyRbm(netket.machine.CxxMachine):
    """
    __Do not use me in production code!__
    A proof of concept implementation of a complex-valued RBM in pure Python.
    This is an example of how to subclass `CxxMachine` so that the machine will
    be usable with NetKet's C++ core.
    This class can be used as a drop-in replacement for `RbmSpin`.
    """

    def __init__(
        self, hilbert, alpha=None, use_visible_bias=True, use_hidden_bias=True
    ):
        r"""Constructs a new RBM.
        Args:
            hilbert: Hilbert space.
            alpha: `alpha * hilbert.size` is the number of hidden spins.
            use_visible_bias: specifies whether to use a bias for visible
                spins.
            use_hidden_bias: specifies whether to use a bias for hidden spins.
        """
        # NOTE: The following call to __init__ is important!
        super(PyRbm, self).__init__(hilbert)
        n = hilbert.size
        # NOTE(review): the default alpha=None reaches both `alpha < 0` and
        # `alpha * n` below, each of which raises TypeError on None —
        # confirm whether alpha was meant to be a required numeric argument.
        if alpha < 0:
            raise ValueError("`alpha` should be non-negative")
        m = int(round(alpha * n))
        # Weight matrix (hidden x visible) and optional bias vectors,
        # all complex-valued; biases are None when disabled.
        self._w = _np.empty([m, n], dtype=_np.complex128)
        self._a = _np.empty(n, dtype=_np.complex128) if use_visible_bias else None
        self._b = _np.empty(m, dtype=_np.complex128) if use_hidden_bias else None

    def _n_par(self):
        r"""Returns the number of parameters in the machine. We just sum the
        sizes of all the tensors we hold.
        """
        return (
            self._w.size
            + (self._a.size if self._a is not None else 0)
            + (self._b.size if self._b is not None else 0)
        )

    def _log_val(self, x, out):
        r"""Computes the logarithm of the wave function given a spin
        configuration ``x``.
        """
        r = _np.dot(x, self._w.T)
        if self._b is not None:
            r += self._b
        _np.sum(PyRbm._log_cosh(r), axis=-1, out=out)
        if self._a is not None:
            out += _np.dot(x, self._a)
        # Officially, we should return
        # self._w.shape[0] * 0.6931471805599453 + r
        # but the C++ implementation ignores the "constant factor"

    def _der_log(self, x, out):
        # Writes the gradient of log(psi) w.r.t. all parameters into `out`,
        # packed as [visible biases | hidden biases | weights].
        batch_size = x.shape[0]
        i = 0
        if self._a is not None:
            out[:, i : i + x.shape[1]] = x
            i += x.shape[1]
        r = _np.dot(x, self._w.T)
        if self._b is not None:
            r += self._b
        _np.tanh(r, out=r)
        if self._b is not None:
            out[:, i : i + self._b.shape[0]] = r
            i += self._b.shape[0]
        # Weight gradient: outer product of tanh activations and inputs,
        # written directly into the reshaped tail of `out`.
        t = out[:, i : i + self._w.size]
        t.shape = (batch_size, self._w.shape[0], self._w.shape[1])
        _np.einsum("ij,il->ijl", r, x, out=t)

    def _is_holomorphic(self):
        r"""Complex valued RBM a holomorphic function.
        """
        return True

    def state_dict(self):
        # NOTE(review): `.view()` is called unconditionally, so this raises
        # AttributeError when visible/hidden biases are disabled (None) —
        # confirm whether those keys should be omitted instead.
        from collections import OrderedDict

        return OrderedDict(
            [("a", self._a.view()), ("b", self._b.view()), ("w", self._w.view())]
        )

    @staticmethod
    def _log_cosh(x):
        # TODO: Handle big numbers properly
        return _np.log(_np.cosh(x))
|
/**
 * Typeguard to check if a variable is a string or not.
 *
 * @param x - the value to test; narrowed to `string` when the guard returns true.
 * @returns true only for string primitives (`typeof` check, so boxed
 *          `String` objects return false).
 */
export function isString(x: any): x is string {
    return typeof x === 'string';
}
<reponame>SoutheyLab/genericpipe<gh_stars>1-10
'''
Initialisation and use of concurrency-friendly logging facility.
'''
import ruffus.cmdline as cmdline
class Logger(object):
    '''Concurrency friendly logging facility'''

    def __init__(self, prog_name, log_file, verbosity):
        # NOTE(review): prog_name is accepted but unused; setup_logging is
        # keyed on this module's __name__ instead — confirm intended.
        proxy, mutex = cmdline.setup_logging(__name__, log_file, verbosity)
        self.proxy = proxy
        self.mutex = mutex

    def info(self, message):
        '''Display an informational message to the log file'''
        # Serialise writes across workers via the shared mutex.
        with self.mutex:
            self.proxy.info(message)
/*
* Copyright 2014 <NAME> (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.sbt.s3
import com.amazonaws.AmazonServiceException
import com.amazonaws.services.s3.model.{ListObjectsRequest, ObjectMetadata}
import fm.sbt.S3URLHandler
import java.io.InputStream
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.Date
object S3URLConnection {
  // Shared handler that resolves S3 clients/buckets/keys from s3:// URLs.
  private val s3: S3URLHandler = new S3URLHandler()
}

/**
 * Implements an HttpURLConnection for compatibility with Coursier (https://github.com/coursier/coursier)
 */
final class S3URLConnection(url: URL) extends HttpURLConnection(url) {
  import S3URLConnection.s3

  // The last successful S3 response, if any; headers and the body stream
  // are read from it.
  private[this] var response: Option[S3Response] = None

  def connect(): Unit = {
    val (client, bucket, key) = s3.getClientBucketAndKey(url)
    val res: Option[S3Response] = try {
      getRequestMethod.toLowerCase match {
        case "head" =>
          url.getPath match {
            // maven-metadata is synthesized on the fly, so a HEAD always
            // reports it as freshly modified.
            case p if p.endsWith("/maven-metadata.xml") || p.endsWith("/maven-metadata.xml.sha1") =>
              val meta = new ObjectMetadata()
              meta.setLastModified(new Date)
              Option(HEADResponse(meta))
            case _ => Option(HEADResponse(client.getObjectMetadata(bucket, key)))
          }
        case "get" =>
          url.getPath match {
            // Directory Listing (HTML)
            case p if p.endsWith("/") =>
              // "/" as a delimiter to be returned only entries in the first level (no recursion),
              // with (pseudo) sub-directories indeed ending with a "/"
              val req = new ListObjectsRequest(bucket, key, null, "/", null)
              Option(GETListHTMLResponse(key, client.listObjects(req)))
            // Version info
            case p if p.endsWith("/maven-metadata.xml.sha1") =>
              S3MavenMetadata.getSha1(client, bucket, key).map{ contents =>
                TextResponse(contents.getBytes(StandardCharsets.UTF_8), new Date)
              }
            case p if p.endsWith("/maven-metadata.xml") =>
              S3MavenMetadata.getXml(client, bucket, key).map{ contents =>
                XmlResponse(contents.getBytes(StandardCharsets.UTF_8), new Date)
              }
            case _ => Option(GETResponse(client.getObject(bucket, key)))
          }
        // NOTE(review): POST/PUT are intentionally unimplemented and will
        // throw NotImplementedError if ever invoked.
        case "post" => ???
        case "put" => ???
        case _ => throw new IllegalArgumentException("Invalid request method: "+getRequestMethod)
      }
    } catch {
      case ex: AmazonServiceException =>
        responseCode = ex.getStatusCode
        None
    }
    response = if (res.isDefined) res else response
    responseCode = if (response.isEmpty) 404 else 200
    // Also set the responseMessage (an HttpURLConnection field) for better compatibility
    responseMessage = statusMessageForCode(responseCode)
    connected = true
  }

  def usingProxy(): Boolean = Option(s3.getProxyConfiguration.getProxyHost).exists{ _ != "" }

  override def getInputStream: InputStream = {
    if (!connected) connect()
    response.flatMap{ _.inputStream }.orNull
  }

  override def getHeaderField(n: Int): String = {
    // n == 0 means you want the HTTP Status Line
    // This is called from HttpURLConnection.getResponseCode()
    if (n == 0 && responseCode != -1) {
      s"HTTP/1.0 $responseCode ${statusMessageForCode(responseCode)}"
    } else {
      super.getHeaderField(n)
    }
  }

  override def getHeaderField(field: String): String = {
    if (!connected) connect()
    field.toLowerCase match {
      case "content-type" => response.map{ _.meta.getContentType }.orNull
      case "content-encoding" => response.map{ _.meta.getContentEncoding }.orNull
      case "content-length" => response.map{ _.meta.getContentLength().toString }.orNull
      case "last-modified" => response.map{ _.meta.getLastModified }.map{ _.toInstant.atOffset(ZoneOffset.UTC) }.map{ DateTimeFormatter.RFC_1123_DATE_TIME.format }.orNull
      case _ => null // Should return null if no value for header
    }
  }

  override def disconnect(): Unit = {
    response.foreach{ _.close() }
  }

  private def statusMessageForCode(code: Int): String = {
    // I'm not sure if we care about any codes besides 200 and 404
    code match {
      case 200 => "OK"
      case 404 => "Not Found"
      case _ => "DUMMY"
    }
  }
}
|
#!/bin/bash
# Copyright 2016 Nitor Creations Oy
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ "$_ARGCOMPLETE" ]; then
  # Handle command completion executions
  unset _ARGCOMPLETE
  source $(n-include autocomplete-helpers.sh)
  case $COMP_CWORD in
    2)
      # Offer the dry-run flag only while completing this word.
      if [ "$COMP_INDEX" = "$COMP_CWORD" ]; then
        DRY="-d "
      fi
      compgen -W "$DRY-h $(get_stack_dirs)" -- $COMP_CUR
      ;;
    3)
      compgen -W "$(get_cdk $COMP_PREV)" -- $COMP_CUR
      ;;
    *)
      exit 1
      ;;
  esac
  exit 0
fi
# Print usage to stderr; any arguments are shown as a trailing message.
usage() {
  echo "usage: ndt deploy-cdk [-d] [-h] component cdk-name" >&2
  echo "" >&2
  echo "Exports ndt parameters into component/cdk-name/variables.json, runs pre_deploy.sh in the" >&2
  echo "cdk project and runs cdk diff; cdk deploy for the same" >&2
  echo "" >&2
  echo "positional arguments:" >&2
  echo " component the component directory where the cdk directory is" >&2
  echo " cdk-name the name of the cdk directory that has the template" >&2
  echo " For example for lambda/cdk-sender/bin/MyProject.ts" >&2
  echo " you would give sender" >&2
  echo "" >&2
  echo "optional arguments:" >&2
  echo " -d, --dryrun dry-run - do only parameter expansion and pre_deploy.sh and cdk diff" >&2
  echo " -h, --help show this help message and exit" >&2
  # NOTE(review): with no arguments `"$@"` expands to an empty command,
  # which succeeds, so this branch always runs after a bare `usage` call
  # (printing blank lines); `if [ -n "$1" ]` was probably intended.
  if "$@"; then
    echo "" >&2
    echo "$@" >&2
  fi
  exit 1
}
if [ "$1" = "--help" -o "$1" = "-h" ]; then
  usage
fi
if [ "$1" = "-d" -o "$1" = "--dryrun" ]; then
  DRYRUN=1
  shift
fi
# Print an error message, then usage, then exit non-zero.
die () {
  echo "$1" >&2
  usage
}
# NOTE(review): -x leaves full command tracing enabled for the rest of the
# deploy — confirm this is deliberate and does not leak secrets into logs.
set -xe
component="$1" ; shift
[ "${component}" ] || die "You must give the component name as argument"
cdk="$1"; shift
[ "${cdk}" ] || die "You must give the cdk name as argument"
TSTAMP=$(date +%Y%m%d%H%M%S)
# Default the build number to the timestamp; zero-pad real build numbers.
if [ -z "$BUILD_NUMBER" ]; then
  BUILD_NUMBER=$TSTAMP
else
  BUILD_NUMBER=$(printf "%04d\n" $BUILD_NUMBER)
fi
# Export the ndt parameters for this component/cdk into the environment.
eval "$(ndt load-parameters "$component" -c "$cdk" -e -r)"
#If assume-deploy-role.sh is on the path, run it to assume the appropriate role for deployment
if [ -n "$DEPLOY_ROLE_ARN" ] && [ -z "$AWS_SESSION_TOKEN" ]; then
  eval $(ndt assume-role $DEPLOY_ROLE_ARN)
elif which assume-deploy-role.sh > /dev/null && [ -z "$AWS_SESSION_TOKEN" ]; then
  eval $(assume-deploy-role.sh)
fi
# Materialise the parameters as JSON for the cdk app, then deploy.
ndt load-parameters "$component" -c "$cdk" -j -r > "$component/cdk-$ORIG_CDK_NAME/variables.json"
cd "$component/cdk-$ORIG_CDK_NAME"
if [ -x "./pre_deploy.sh" ]; then
  "./pre_deploy.sh"
fi
cdk synth
# Show the diff but never fail the run on its exit status.
cdk diff ||:
if [ -n "$DRYRUN" ]; then
  echo "Dry run - quitting"
  exit 0
fi
set -e
cdk deploy
if [ -x "./post_deploy.sh" ]; then
  "./post_deploy.sh"
fi
|
/*
* Copyright 2015 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm;
/**
* This class contains all Realm attributes for a Realm field.
* These will usually match the annotations found in the {@code io.realm.annotation} package. See the relevant
* annotation for further information on each modifier.
*/
public enum FieldAttribute {
    /**
     * Marks a field as indexed.
     *
     * @see io.realm.annotations.Index
     */
    INDEXED,
    /**
     * Marks a field as a primary key. This also implicitly marks it as {@link #INDEXED}.
     *
     * @see io.realm.annotations.PrimaryKey
     */
    PRIMARY_KEY,
    /**
     * Marks a field as explicitly not allowing null values. The default behavior for allowing {@code
     * null} depends on the type of the field.
     *
     * @see io.realm.annotations.Required
     * @see RealmObjectSchema#setRequired(String, boolean)
     */
    REQUIRED
}
|
//*********************************************************************************
//
// Copyright(c) 2016-2017 <NAME> All Rights Reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//*********************************************************************************
package cmu.xprize.comp_questions;
import android.graphics.PointF;
import android.os.Handler;
import android.text.Html;
import android.text.Layout;
import android.util.Log;
import android.widget.TextView;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import cmu.xprize.util.CPersonaObservable;
import cmu.xprize.util.ILoadableObject;
import cmu.xprize.util.IScope;
import cmu.xprize.util.JSON_Helper;
import cmu.xprize.util.Num2Word;
import cmu.xprize.util.TCONST;
import edu.cmu.xprize.listener.ListenerBase;
/**
* This view manager emulates the same student interaction that was present in the
* initial release of the MARi reading tutor.
*/
public class CQn_ViewManagerMari implements ICQn_ViewManager, ILoadableObject {
private ListenerBase mListener;
private IVManListener mOwner;
private TextView mPageText;
// state for the current sentence
private int currentIndex = -1; // current sentence index in storyName, -1 if unset
private int storyPart = 0;
private boolean endOfStory = false;
private int completeSentenceIndex = 0;
private String sentenceWords[]; // current sentence words to hear
private int expectedWordIndex = 0; // index of expected next word in sentence
private static int[] creditLevel = null; // per-word credit levelFolder according to current hyp
private boolean changingSentence = false;
private ArrayList<String> sentences = null; //list of sentences of the given passage
private String currentSentence; //currently displayed sentence that need to be recognized
private String completedSentencesFmtd = "";
private String completedSentences = "";
private String assetLocation;
private IVManListener _publishListener;
// json loadable
public String parser;
public String data[];
public CMari_Data rhymes[];
static final String TAG = "CQn_ViewManagerMari";
public CQn_ViewManagerMari(CQn_Component parent, ListenerBase listener) {
    // Cache the story TextView from the hosting component and keep the
    // ASR listener used to recognize the sentences as they are read.
    mPageText = (TextView) parent.findViewById(R.id.SstoryText);
    mListener = listener;
}
@Override
public void initStory(IVManListener owner, String assetPath, String location) {
}
@Override
public void onDestroy() {
}
@Override
public void startStory() {
}
public void setSpeakButton(String command) {
}
public void setPageFlipButton(String command) {
}
@Override
public void seekToPage(int pageIndex) {
}
@Override
public void nextPage() {
}
@Override
public void prevPage() {
}
@Override
public void seekToParagraph(int paraIndex) {
}
@Override
public void nextPara() {
}
@Override
public void prevPara() {
}
@Override
public void seekToLine(int lineIndex) {
}
@Override
public void echoLine() {
}
@Override
public void parrotLine() {
}
@Override
public void nextLine() {
}
@Override
public void prevLine() {
}
@Override
public void seekToWord(int wordIndex) {
}
public void setPublishListener(IVManListener publishListener) {
_publishListener = publishListener;
}
@Override
public void nextWord() {
}
@Override
public void prevWord() {
}
@Override
public void setHighLight(String highlight, boolean update) {
}
public boolean loadStory(String storyURL) {
return true;
}
@Override
public void setRandomGenericQuestion(){
}
@Override
public void setClozeQuestion(){
}
@Override
public void decideToPlayGenericQuestion() {
}
@Override
public void genericQuestions(){
}
@Override
public void displayGenericQuestion(){
}
@Override
public void setPictureMatch(){
}
@Override
public void setClozePage(){
}
@Override
public void displayPictureMatching(){
}
@Override
public void displayClozeQuestion(){
}
@Override
public void hasClozeDistractor(){
}
@Override
public void hasQuestion(){
}
@Override
public void enableImageButtons(){
}
@Override
public void resetImageButtons(){
}
@Override
public void showImageButtons(){
}
@Override
public void hideImageButtons(){
}
@Override
public void disableImageButtons(){
}
@Override
public void enableClozeButtons(){
}
@Override
public void resetClozeButtons(){
}
@Override
public void showClozeButtons(){
}
@Override
public void hideClozeButtons(){
}
@Override
public void disableClozeButtons(){
}
@Override
public void showClozeWordInBlank(){
}
@Override
public void hideClozeWordInBlank(){
}
@Override
public void publishClozeWord(){
}
@Override
public void highlightClozeWord(){
}
@Override
public void undoHighlightClozeWord(){
}
@Override
public void playClozeSentence(){
}
public void execCommand(String command, Object target ) {
long delay = 0;
switch(command) {
}
}
@Override
public void onUpdate(ListenerBase.HeardWord[] heardWords, boolean finalResult) {
String logString = "";
for (int i = 0; i < heardWords.length; i++) {
logString += heardWords[i].hypWord.toLowerCase() + ":" + heardWords[i].iSentenceWord + " | ";
}
Log.i("ASR", "New HypSet: " + logString);
// TODO: Change to setPauseRecognizer to flush the queue should obviate the need for
// changingSentence test. Validate this is the case.
//
// The recongnizer runs asynchronously so ensure we don't process any
// hypotheses while we are changing sentences otherwise it can skip a sentence.
// This is because nextSentence is also called asynchronously
//
if(changingSentence || finalResult) {
Log.d("ASR", "Ignoring Hypothesis");
return;
}
updateSentence(heardWords); // update current sentence state and redraw
// move on if all words in current sentence have been read
if(sentenceComplete()) {
changingSentence = true;
mListener.setPauseListener(true);
// schedule advance after short delay to allow time to see last word credited on screen
new Handler().postDelayed(new Runnable() {
public void run() {
//nextSentence(mOwner, null);
changingSentence = false;
}
}, 100);
}
}
@Override
public void onUpdate(String[] heardWords, boolean finalResult) {
}
@Override
public void continueListening() {
}
/**
* @param heardWords Update the sentence credit levelFolder with the credit levelFolder of the heard words
*/
private void updateSentence(ListenerBase.HeardWord[] heardWords) {
Log.d("ASR", "New Hypothesis Set:");
if (heardWords.length >= 1) {
// Reset partial credit levelFolder of sentence words
//
for (int i = 0; i < creditLevel.length; i++) {
// don't touch words with permanent credit
if (creditLevel[i] != ListenerBase.HeardWord.MATCH_EXACT)
creditLevel[i] = ListenerBase.HeardWord.MATCH_UNKNOWN;
}
for (ListenerBase.HeardWord hw : heardWords) {
Log.d("ASR", "Heard:" + hw.hypWord);
// assign the highest credit found among all hypothesis words
//
if (hw.matchLevel >= creditLevel[hw.iSentenceWord]) {
creditLevel[hw.iSentenceWord] = hw.matchLevel;
}
}
expectedWordIndex = getFirstUncreditedWord();
// Tell the listener when to stop matching words. We don't want to match words
// past the current expected word or they will be highlighted
// This is a MARi induced constraint
// TODO: make it so we don't need this - use matched past the next word to flag
// a missed word
//
mListener.updateNextWordIndex(expectedWordIndex);
// Update the sentence text display to show credit, expected word
//
UpdateSentenceDisplay();
}
}
/**
 * Redraws the current sentence, colour-coding each word by its credit level
 * (green = exact match, red = miscue), underlining the next expected word,
 * and appending the result after any previously completed (greyed) sentences.
 */
private void UpdateSentenceDisplay() {
    String fmtSentence = "";
    String[] words = currentSentence.split("\\s+");
    for (int i = 0; i < words.length; i++) {
        String styledWord = words[i]; // default plain
        // show credit status with color
        if (creditLevel[i] == ListenerBase.HeardWord.MATCH_EXACT) { // credited as read correctly
            styledWord = "<font color='#00B600'>" + styledWord + "</font>";
        } else if (creditLevel[i] == ListenerBase.HeardWord.MATCH_MISCUE) { // wrongly read
            styledWord = "<font color='red'>" + styledWord + "</font>";
        }
        // MATCH_TRUNCATION (heard only half the word) and unmatched words
        // are intentionally left unstyled.
        if (i == expectedWordIndex) { // style the next expected word
            // Fixed: String.replace returns a new string; the results were
            // previously discarded, so any stale <u> tags were never stripped
            // before re-underlining.
            styledWord = styledWord.replace("<u>", "");
            styledWord = styledWord.replace("</u>", "");
            styledWord = "<u>" + styledWord + "</u>";
            // Publish the word to the component so it can set a scriptable variable
            //_publishListener.publishTargetWord(styledWord);
        }
        fmtSentence += styledWord + " ";
    }
    fmtSentence += "<br>";
    mPageText.setText(Html.fromHtml(completedSentencesFmtd + fmtSentence));
    updateCompletedSentence();
    broadcastActiveTextPos(mPageText, words);
}
/**
* Broadcast current target work position for persona eye tracking.
*
* Notes:
* XML storyName source text must be entered without extra space or linebreaks.
*
* <selectlevel levelFolder="1">
* <storyName storyName="1">
* <part part="1">Uninterrupted text</part>
* </storyName>
*
* @param text
* @param words
* @return
*/
private PointF broadcastActiveTextPos(TextView text, String[] words){
PointF point = new PointF(0,0);
int charPos = 0;
int maxPos;
if(expectedWordIndex >= 0) {
for (int i1 = 0; i1 < expectedWordIndex; i1++) {
charPos += words[i1].length() + 1;
}
charPos += words[expectedWordIndex].length()-1;
charPos = completedSentences.length() + charPos;
// Note that sending a value greater than maxPos will corrupt the textView
//
maxPos = text.getText().length();
charPos = (charPos > maxPos) ? maxPos : charPos;
try {
Layout layout = text.getLayout();
point.x = layout.getPrimaryHorizontal(charPos);
int y = layout.getLineForOffset(charPos);
point.y = layout.getLineBottom(y);
} catch (Exception exception) {
Log.d(TAG, "getActiveTextPos: " + exception.toString());
}
CPersonaObservable.broadcastLocation(text, TCONST.LOOKAT, point);
}
return point;
}
/**
 * Find the first word of the current sentence that has not yet earned an
 * exact-match credit.
 *
 * @return index of the first uncredited word; 0 when every word is credited
 */
private int getFirstUncreditedWord() {
    for (int word = 0; word < creditLevel.length; word++) {
        if (creditLevel[word] != ListenerBase.HeardWord.MATCH_EXACT) {
            return word;
        }
    }
    return 0;
}
/**
* to make auto scroll for the sentences
*/
public void updateCompletedSentence() {
int lastVisibleLineNumber = 0;
int totalNoOfLines = 1;
int height = mPageText.getHeight();
int scrollY = mPageText.getScrollY();
Layout layout = mPageText.getLayout();
if(layout != null) {
lastVisibleLineNumber = layout.getLineForVertical(scrollY + height);
totalNoOfLines = mPageText.getLineCount() - 1;
}
if (lastVisibleLineNumber < totalNoOfLines) {
completeSentenceIndex = currentIndex;
completedSentencesFmtd = "";
completedSentences = "";
}
}
/**
 * Whether the word at {@code index} is reachable: the first word is always
 * reachable, later words only once their predecessor has an exact match.
 *
 * @param index sentence-word index to test
 * @return true if {@code index} is valid and its preceding word (if any) is credited
 */
public boolean isWordCredited(int index) {
    if (index < 0) {
        return false;
    }
    return index == 0 || creditLevel[index - 1] == ListenerBase.HeardWord.MATCH_EXACT;
}
/**
 * Count the words of the current sentence credited with an exact match.
 *
 * @return number of exactly-matched words
 */
public int getNumWordsCredited() {
    int credited = 0;
    for (int level : creditLevel) {
        if (level == ListenerBase.HeardWord.MATCH_EXACT) {
            credited++;
        }
    }
    return credited;
}
/** @return true once every word of the current sentence has been credited. */
public boolean sentenceComplete() {
    return sentenceWords.length <= getNumWordsCredited();
}
@Override
public boolean endOfData() {
return endOfStory;
}
@Override
public void onUpdate(String[] heardWords) {
}
// /**
// * Show the next available sentence to the user
// */
// @Override
// public void nextSentence(IVManListener callback, String assetPath) {
//
// if(mListener != null)
// mListener.deleteLogFiles();
//
// mOwner = callback;
// switchSentence(currentIndex + 1); // for now just loop around single storyName
// }
/**
 * Switch the active sentence to {@code index}: grey out the sentences finished
 * so far, reset per-word credit state, and (re)start the listener on the new
 * sentence's words.
 *
 * @param index index of the sentence that needs to be initialized
 * @return false when the story has been exhausted, true otherwise
 */
private boolean switchSentence(int index) {
    boolean result = true;
    // We've exhausted all the sentences in the storyName
    if (index == sentences.size()) {
        Log.d("ASR", "End of Story");
        // Kill off the mListener.
        // When this returns the recognizerThread is dead and the mic
        // has been disconnected.
        if (mListener != null)
            mListener.stop();
        endOfStory = true;
        result = false;
    }
    else {
        if (index > 0) { // to set grey color for the finished sentence
            completedSentencesFmtd = "<font color='grey'>";
            completedSentences = "";
            // Rebuild the plain-text run of finished sentences from the last
            // auto-scroll checkpoint (completeSentenceIndex) up to here.
            for (int i = completeSentenceIndex; i < index; i++) {
                completedSentences += sentences.get(i);
                completedSentences += ". ";
            }
            completedSentencesFmtd += completedSentences;
            completedSentencesFmtd += "</font>";
        }
        // NOTE(review): the modulo would wrap the index, but the branch above
        // already terminates at sentences.size(); wrapping only matters if a
        // caller ever skips past the end — confirm intent.
        currentIndex = index % sentences.size();
        currentSentence = sentences.get(currentIndex).trim() + ".";
        // get array or words to hear for new sentence
        sentenceWords = ListenerBase.textToWords(currentSentence);
        // reset all aggregate hyp info for new sentence
        // fills default value 0 = MATCH_UNKNOWN
        creditLevel = new int[sentenceWords.length];
        expectedWordIndex = 0;
        // show sentence and start listening for it
        // If we are starting from the beginning of the sentence then end any current sentence
        if (mListener != null) {
            mListener.reInitializeListener(true);
            mListener.listenFor(sentenceWords, 0);
            mListener.setPauseListener(false);
        }
        // _publishListener.publishTargetSentence(currentSentence);
        // _publishListener.publishTargetWordIndex(expectedWordIndex);
        UpdateSentenceDisplay();
    }
    return result;
}
//************ Serialization
/**
 * Load the data source and pre-process the story text: multi-digit numerals
 * are spelled out as English words (the ASR listener matches spoken words,
 * not digits) and the passage is then split into sentences on '.' boundaries.
 *
 * @param jsonData raw story data
 * @param scope    scripting scope passed through to the JSON helper
 */
@Override
public void loadJSON(JSONObject jsonData, IScope scope) {
    JSON_Helper.parseSelf(jsonData, this, CClassMap.classMap, scope);
    // Match numerals of 2+ digits, optionally comma-grouped (e.g. "1,200").
    Pattern pattern = Pattern.compile("[0-9]{2,}(,*\\d*)");
    Matcher matcher = pattern.matcher(data[storyPart]);
    StringBuffer sb = new StringBuffer();
    while (matcher.find()) {
        int number = Integer.parseInt(matcher.group(0).replaceAll(",", ""));
        // quoteReplacement guards against '$' or '\' in the generated words
        // being interpreted as group references by appendReplacement.
        matcher.appendReplacement(sb, Matcher.quoteReplacement(Num2Word.transform(number, "LANG_EN")));
    }
    matcher.appendTail(sb);
    // Use Android logging (consistent with the rest of this class) rather
    // than System.out, which is not reliably captured on device.
    Log.d(TAG, sb.toString());
    sentences = new ArrayList<String>(Arrays.asList(sb.toString().split("\\.")));
}
@Override
public boolean isClozeMode() { return false;}
@Override
public boolean isGenMode() { return false;}
@Override
public boolean isPicMode() {return false;}
}
|
<filename>consensus/testing.go
package consensus
import (
"context"
"github.com/filecoin-project/go-filecoin/actor"
"github.com/filecoin-project/go-filecoin/address"
"github.com/filecoin-project/go-filecoin/state"
"github.com/filecoin-project/go-filecoin/types"
"gx/ipfs/QmRu7tiRnFk9mMPpVECQTBQJqXtmG132jJxA1w9A7TtpBz/go-ipfs-blockstore"
"gx/ipfs/QmPVkJMTeRC6iBByPWdrRkD3BE5UXsj5HPzb4kPqL186mS/testify/require"
)
// TestView is an implementation of stateView used for testing the chain
// manager. It provides a consistent view of the storage market in which
// the total storage is 1 and any miner's storage is 1.
type TestView struct{}
// Compile-time assertion that TestView satisfies PowerTableView.
var _ PowerTableView = &TestView{}
// Total always returns 1.
func (tv *TestView) Total(ctx context.Context, st state.Tree, bstore blockstore.Blockstore) (uint64, error) {
return uint64(1), nil
}
// Miner always returns 1.
func (tv *TestView) Miner(ctx context.Context, st state.Tree, bstore blockstore.Blockstore, mAddr address.Address) (uint64, error) {
return uint64(1), nil
}
// HasPower always returns true.
func (tv *TestView) HasPower(ctx context.Context, st state.Tree, bstore blockstore.Blockstore, mAddr address.Address) bool {
return true
}
// RequireNewTipSet instantiates and returns a new tipset of the given blocks
// and requires that the setup validation succeed.
func RequireNewTipSet(require *require.Assertions, blks ...*types.Block) types.TipSet {
ts, err := types.NewTipSet(blks...)
require.NoError(err)
return ts
}
// RequireTipSetAdd adds a block to the provided tipset and requires that this
// does not error.
func RequireTipSetAdd(require *require.Assertions, blk *types.Block, ts types.TipSet) {
err := ts.AddBlock(blk)
require.NoError(err)
}
// TestPowerTableView is an implementation of the powertable view used for testing mining
// wherein each miner has totalPower/minerPower power.
type TestPowerTableView struct{ minerPower, totalPower uint64 }
// NewTestPowerTableView creates a test power view with the given total power
func NewTestPowerTableView(minerPower uint64, totalPower uint64) *TestPowerTableView {
return &TestPowerTableView{minerPower: minerPower, totalPower: totalPower}
}
// Total always returns value that was supplied to NewTestPowerTableView.
func (tv *TestPowerTableView) Total(ctx context.Context, st state.Tree, bstore blockstore.Blockstore) (uint64, error) {
return tv.totalPower, nil
}
// Miner always returns value that was supplied to NewTestPowerTableView.
func (tv *TestPowerTableView) Miner(ctx context.Context, st state.Tree, bstore blockstore.Blockstore, mAddr address.Address) (uint64, error) {
return tv.minerPower, nil
}
// HasPower always returns true.
func (tv *TestPowerTableView) HasPower(ctx context.Context, st state.Tree, bstore blockstore.Blockstore, mAddr address.Address) bool {
return true
}
// TestSignedMessageValidator is a validator that doesn't validate to simplify message creation in tests.
type TestSignedMessageValidator struct{}
var _ SignedMessageValidator = (*TestSignedMessageValidator)(nil)
// Validate always returns nil
func (tsmv *TestSignedMessageValidator) Validate(ctx context.Context, msg *types.SignedMessage, fromActor *actor.Actor) error {
return nil
}
// TestBlockRewarder is a rewarder that doesn't actually add any rewards to simplify state tracking in tests
type TestBlockRewarder struct{}
var _ BlockRewarder = (*TestBlockRewarder)(nil)
// BlockReward is a noop
func (tbr *TestBlockRewarder) BlockReward(ctx context.Context, st state.Tree, minerAddr address.Address) error {
// do nothing to keep state root the same
return nil
}
// GasReward is a noop
func (tbr *TestBlockRewarder) GasReward(ctx context.Context, st state.Tree, minerAddr address.Address, msg *types.SignedMessage, gas *types.AttoFIL) error {
// do nothing to keep state root the same
return nil
}
// NewTestProcessor creates a processor with a test validator and test rewarder
func NewTestProcessor() *DefaultProcessor {
return &DefaultProcessor{
signedMessageValidator: &TestSignedMessageValidator{},
blockRewarder: &TestBlockRewarder{},
}
}
|
package io.github.rcarlosdasilva.weixin.model.request.common;
import io.github.rcarlosdasilva.weixin.common.ApiAddress;
import io.github.rcarlosdasilva.weixin.model.request.base.BasicWeixinRequest;
/**
* 微信服务器IP列表请求模型
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class WeixinServerIpsRequest extends BasicWeixinRequest {
    public WeixinServerIpsRequest() {
        // Target the Weixin "server IP list" API endpoint.
        this.path = ApiAddress.URL_CERTIFICATE_SERVER_IP;
    }
}
|
set -eux
# Run the real-data example in a subshell so the working directory is restored
# even when Rscript fails: the old `cd real_example && ... && cd ..` skipped
# the trailing `cd ..` on failure, leaving the shell in the wrong directory.
(cd real_example && Rscript real_example.R)
Rscript gene_frequency_distribution_figure.R
|
#!/bin/bash
# ddev-webserver healthcheck
set -eo pipefail
sleeptime=59
# Make sure that both phpstatus, mounted code NOT mailhog
# (mailhog is excluded on hardened/prod)
# are working.
# Since docker doesn't provide a lazy period for startup,
# we track health. If the last check showed healthy
# as determined by existence of /tmp/healthy, then
# sleep at startup. This requires the timeout to be set
# higher than the sleeptime used here.
if [ -f /tmp/healthy ]; then
printf "container was previously healthy, so sleeping ${sleeptime} seconds before continuing healthcheck... "
sleep ${sleeptime}
fi
phpstatus="false"
htmlaccess="false"
if curl --fail -s 127.0.0.1/phpstatus >/dev/null ; then
phpstatus="true"
printf "phpstatus: OK "
else
printf "phpstatus: FAILED "
fi
if ls /var/www/html >/dev/null; then
htmlaccess="true"
printf "/var/www/html: OK "
else
printf "/var/www/html: FAILED"
fi
if [ "${phpstatus}" = "true" ] && [ "${htmlaccess}" = "true" ]; then
touch /tmp/healthy
exit 0
fi
rm -f /tmp/healthy
exit 1
|
def count_ways(amount, denom):
    """Return how many combinations of the given coin denominations sum to `amount`.

    Classic unbounded coin-change count: processing one denomination at a time
    counts combinations (order-insensitive), not permutations.
    """
    # ways[v] = number of combinations summing to v; one way to make 0.
    ways = [1] + [0] * amount
    for coin in denom:
        # Extend every reachable value by one more `coin`.
        for value in range(coin, amount + 1):
            ways[value] += ways[value - coin]
    return ways[amount]


# Driver code
amount = 10
denom = [1, 2, 5]
print(count_ways(amount, denom))
<filename>transacao/src/test/java/br/com/orangetalents/eventotransacao/model/TransacaoTest.java
package br.com.orangetalents.eventotransacao.model;
import br.com.orangetalents.consultatransacao.view.TransacaoResponse;
import br.com.orangetalents.eventotransacao.view.CartaoEvento;
import br.com.orangetalents.eventotransacao.view.CartaoResponse;
import br.com.orangetalents.eventotransacao.view.EstabelecimentoEvento;
import br.com.orangetalents.eventotransacao.view.EstabelecimentoResponse;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.boot.test.context.SpringBootTest;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.samePropertyValuesAs;
@SpringBootTest
@ExtendWith(MockitoExtension.class)
class TransacaoTest {
@Test
@DisplayName("Deve converter de Model para Response")
void fromModelToResponse1() {
EstabelecimentoEvento estabelecimentoEvento = new EstabelecimentoEvento("nome", "cidade", "endereço");
CartaoEvento cartaoEvento = new CartaoEvento("id", "<EMAIL>");
Estabelecimento estabelecimentoConvertido = estabelecimentoEvento.toModel();
Cartao cartaoConvertido = cartaoEvento.toModel();
EstabelecimentoResponse estabelecimentoResponseConvertido = estabelecimentoConvertido.fromModelToResponse();
CartaoResponse cartaoResponseConvertido = cartaoConvertido.fromModelToResponse();
EstabelecimentoResponse estabelecimentoResponseEsperado = new EstabelecimentoResponse("nome", "cidade", "endereço");
CartaoResponse cartaoResponseEsperado = new CartaoResponse("id", "<EMAIL>");
Transacao transacao = new Transacao("id", BigDecimal.TEN, estabelecimentoEvento, cartaoEvento,
LocalDateTime.of(2021, 03, 19, 14, 02));
TransacaoResponse transacaoResponseConvertida = transacao.fromModelToResponse();
assertThat(transacaoResponseConvertida, instanceOf(TransacaoResponse.class));
assertThat(estabelecimentoResponseConvertido, samePropertyValuesAs(estabelecimentoResponseEsperado));
assertThat(cartaoResponseConvertido, samePropertyValuesAs(cartaoResponseEsperado));
}
} |
#!/bin/bash
#SBATCH --job-name=/data/unibas/boittier/test-neighbours
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition=short
#SBATCH --output=/data/unibas/boittier/test-neighbours_%A-%a.out
hostname
# Path to scripts and executables
cubefit=/home/unibas/boittier/fdcm_project/mdcm_bin/cubefit.x
fdcm=/home/unibas/boittier/fdcm_project/fdcm.x
ars=/home/unibas/boittier/fdcm_project/ARS.py
# Variables for the job
n_steps=0
n_charges=24
scan_name=SCAN_amide1.pdb-
suffix=.xyz.chk
cubes_dir=/data/unibas/boittier/fdcm/amide/scan-large
output_dir=/data/unibas/boittier/test-neighbours
frames=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/frames.txt
initial_fit=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/24_charges_refined.xyz
initial_fit_cube=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/amide1.pdb.chk
prev_frame=113
start_frame=114
next_frame=141
acd=/home/unibas/boittier/fdcm_project/0_fit.xyz.acd
start=$start_frame
next=$next_frame
dir='frame_'$next
output_name=$output_dir/$dir/$dir'-'$start'-'$next'.xyz'
initial_fit=$output_dir/"frame_"$start/"frame_"$start'-'$prev_frame'-'$start'.xyz'
# Go to the output directory
mkdir -p $output_dir
cd $output_dir
mkdir -p $dir
cd $dir
# Do Initial Fit
# for initial fit
esp1=$cubes_dir/$scan_name$start$suffix'.p.cube'
dens1=$cubes_dir/$scan_name$start$suffix'.d.cube'
esp=$cubes_dir/$scan_name$next$suffix'.p.cube'
dens=$cubes_dir/$scan_name$next$suffix'.d.cube'
# adjust reference frame
python $ars -charges $initial_fit -pcube $dens1 -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS.log
# do gradient descent fit
$fdcm -xyz $output_name.global -dens $dens -esp $esp -stepsize 0.2 -n_steps $n_steps -learning_rate 0.5 -output $output_name > $output_name.GD.log
# adjust reference frame
python $ars -charges $output_name -pcube $esp -pcube2 $esp -frames $frames -output $output_name -acd $acd > $output_name.ARS-2.log
# make a cube file for the fit
$cubefit -v -generate -esp $esp -dens $dens -xyz refined.xyz > $output_name.cubemaking.log
# do analysis
$cubefit -v -analysis -esp $esp -esp2 $n_charges'charges.cube' -dens $dens > $output_name.analysis.log
echo $PWD
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.wpforms = void 0;
var wpforms = {
"viewBox": "0 0 1536 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M515 911v128h-252v-128h252zM515 656v127h-252v-127h252zM1273 1167v128h-341v-128h341zM1273 911v128h-672v-128h672zM1273 656v127h-672v-127h672zM1408 1516v-1240q0-8-6-14t-14-6h-32l-378 256-210-171-210 171-378-256h-32q-8 0-14 6t-6 14v1240q0 8 6 14t14 6h1240q8 0 14-6t6-14zM553 406l185-150h-406zM983 406l221-150h-406zM1536 276v1240q0 62-43 105t-105 43h-1240q-62 0-105-43t-43-105v-1240q0-62 43-105t105-43h1240q62 0 105 43t43 105z"
}
}]
};
exports.wpforms = wpforms; |
def data_processing_pipeline(update_frequency, intervention_threshold, max_iterations):
    """Run `max_iterations` rounds, applying an update every `update_frequency`
    iterations and an intervention once `count` exceeds `intervention_threshold`.
    """
    from Algorithms.Update.relative_agreement import perform_update as wrapped_update
    from Algorithms.Intervention.degree import intervene as wrapped_intervene
    count = 0
    for iteration in range(1, max_iterations + 1):
        if iteration % update_frequency == 0:
            wrapped_update() # Perform update operation
        # NOTE(review): `count` is only incremented inside the branch below,
        # which can never be entered while count == 0 <= intervention_threshold,
        # so for any non-negative threshold the intervention never fires.
        # Confirm whether `count += 1` should sit at loop level instead.
        if count > intervention_threshold:
            wrapped_intervene() # Perform intervention operation
            count += 1
    # Additional processing or return statement if needed
<gh_stars>0
import type { ITokenBalanceProps } from "../types";
import { parseBalance } from "../lib/util";
export const TokenBalance = ({ balance, symbol }: ITokenBalanceProps) => {
return <p>{`${parseBalance(balance ?? "0")} ${symbol}`}</p>;
};
|
#!/bin/bash
# Start or stop $2 memcached server instances inside the ubuntu_memcached
# vagrant box.
#   $1: 'start' or 'stop'
#   $2: number of server instances
scriptPos=`dirname $0`
serverCount=$2
# Drop stale pid files from a previous run
rm -f $scriptPos/../vagrant/ubuntu_memcached/MemcachedServer*.pid
# Fixed: `[ $1='start' ]` tested a single non-empty word and was therefore
# always true, so the stop branch was unreachable. Compare with spaces.
if [ "$1" = 'start' ]; then
cd "$scriptPos/../vagrant/ubuntu_memcached" && vagrant ssh -c "/vagrant/scripts/startMemcachedServer.sh start $serverCount"
else
cd "$scriptPos/../vagrant/ubuntu_memcached" && vagrant ssh -c "/vagrant/scripts/startMemcachedServer.sh stop $serverCount"
fi
|
"""This module contains tests of appmock mocking tcp server.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2015 ACK CYFRONET AGH"
__license__ = "This software is released under the MIT license cited in " \
"LICENSE.txt"
from tests import *
from tests.utils.path_utils import config_file, get_file_name, make_logdir
from environment import docker, appmock, common
from appmock import appmock_client
import socket
import ssl
import time
class TestAppmockTCPExample:
@classmethod
# Run the evn_up.py script, capture and parse the output
def setup_class(cls):
logdir = make_logdir(ENV_UP_LOGDIR, get_file_name(__file__))
cls.result = appmock.up(image='onedata/builder', bindir=APPMOCK_DIR,
dns_server='none', uid=common.generate_uid(),
config_path=os.path.join(config_file('env.json')),
logdir=logdir)
@classmethod
# Clean up removing all dockers created in the test
def teardown_class(cls):
docker.remove(cls.result['docker_ids'], force=True, volumes=True)
# An example test showing usage of appmock in tests and testing all its functionalities
def test_tcp_example(self):
[container] = self.result['docker_ids']
appmock_ip = docker.inspect(container)['NetworkSettings']['IPAddress'].encode(
'ascii')
# Send some requests, check counters, order appmock to send something to the client and receive it
send_some_and_receive(appmock_ip)
# Send multiple messages to itself and verify if they were received
make_appmock_send_to_itself(appmock_ip)
# Check waiting for messages
wait_until_appmock_receives(appmock_ip)
# Check counter endpoint
use_counter_endpoint(appmock_ip)
# An example code which could be verified using appmock
def send_some_and_receive(appmock_ip):
    """Exercise the mocked TCP endpoint: send a fixed mix of messages, have
    appmock push one message back, and verify appmock's per-message counters
    (including after a counter reset)."""
    # Lets assume we are testing a code that sends some messages on tcp port
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ssl_sock = ssl.wrap_socket(s)
    ssl_sock.settimeout(20)
    ssl_sock.connect((appmock_ip, 5555))
    type1_message = 'test5\n'
    type1_message_count = 5
    type2_message = 'test12\n'
    type2_message_count = 12
    all_messages_count = type1_message_count + type2_message_count
    message_to_client = 'test_message'
    def send_some():
        # Send the fixed mix of messages on the already-connected socket.
        for i in range(type1_message_count):
            ssl_sock.send(type1_message)
        for i in range(type2_message_count):
            ssl_sock.send(type2_message)
        # TODO: without this sleep there is some kind of deadlock and I don't
        # understand why. Probably because we issue a blocking request (asking
        # the server to send us something) from this same process.
        time.sleep(1)
        # We are running the test and tested code in the same process, so lets
        # send data from here. Normally, it would be sent from test code.
        appmock_client.tcp_server_send(appmock_ip, 5555, message_to_client, 1)
        # Now receive something and return it
        result = ssl_sock.recv()
        return result
    received = send_some()
    # Now, we can verify if expected messages were sent by the tested code
    assert type1_message_count == appmock_client.tcp_server_specific_message_count(appmock_ip, 5555, type1_message)
    assert type2_message_count == appmock_client.tcp_server_specific_message_count(appmock_ip, 5555, type2_message)
    assert all_messages_count == appmock_client.tcp_server_all_messages_count(appmock_ip, 5555)
    # And if 'message' was received on the socket
    assert message_to_client == received
    # Reset counters
    appmock_client.reset_tcp_server_history(appmock_ip)
    assert 0 == appmock_client.tcp_server_specific_message_count(appmock_ip, 5555, type1_message)
    assert 0 == appmock_client.tcp_server_specific_message_count(appmock_ip, 5555, type2_message)
    assert 0 == appmock_client.tcp_server_all_messages_count(appmock_ip, 5555)
    # And check again
    received = send_some()
    assert type1_message_count == appmock_client.tcp_server_specific_message_count(appmock_ip, 5555, type1_message)
    assert type2_message_count == appmock_client.tcp_server_specific_message_count(appmock_ip, 5555, type2_message)
    assert all_messages_count == appmock_client.tcp_server_all_messages_count(appmock_ip, 5555)
    assert message_to_client == received
    ssl_sock.close()
# Connects to appmock as TCP client and orders appmock to send 1000 messages to it, verifies if they came
def make_appmock_send_to_itself(appmock_ip):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ssl_sock = ssl.wrap_socket(s)
    ssl_sock.settimeout(20)
    ssl_sock.connect((appmock_ip, 5555))
    appmock_client.tcp_server_send(appmock_ip, 5555, 'message', 1000)
    # Now receive the 1000 messages (comment previously said 10)
    result = True
    for i in range(1000):
        result = result and 'message' == ssl_sock.recv()
    ssl_sock.close()
    assert result == True
# Connects to appmock as TCP client and sends 1000 requests, waits for them to be received.
def wait_until_appmock_receives(appmock_ip):
    """Exercise the tcp_server_wait_for_* and tcp_server_history endpoints
    of appmock on port 5555."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ssl_sock = ssl.wrap_socket(s)
    ssl_sock.settimeout(20)
    ssl_sock.connect((appmock_ip, 5555))
    # Send 1000 of each message and block until appmock confirms receipt.
    for i in range(1000):
        ssl_sock.send('test1\n')
        ssl_sock.send('test2\n')
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 5555, 'test1\n', 1000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 5555, 'test2\n', 1000)
    # One extra of each: an exact-count wait would now overshoot, so the
    # waits below must pass accept_more=True.
    ssl_sock.send('test1\n')
    ssl_sock.send('test2\n')
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 5555, 'test1\n', 1000, accept_more=True)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 5555, 'test2\n', 1000, accept_more=True)
    appmock_client.reset_tcp_server_history(appmock_ip)
    # 5000 arbitrary messages: wait on the total count only, not the content.
    for i in range(1000):
        ssl_sock.send('sdfg\n')
        ssl_sock.send('adfgsdf\n')
        ssl_sock.send('345rthas\n')
        ssl_sock.send('367sth\n')
        ssl_sock.send('qatert547\n')
    appmock_client.tcp_server_wait_for_any_messages(appmock_ip, 5555, 5000)
    appmock_client.reset_tcp_server_history(appmock_ip)
    # Ordered-history check: 5 x 'a' followed by 5 x 'b'.
    for i in range(5):
        ssl_sock.send('a\n')
    for i in range(5):
        ssl_sock.send('b\n')
    correct_history = ['a\n', 'a\n', 'a\n', 'a\n', 'a\n', 'b\n', 'b\n', 'b\n', 'b\n', 'b\n']
    # Wait for messages to arrive
    appmock_client.tcp_server_wait_for_any_messages(appmock_ip, 5555, 10)
    # Test the correctness of tcp_server_history endpoint
    result = appmock_client.tcp_server_history(appmock_ip, 5555)
    assert result == correct_history
    # Test if wait functions correctly returns msg history if requested.
    result = appmock_client.tcp_server_wait_for_any_messages(appmock_ip, 5555, 10, return_history=True)
    assert result == correct_history
    result = appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 5555, 'a\n', 5, return_history=True)
    assert result == correct_history
    result = appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 5555, 'b\n', 5, return_history=True)
    assert result == correct_history
# Connects to appmock as TCP client and sends 100000 requests on counter endpoint, waits for them to be received.
def use_counter_endpoint(appmock_ip):
    """Stress the counter endpoint (port 6666): send 100000 messages, check
    the per-message and total counters, reset, then repeat with 10000."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ssl_sock = ssl.wrap_socket(s)
    ssl_sock.settimeout(20)
    ssl_sock.connect((appmock_ip, 6666))
    # 20000 iterations x 5 distinct messages = 100000 messages total.
    for i in range(20000):
        ssl_sock.send('sdfg\n')
        ssl_sock.send('adfgsdf\n')
        ssl_sock.send('345rthas\n')
        ssl_sock.send('367sth\n')
        ssl_sock.send('qatert547\n')
    appmock_client.tcp_server_wait_for_any_messages(appmock_ip, 6666, 100000)
    assert 100000 == appmock_client.tcp_server_all_messages_count(appmock_ip, 6666)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, 'sdfg\n', 20000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, 'adfgsdf\n', 20000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, '345rthas\n', 20000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, '367sth\n', 20000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, 'qatert547\n', 20000)
    assert 20000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, 'sdfg\n')
    assert 20000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, 'adfgsdf\n')
    assert 20000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, '345rthas\n')
    assert 20000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, '367sth\n')
    assert 20000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, 'qatert547\n')
    # Reset and verify the counters work again after a history reset.
    appmock_client.reset_tcp_server_history(appmock_ip)
    for i in range(2000):
        ssl_sock.send('sdfg\n')
        ssl_sock.send('adfgsdf\n')
        ssl_sock.send('345rthas\n')
        ssl_sock.send('367sth\n')
        ssl_sock.send('qatert547\n')
    appmock_client.tcp_server_wait_for_any_messages(appmock_ip, 6666, 10000)
    assert 10000 == appmock_client.tcp_server_all_messages_count(appmock_ip, 6666)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, 'sdfg\n', 2000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, 'adfgsdf\n', 2000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, '345rthas\n', 2000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, '367sth\n', 2000)
    appmock_client.tcp_server_wait_for_specific_messages(appmock_ip, 6666, 'qatert547\n', 2000)
    assert 2000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, 'sdfg\n')
    assert 2000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, 'adfgsdf\n')
    assert 2000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, '345rthas\n')
    assert 2000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, '367sth\n')
    assert 2000 == appmock_client.tcp_server_specific_message_count(appmock_ip, 6666, 'qatert547\n')
    appmock_client.reset_tcp_server_history(appmock_ip)
    # A wait for zero messages must return immediately.
    appmock_client.tcp_server_wait_for_any_messages(appmock_ip, 6666, 0)
|
<gh_stars>0
#include <iostream>
#include <vector>
#include <unordered_map>
#include <string>
using namespace std;

// Indices of the people forming the smallest team covering all skills.
vector<int> sol;

/**
 * Try every subset of people; `cp` is the person currently considered and
 * `smask` the bitmask of skills covered so far.  The smallest subset whose
 * mask covers all `nskills` skills is kept in `sol`.
 *
 * Fixes vs. the original:
 *  - recursion is bounded by the number of people (`npeople`), not by the
 *    number of skills: the two counts are independent, and the original
 *    read people[cp] out of bounds whenever npeople < nskills (and ignored
 *    people beyond index nskills-1 otherwise);
 *  - oneSolution is passed by reference with push/pop backtracking instead
 *    of being copied on every call.
 */
void solution(int* people, int nskills, int npeople, int cp,
              vector<int>& oneSolution, int smask) {
    if (cp == npeople) {
        if (smask == ((1 << nskills) - 1)) {
            if (sol.size() == 0 || sol.size() > oneSolution.size()) {
                sol = oneSolution;
            }
        }
        return;
    }
    // Branch 1: skip person cp.
    solution(people, nskills, npeople, cp + 1, oneSolution, smask);
    // Branch 2: take person cp and add their skills to the mask.
    oneSolution.push_back(cp);
    solution(people, nskills, npeople, cp + 1, oneSolution, smask | people[cp]);
    oneSolution.pop_back();
}

int main() {
    // Read the skill names and give each a bit position.
    int n;
    cin >> n;
    unordered_map<string, int> smap;
    for (int i = 0; i < n; i++) {
        string temp;
        cin >> temp;
        smap[temp] = i;
    }
    // Read each person's skills as a bitmask over the skill bits.
    int np;
    cin >> np;
    vector<int> people(np, 0);  // RAII: the original leaked a new[] array
    for (int i = 0; i < np; i++) {
        int personSkills;
        cin >> personSkills;
        for (int j = 0; j < personSkills; j++) {
            string skill;
            cin >> skill;
            people[i] |= (1 << smap[skill]);
        }
    }
    vector<int> oneSolution;
    solution(people.data(), n, np, 0, oneSolution, 0);
    // Print the team as "[i, j, k]"; the original omitted the closing
    // bracket when no solution exists.
    cout << "[";
    for (size_t i = 0; i < sol.size(); i++) {
        if (i) cout << ", ";
        cout << sol[i];
    }
    cout << "]";
    return 0;
}
// Re-export the manifest of the non-requireable-extension fixture.
module.exports = require('./non-requireable-extension.manifest');
|
# Fine-tune a VideoMAE ViT-B/16 model on Kinetics-400 (16 frames, stride 4).
# Set the path to save checkpoints
OUTPUT_DIR='YOUR_PATH/k400_videomae_pretrain_base_patch16_224_frame_16x4_tube_mask_ratio_0.9_e800/eval_lr_1e-3_epoch_100'
# path to Kinetics set (train.csv/val.csv/test.csv)
DATA_PATH='YOUR_PATH/list_kinetics-400'
# path to pretrain model
MODEL_PATH='YOUR_PATH/k400_videomae_pretrain_base_patch16_224_frame_16x4_tube_mask_ratio_0.9_e800/checkpoint-799.pth'
# batch_size can be adjusted according to number of GPUs
# this script is for 64 GPUs (8 nodes x 8 GPUs)
# Positional args: $1 = this node's rank, $2 = master node address.
OMP_NUM_THREADS=1 python -m torch.distributed.launch --nproc_per_node=8 \
    --master_port 12320 --nnodes=8 --node_rank=$1 --master_addr=$2 \
    run_class_finetuning.py \
    --model vit_base_patch16_224 \
    --data_set Kinetics-400 \
    --nb_classes 400 \
    --data_path ${DATA_PATH} \
    --finetune ${MODEL_PATH} \
    --log_dir ${OUTPUT_DIR} \
    --output_dir ${OUTPUT_DIR} \
    --batch_size 8 \
    --num_sample 1 \
    --input_size 224 \
    --short_side_size 224 \
    --save_ckpt_freq 10 \
    --num_frames 16 \
    --sampling_rate 4 \
    --opt adamw \
    --lr 1e-3 \
    --opt_betas 0.9 0.999 \
    --weight_decay 0.05 \
    --epochs 100 \
    --dist_eval \
    --test_num_segment 5 \
    --test_num_crop 3 \
    --enable_deepspeed
/**
 * Return the first proper with the given type, or null when `propers`
 * is not an array or contains no match.
 * Rewritten with Array.prototype.find instead of filter().shift(), which
 * scanned the whole array and allocated an intermediate list.
 * @param {any} propers - candidate list of propers
 * @param {number} type - proper type code to look for
 * @returns {object|null}
 */
function findProperByType(propers, type) {
  if (!Array.isArray(propers)) {
    return null;
  }
  return propers.find((p) => p.type === type) ?? null;
}

/**
 * Whether the propers contain all three reading types (19, 2 and 1).
 * Fixed to return an actual boolean: the original returned either null or
 * the last matching proper object, despite the "has" name.
 * @param {any} propers
 * @returns {boolean}
 */
function hasReadings(propers) {
  return Boolean(
    findProperByType(propers, 19) &&
    findProperByType(propers, 2) &&
    findProperByType(propers, 1)
  );
}
/**
 * Return the text of the first color proper (type 25) found in any of the
 * given proper collections, or undefined when none is present.
 * @param {...any} allPropers
 */
function findColor(...allPropers) {
  for (const propers of allPropers) {
    const colorProper = findProperByType(propers, 25);
    if (colorProper != null) {
      return colorProper.text;
    }
  }
  return undefined;
}
// Public API of the propers helpers.
module.exports = {
  findProperByType,
  hasReadings,
  findColor,
};
|
# Dataset shorthands and hyper-parameters for the AUC/accuracy sweeps.
max=10
SUSY=SUSY
ij=ijcnn1
R=real-sim
e=epsilon_normalized
s=skin_nonskin
H=heart_scale
c=covtype.libsvm.binary.scale
rcv1=rcv1_train.binary
p=phishing
a=a9a
i=1
D=./data
O=./output
L=./logistic
# Make sure the data/output/log directories exist.
for dir in "$D" "$O" "$L"
do
    if [ ! -d "$dir" ]; then
        mkdir "$dir"
    fi
done
for ratio in "0.01" "0.02" "0.05" "0.95" "0.98" "0.99"
do
    # Fixed: the original read `"$SUSY"#` — a '#' glued to a word does not
    # start a comment in shell, so the loop iterated over the bogus literal
    # 'SUSY#' instead of the SUSY dataset.
    for file in "$SUSY" "$ij" "$e" "$s" "$H" "$a" "$c" "$p" "$rcv1"
    do
        # Skip runs whose output file already exists (resumable sweep).
        if [ ! -e output/${file}_ratio_${ratio}.txt ]; then
            python dataset_auc.py --dataset $file --request_ratio ${ratio} --output_file ${file}_ratio_${ratio}.txt --batch_size 32 --num_epochs $max --num_time_steps 20000
        fi
        echo $file
        echo $ratio
        if [ ! -e logistic/logistic_${file}_ratio_${ratio}.txt ]; then
            python dataset_acc.py --dataset $file --request_ratio ${ratio} --output_file logistic_${file}_ratio_${ratio}.txt --batch_size 32
        fi
    done
done
|
package org.aquacoope.mo.exception;

/**
 * Thrown when a station lookup fails for the given identifier.
 * Unchecked (extends RuntimeException) so callers are not forced to
 * declare or catch it at every call site.
 */
public class StationNotFoundException extends RuntimeException {

    /**
     * @param id identifier of the station that could not be found;
     *           embedded verbatim in the exception message
     */
    public StationNotFoundException(String id) {
        super("Could not find station " + id);
    }
}
|
#!/bin/bash
# This program uninstalls the application.
echo "Are you sure you want to uninstall CarRentalApp? press y:Yes or n:No"
read ans
# Quote "$ans": the original's unquoted `[ $ans == y ]` fails with a shell
# syntax error when the user just presses Enter (empty answer).
if [ "$ans" == y ]; then
    echo "Uninstalling file.."
    rm -r ~/Desktop/CarRentalApp/
    echo "Uninstalled successfully."
else
    echo "The file is not uninstalled."
    exit
fi
|
<reponame>LucasIcarus/The-Early-Death-of-UCN
import LRU from 'lru-cache'
import localFetch, { Request, Headers, Response } from 'node-fetch'
import config from '../src/core/config'
// True when running inside a browser (window exists). Currently unused in
// this module — NOTE(review): consider removing or wiring it up.
const inBrowser = typeof window !== 'undefined'
// LRU cache factory: at most 1000 entries, each valid for 15 minutes.
const createCache = () => LRU({
  max: 1000,
  maxAge: 1000 * 60 * 15 // 15 min cache
})
// Shared module-level cache instance used by fetch() below.
const cache = createCache()
// const createServerSideAPI = () => {
// function warmCache() {
//
// }
// const api = {}
// api.__ids__ = {}
// }
// Turn a relative or protocol-relative URL into an absolute one:
//  - "//host/path"   -> "https://host/path"
//  - "http(s)://..." -> returned untouched
//  - anything else   -> prefixed with "http://" + the configured host
const localUrl = (url) => {
  if (url.startsWith('//')) {
    return `https:${url}`
  }
  return url.startsWith('http') ? url : `http://${config.host}${url}`
}
// Fetch a child resource from the local API with a 15-minute LRU cache,
// keyed by child.name. Resolves with the parsed JSON (stamped with
// __lastUpdated). Fixes vs. the original: drops the explicit-Promise
// anti-pattern (wrapping an existing promise in `new Promise`) and
// propagates failures — the old version never rejected, so a network or
// JSON error left callers hanging forever.
const fetch = (child) => {
  if (cache && cache.has(child.name)) {
    return Promise.resolve(cache.get(child.name))
  }
  return localFetch(localUrl('api/' + child.query))
    .then((res) => res.json())
    .then((json) => {
      json.__lastUpdated = Date.now()
      cache && cache.set(child.name, json)
      return json
    })
}
// Smoke-test the cached fetch against the GraphQL endpoint. The .catch
// keeps a network failure from becoming an unhandled promise rejection.
fetch({ name: 'test', query: 'graphql?query={posts{state, publishedDate}}' }).then(val => {
  console.log(val.data)
}).catch(err => {
  console.error(err)
})
|
def compute_average(numbers):
    """Return the arithmetic mean of ``numbers``.

    Args:
        numbers: a non-empty iterable of numbers.

    Raises:
        ValueError: if ``numbers`` is empty (the original raised an
            opaque ZeroDivisionError instead).
    """
    if not numbers:
        raise ValueError("compute_average() requires at least one number")
    # sum() replaces the manual accumulation loop.
    return sum(numbers) / len(numbers)


# Example usage. The original referenced an undefined `numbers` variable
# (NameError); define a sample list whose mean matches the printed value.
numbers = [4, 6, 8]
average = compute_average(numbers)
print(average)  # Outputs 6.0
#!/bin/bash
# Configure this host as an IPv4 router on eth0.
# Enable IPv4 forwarding and disable reverse-path filtering globally.
sysctl -w net.ipv4.conf.all.forwarding=1
sysctl -w net.ipv4.conf.all.rp_filter=0
# Assign the router address, then repeat the settings for eth0 explicitly.
ip address add 1.0.1.6/24 dev eth0
sysctl -w net.ipv4.conf.eth0.forwarding=1
sysctl -w net.ipv4.conf.eth0.rp_filter=0
# IPv6 is not used in this setup: disable it entirely on eth0.
sysctl -w net.ipv6.conf.eth0.disable_ipv6=1
sysctl -w net.ipv6.conf.eth0.autoconf=0
ip link set eth0 up
<gh_stars>1-10
#!/usr/bin/env node
'use strict';

// CLI entry point: dispatch to version/help/init based on process argv.
const cli = require('../src/index');
const help = require('../src/help');
const version = require('../src/version');

const argv = process.argv.slice(2);

// NOTE(review): this chain assumes version.init() and help.init() terminate
// the process; if they merely return, execution falls through to the final
// if/else and help may be printed as well — confirm their behavior.
if (argv.indexOf('--version') !== -1 || argv.indexOf('-v') !== -1) version.init();
if (argv.indexOf('--help') !== -1 || argv.indexOf('-h') !== -1) help.init();
// 'i' is the short alias of the init command (matched anywhere in argv).
if (argv.indexOf('init') !== -1 || argv.indexOf('i') !== -1) cli.init(argv);
else help.init();
|
#!/bin/bash
# Bootstrap a Hyperledger Fabric test network with minifab, then start the
# local services defined in docker-compose.yml.
mkdir -p vars
# Download the minifab launcher (short URL redirects to the official script).
curl -o minifab -sL https://tinyurl.com/yxa2q6yr && chmod +x minifab
./minifab up --fabric-release 2.2
# Export the peer's connection variables so docker-compose can read them.
set -o allexport
source vars/run/peer1.org0.example.com.env
set +o allexport
docker-compose up -d
#!/usr/bin/env python
#-*-coding:utf-8-*-
#Copyright (c) 2020-2021 Huawei Device Co., Ltd.
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# Perform for Z kernel headers
import sys
import os
import getopt
import subprocess
import time
import string
import re
import glob
# /*..*/ //...
# Regex matching C block comments (/*...*/) and line comments (//...).
Rule1 = "(\/\*(\s|.)*?\*\/)|(\/\/.*)"
c1=re.compile(Rule1)
#use list for order
# Each entry is [grep key, grep dir, sed key, sed replacement]; applied in
# order to the copied uapi headers to make them safe for userspace builds.
key_list = [
    # grep key, gerp dir, sed key, sed replace
    ["CONFIG_64BIT", ".", "CONFIG_64BIT", "__LP64__"],
    ["__unused;", ".", "__unused;", "__linux_unused;"],
    ["__unused\[", ".", "__unused\[", "__linux_unused["],
    [" private;", ".", " private;", " __linux_private;"],
    ["\*virtual;", ".", "\*virtual;", "\* __linux_virtual;"],
    ["struct msqid_ds", ".", "struct msqid_ds", "struct __kernel_legacy_msqid_ds"],
    ["struct semid_ds", ".", "struct semid_ds", "struct __kernel_legacy_semid_ds"],
    ["struct shmid_ds", ".", "struct shmid_ds", "struct __kernel_legacy_shmid_ds"],
    ["struct ipc_perm", ".", "struct ipc_perm", "struct __kernel_legacy_ipc_perm"],
    ["union semun", ".", "union semun", "union __kernel_legacy_semun"],
    ["_NSIG", ".", "_NSIG", "_KERNEL__NSIG"],
    ["_KERNEL__NSIG_", ".", "_KERNEL__NSIG_", "_NSIG_"],
    ["#define NSIG\s", ".", "#define NSIG", "#define _KERNEL_NSIG"],
    ["SIGRTMIN", ".", "SIGRTMIN", "__SIGRTMIN"],
    ["SIGRTMAX", ".", "SIGRTMAX", "__SIGRTMAX"],
    ["struct udphdr", ".", "struct udphdr", "struct __kernel_udphdr"],
    ["__attribute_const__", ".", "__attribute_const__", "__attribute__((__const__))"],
    ["struct __kernel_sockaddr_storage", ".", "struct __kernel_sockaddr_storage", "struct sockaddr_storage"],
    ["off_t", "asm-mips/asm/stat.h", "off_t", "__kernel_off_t"],
    ["#ifndef __EXPORTED_HEADERS__", ".", "#ifndef __EXPORTED_HEADERS__", "#if 0 \/*__EXPORTED_HEADERS__*\/"],
    ["#ifndef __KERNEL_STRICT_NAMES", ".", "#ifndef __KERNEL_STRICT_NAMES", "#if 0 \/*__KERNEL_STRICT_NAMES*\/"],
    ["#ifdef __CHECKER__", ".", "#ifdef __CHECKER__", "#if 0 \/*__CHECKER__*\/"],
    ["#ifdef __HAVE_BUILTIN_BSWAP16__", ".", "#ifdef __HAVE_BUILTIN_BSWAP16__", "#if 1 \/*__HAVE_BUILTIN_BSWAP16__*\/"],
    ["#ifdef __HAVE_BUILTIN_BSWAP32__", ".", "#ifdef __HAVE_BUILTIN_BSWAP32__", "#if 1 \/*__HAVE_BUILTIN_BSWAP32__*\/"],
    ["#ifdef __HAVE_BUILTIN_BSWAP64__", ".", "#ifdef __HAVE_BUILTIN_BSWAP64__", "#if 1 \/*__HAVE_BUILTIN_BSWAP64__*\/"],
    ["#if defined(__ARMEB__)", "asm-arm", "#if defined(__ARMEB__)", "#if 0 \/*__ARMEB__*\/"],
    ["#ifdef __ARMEB__", "asm-arm", "#ifdef __ARMEB__", "#if 0 \/*__ARMEB__*\/"],
    ["defined(__ARM_EABI__)", "asm-arm", "defined(__ARM_EABI__)", "1 \/*defined(__ARM_EABI__)*\/"],
    ["#ifdef __MIPSEB__", "asm-mips", "#ifdef __MIPSEB__", "#if 0 \/*__MIPSEB__*\/"],
    ["#elif defined (__MIPSEB__)", "asm-mips", "#elif defined (__MIPSEB__)", "#elif 0 \/*__MIPSEB__*\/"],
    ["#if defined(__MIPSEB__)", "asm-mips", "#if defined(__MIPSEB__)", "#if 0 \/*__MIPSEB__*\/"],
    ["#elif defined(__MIPSEL__)", "asm-mips", "#elif defined(__MIPSEL__)", "#elif 1 \/*__MIPSEL__*\/"],
    ["#elif defined (__MIPSEL__)", "asm-mips", "#elif defined (__MIPSEL__)", "#elif 1 \/*__MIPSEL__*\/"],
    ["\!defined(__SANE_USERSPACE_TYPES__)", "asm-mips", "\!defined(__SANE_USERSPACE_TYPES__)", "0 \/*__SANE_USERSPACE_TYPES__*\/"],
    # Final pass: strip any single-line block comments left behind.
    ["\/\*.*\*\/", ".", "\/\*.*\*\/", ""],
]
def deal_file(src):
    """Strip comments from one C/C++ source file in place and prepend a
    generated-file banner.  Returns True on success; False when the path
    is missing, is a symlink, or is not a C/C++ file."""
    # file exist or not
    if not os.path.exists(src):
        print 'Error: file - %s doesn\'t exist.'% src
        return False
    if os.path.islink(src):
        # NOTE(review): missing "% src" — the %s placeholder is never filled.
        print 'Error: file - %s is a link.'
        return False
    filetype = (os.path.splitext(src))[1]
    # Only C/C++ sources and headers are rewritten.
    if not filetype in ['.c','.h','.cpp','.hh','.cc']:
        return False
    try:
        # Make the file writable if it is read-only (0664 = rw-rw-r--).
        if not os.access(src, os.W_OK):
            os.chmod(src, 0664)
    except:
        print 'Error: you can not chang %s\'s mode.'% src
    # del all comments
    inputf = open(src, 'r')
    outputfilename = (os.path.splitext(src))[0] + '_no_comment'+filetype
    outputf = open(outputfilename, 'w')
    lines=inputf.read()
    inputf.close()
    lines=re.sub(Rule1,"",lines)
    outputf.write(lines)
    outputf.close()
    #del right space & blank line
    os.remove(src)
    inputf = open(outputfilename, 'r')
    outputf = open(src, 'w')
    #write head info
    outputf.write("/*\n")
    outputf.write(" * This header was generated from the Linux kernel headers by update_headers.py,\n")
    outputf.write(" * to provide necessary information from kernel to userspace, such as constants,\n")
    outputf.write(" * structures, and macros, and thus, contains no copyrightable information.\n")
    outputf.write(" */\n")
    line = inputf.readline()
    while line:
        # Blank lines are dropped; other lines lose trailing whitespace.
        if line == '\n':
            line = line.strip("\n")
        else:
            line = line.rstrip() + '\n'
        outputf.write(line)
        line = inputf.readline()
    inputf.close()
    outputf.close()
    # Remove the intermediate *_no_comment file.
    os.remove(outputfilename)
    return True
def deal_dir(src):
# dir exist or not
if not os.path.exists(src):
print 'Error: dir - %s is not exist.'%s (src)
return False
filelists = os.listdir(src)
for eachfile in filelists:
eachfile = src + '/' +eachfile
if os.path.isdir(eachfile):
deal_dir(eachfile)
elif os.path.isfile(eachfile):
deal_file(eachfile)
return True
def copy_kernel_headers(build_top, original_dir, target_dir):
    """Replace <build_top>/<target_dir>/uapi with a fresh copy of the
    kernel uapi headers from <build_top><original_dir>.

    Always returns 1; failures are only reported on stdout (kept for
    compatibility with the caller's `status != 1` check).
    """
    headers_dir = build_top + '/' + target_dir
    external_dir = build_top + original_dir
    headers_rm_cmd = "rm " + headers_dir + "/uapi -rf"
    headers_cp_cmd = "cp " + external_dir + " " + headers_dir + " -r"
    try:
        status = subprocess.call(headers_rm_cmd, shell=True)
    except:
        # Fixed: the original called the undefined name printf(), which
        # raised NameError and masked the real failure.
        print("[Error] rm error!!!")
    try:
        status = subprocess.call(headers_cp_cmd, shell=True)
    except:
        print("[Error] cp error!!!")
    return 1
def replace_key_words_ext(keys):
    # Unpack one key_list entry: [grep key, grep dir, sed key, sed replacement].
    replace_key_words(keys[0], keys[1], keys[2], keys[3])
    return 1
def replace_key_words(original_key, target_dir, replace_key, replace_str):
    """grep for `original_key` under `target_dir` and sed-replace
    `replace_key` with `replace_str` in every matching file.

    Always returns 1; errors are only reported on stdout.
    """
    replace_cmd = "grep \""+ original_key + "\" -rl " + target_dir + " | xargs sed -i \"s/" + replace_key + "/" + replace_str + "/g\""
    try:
        status = subprocess.call(replace_cmd, shell=True)
    except:
        # Fixed: printf() was an undefined name (NameError), and the message
        # always blamed CONFIG_64BIT regardless of the key being replaced.
        print("[Error] replace %s error!!!" % (original_key))
    return 1
def usage():
    # Print how to invoke the script (it accepts no arguments); returns 0.
    print(" Usage: run the script at the root of project.\n For example:\n\
    python ./foundation/ccruntime/libc/kernel/tools/update_Z_header.py")
    return 0
def main():
    """Copy the kernel uapi headers into the prebuilts tree and rewrite
    kernel-only identifiers so they are safe for userspace builds.

    Returns 1 on success, -1 on bad invocation; exits the process on
    unrecoverable copy/chdir failures.
    """
    if len(sys.argv) > 1:
        usage()
        return -1
    kernel_dir = "prebuilts/lite/sysroot/ohos_tmp"
    external_dir = '/prebuilts/lite/sysroot/ohos_tmp_ori/uapi'
    status = -1
    try:
        build_top = os.environ["Z_BUILD_TOP"]
    except KeyError as e:
        print("[Warning] no env : %s, use current work dir." %(str(e)))
        build_top = subprocess.check_output("pwd", shell=True)
    status = copy_kernel_headers(build_top.strip(), external_dir, kernel_dir)
    if status != 1:
        print("[Error] copy files fail!!!")
        sys.exit(1)  # fixed: was sys.exti(1), an AttributeError
    try:
        os.chdir(kernel_dir + "/uapi")
    except:
        print("[Error] no uapi dir1!!!")
        sys.exit(1)
    # Sanity-check that we really are inside the uapi tree before rewriting.
    current_dir = os.getcwd()
    if "uapi" not in current_dir:
        print("[Error] not uapi dir!!!")
        sys.exit(1)
    # Apply every identifier-rewrite rule in order.
    for val in key_list:
        replace_key_words_ext(val)
    try:
        os.chdir("../")
    except:
        print("[Error] no uapi dir2!!!")
        sys.exit(1)
    # Finally strip comments / trailing whitespace from all headers.
    deal_dir("uapi")
    return 1
# Script entry point: run the conversion and print elapsed wall-clock seconds.
if __name__ == "__main__":
    starttime=time.time()
    main()
    endtime=time.time()
    print(endtime-starttime)
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.tmdd.emulation.entitydata.filters;
import java.util.ArrayList;
import org.fhwa.c2cri.tmdd.emulation.entitydata.EntityEmulationData;
import org.fhwa.c2cri.tmdd.emulation.exceptions.FilterGenerationException;
/**
*
* @author TransCore ITS, LLC Created: Feb 3, 2016
*/
/**
 * Filter selecting entities whose element value for {@code filteredItem}
 * lies in the inclusive range [rangeStart, rangeEnd].
 */
public class ValueInRangeFilter implements DataFilter {

    private String rangeStart;
    private String rangeEnd;
    private String filteredItem;
    private ArrayList valueSet; // never populated; kept so the field layout is unchanged
    private int schemaId;
    private BaseType baseType;
    private String enumeration;
    private EntityEmulationData.EntityDataType entityDataType;

    /** Hidden: a range filter is meaningless without its bounds. */
    private ValueInRangeFilter() {
    }

    /**
     * @param entityDataType table the filter queries against
     * @param filteredItem   element name the range applies to
     * @param rangeStart     inclusive lower bound (string comparison)
     * @param rangeEnd       inclusive upper bound (string comparison)
     */
    public ValueInRangeFilter(EntityEmulationData.EntityDataType entityDataType, String filteredItem, String rangeStart, String rangeEnd) {
        this.rangeStart = rangeStart;
        this.rangeEnd = rangeEnd;
        this.filteredItem = filteredItem;
        // Fixed: the original had "this.valueSet = valueSet;", a no-op
        // self-assignment of the (null) field to itself.
        this.entityDataType = entityDataType;
    }

    /**
     * Builds the SQL fragment restricting entities to the configured range.
     * Returns "" until setFilterItemIdentifier() has supplied a schema id.
     *
     * NOTE(review): the specification is built by string concatenation from
     * rangeStart/rangeEnd; if those values can come from untrusted input
     * this is injectable — consider a parameterized query.
     *
     * @throws FilterGenerationException for base types other than STRING
     */
    @Override
    public String getFilterSpecification() throws FilterGenerationException {
        String filterSpecification = "";
        if (schemaId == 0) {
            return "";
        } else {
            try {
                switch (baseType) {
                    case STRING:
                        filterSpecification = "(EntityIndex in (select EntityIndex from " + entityDataType + " where (SchemaDetailId = " + schemaId + " and EntityElementValue >= \"" + rangeStart + "\" and EntityElementValue <= \"" + rangeEnd + "\")))";
                        break;
                    default:
                        throw new Exception("Filter BaseType " + baseType + " has not been implemented in the ValueInRangeFilter.");
                }
            } catch (Exception ex) {
                throw new FilterGenerationException(ex);
            }
        }
        return filterSpecification;
    }

    /** @return the element name this filter applies to */
    @Override
    public String getFilteredItem() {
        return this.filteredItem;
    }

    /** Late-binds schema metadata; enumeration braces are stripped if present. */
    @Override
    public void setFilterItemIdentifier(int schemaId, BaseType baseType, String enumeration) {
        this.schemaId = schemaId;
        this.baseType = baseType;
        if (enumeration != null) {
            this.enumeration = enumeration.replace("{", "").replace("}", "");
        }
    }
}
|
/*
** chestboardmessage.hpp
** Login : <<EMAIL>>
** Started on Tue Jan 27 09:11:15 2009 <NAME>
** $Id$
**
** Author(s):
** - <NAME> <<EMAIL>>
**
** Copyright (C) 2009 Aldebaran Robotics
*/
#ifndef CHESTBOARDMESSAGE_HPP_
# define CHESTBOARDMESSAGE_HPP_

#include <bn/usb/deviceaddress.hpp>

namespace bn
{
  namespace usb
  {
    // Maximum size of one USB transfer buffer.
#define MAX_PACKET_SIZE (4096)

    // Protocol control bytes used when (de)serialising chest-board frames.
    static const uInt8 ChestProtocolEscape = 0xAA;
    static const uInt8 ChestProtocolAck = 0x30;
    static const uInt8 ChestProtocolNAck = 0x20;

    // One decoded USB message: routing/addressing info plus payload pointer.
    typedef struct UsbMessageType {
      uInt8 type;
      uInt8 bus;
      uInt8 dev;
      uInt8 reg;
      uInt8 size;
      uInt8 ack;
      uInt8 *buffer;
    } UsbMessage;

    // Physical bus a message is addressed to.
    typedef enum {
      BusRs485Up,
      BusRs485Down,
      BusI2c,
      BusChest,
    } BusType;

    // Direction of a register access.
    typedef enum {
      Read,
      Write
    } RWType;

    // Acknowledge status carried in a message.
    typedef enum {
      NAck = 0,
      Ack = 1,
      Msg = 2
    } AckType;

    // How a message participates in the polling protocol.
    typedef enum {
      Single = 0,
      PollingRequest,
      PollingAnswer,
      PollingClear
    } MessageType;

    /*
     * Builds and decodes framed chest-board USB packets: messages are
     * accumulated into a single buffer (stageOut) and parsed back from a
     * received buffer (stageIn / decode*).
     */
    class ChestboardMessage : public bn::AnyPtr<ChestboardMessage>
    {
      friend class ChestboardManager;

    public:
      ChestboardMessage();
      // Factory returning a ref-counted instance.
      static Ptr<ChestboardMessage> create(void);

    public:
    public:
      // Queue a message addressed by raw bus/device/register numbers.
      virtual void addMessage(uInt8 type,
                              uInt8 bus,
                              uInt8 dev,
                              uInt8 reg,
                              uInt8 toRead);
      // Queue a message addressed via a DeviceAddress object.
      virtual void addMessage(uInt8 type,
                              Ptr<DeviceAddress> dev,
                              uInt8 toRead);
      // Attach a payload to the message being built.
      virtual void setMessageData(const Int8 *pMsg,
                                  uInt32 pMsgSize);
      // Number of messages decoded from the last stageIn().
      virtual uInt32 messageCount()const { return fReadMsgCount; }
      //should we return a DeviceAddress too?
      virtual const Int8 *message(sInt32 index, sInt32 *pSize)const;
      virtual UsbMessage *message(sInt32 index);

    protected:
      // Hand out the assembled outgoing buffer / ingest a received one.
      virtual Int8 *stageOut(sInt32 *availableSize);
      virtual void stageIn(Int8 *data, sInt32 size);

    protected:
      /**
       * decode the special usb char (0xAA)
       * the AA is duplicate
       */
      void decodeAA();
      void decodeMessages();
      void decodeMessage(sInt32 index);
      /**
       * copy the footer into the buffer
       */
      void setFooter();

    protected:
      Int8 fBuffer[MAX_PACKET_SIZE];
      Int8 *fBufferRead;
      //read message
      UsbMessage fReadUsbMessage[255];
      Int8 *fReadMsgStart[255];
      sInt32 fReadMsgSize[255];
      sInt32 fReadMsgCount;
      sInt32 fToReadSize;
      sInt32 fReadSize;
      sInt32 fReadCount;
      sInt32 fMsgCount;
      sInt32 fIndex;
      const uInt8 fReadHeaderSize;
      const uInt8 fReadFooterSize;
      const uInt8 fHeaderSize;
      const uInt8 fFooterSize;
    };
  }
}

#endif /* !CHESTBOARDMESSAGE_HPP_ */
|
import { createLoader, safeValues, values } from 'configuru'
import { Level } from 'pino'

// Configuration loader; .env.jsonc supplies the default values.
const loader = createLoader({
  defaultConfigPath: '.env.jsonc',
})

// Declarative schema: each leaf binds a typed loader to an env variable.
const configSchema = {
  logger: {
    // Unchecked cast: whatever string is in LOGGER_DEFAULT_LEVEL is
    // accepted as a pino Level.
    defaultLevel: loader.custom(x => x as Level)('LOGGER_DEFAULT_LEVEL'),
    pretty: loader.bool('LOGGER_PRETTY'),
  },
  auth: {
    directBearerAuth: loader.bool('AUTH_DIRECT_BEARER_ENABLED'),
  },
  enableTests: loader.bool('ENABLE_TESTS'),
  node: {
    env: loader.string('NODE_ENV'),
  },
  server: {
    port: loader.number('SERVER_PORT'),
    allowResponseErrors: loader.bool('SERVER_ALLOW_RESPONSE_ERRORS'),
    corsHeaders: loader.string('SERVER_CORS_ALLOW_HEADERS'),
    corsOrigins: loader.string('SERVER_CORS_ALLOW_ORIGINS'),
  },
}

// Fully resolved config (may contain sensitive values — do not log).
export default values(configSchema)
// Redacted variant of the same config, safe for logging.
export const safeConfig = safeValues(configSchema)
|
<reponame>nzilbb/roofRunner<filename>js/services/distribution.service.js
(function(){
	'use strict';

	// distributionService: picks the least-used stimulus list for an
	// experiment (usage counts tracked in Firebase) and returns its items,
	// optionally shuffled.
	angular.module('RoofRunnerApp')
		.factory('distributionService', distributionService);

	distributionService.$inject = ['$http','$q','SweetAlert', '$timeout', 'shuffleService'];

	function distributionService($http, $q, SweetAlert, $timeout, shuffleService){
		var distributionService = {};
		// Experiment id of the current request (set by getList).
		distributionService.expId;

		// Resolve with the items of the least-used list for expId.
		// `shuffle` here shuffles the final result; note the chosen list may
		// additionally be shuffled inside getTargetList if its own master
		// entry requests it.
		distributionService.getList = function(expId, shuffle){
			var def = $q.defer();
			distributionService.expId = expId;
			getMaster()
				.then(determineLeastUsedList)
				.then(getTargetList)
				.then(function(result){
					if(shuffle == true){
						result.data = shuffleService.shuffleArray(result.data);
					}
					def.resolve(result.data);
				});
			return(def.promise);
		}

		function getMaster(){
			//load experiment JSON file
			// (timestamp query param defeats browser caching)
			var getListDeffered = $q.defer();
			var promise = $http.get('experiments/' + distributionService.expId + '/master.json?' + (new Date().getTime()));
			promise.then(function(result) {
				getListDeffered.resolve(result.data);
			}, function(reason) {
				SweetAlert.swal({
					title: "Error!",
					text: "Could not get master experiment script. Please contact the requester for more details.",
					type: "error"
				});
				getListDeffered.reject(reason);
				console.log(reason);
			});
			return(getListDeffered.promise);
		}

		// Count, via Firebase, how often each list in the master has been
		// served and resolve with the least-used one.
		function determineLeastUsedList(master){
			var def = $q.defer();
			//0. first create count property on master object
			for(var l in master){
				master[l].count = 0;
			}
			//1. get used lists
			var ref = firebase.database().ref().child(distributionService.expId).child('lists');
			ref.orderByChild("filename").on("value", function(lists){
				var usedLists = lists.val();
				// One-shot read: detach the listener immediately.
				ref.off("value");
				//2. iterate and compare
				for(var p in usedLists){
					if(usedLists.hasOwnProperty(p)){
						for(var l=0;l<master.length;l++){
							if(master[l].filename == usedLists[p].filename){
								master[l].count += 1;
							}
						}
					};
				}
				//sort descending based on count property
				// NOTE(review): comparator obj1.count - obj2.count sorts
				// ascending, so master[0] is the least used — confirm the
				// "descending" comment is just stale.
				master = master.sort(function(obj1, obj2) {
					return obj1.count - obj2.count;
				})
				def.resolve(master[0]);
			});
			return(def.promise);
		}

		// Fetch the chosen list file and record its use in Firebase.
		function getTargetList(targetList){
			var def = $q.defer();
			var list = targetList.filename;
			var shuffle = targetList.shuffle;
			$http.get('experiments/' + distributionService.expId + "/lists/" + list + "?" + (new Date().getTime()))
				.then(function(result){
					if(shuffle == true){
						result.data = shuffleService.shuffleArray(result.data);
					}
					def.resolve(result);
				}, function(reason) {
					SweetAlert.swal({
						title: "Error!",
						text: "Could not get individual list. Please contact the requester for more details.",
						type: "error"
					});
					def.reject(reason);
					console.log(reason);
				})
			//store used list on server
			// NOTE(review): the usage record is pushed before the HTTP GET
			// resolves, so a failed fetch still counts as a use — confirm
			// this is intended.
			var ref = firebase.database().ref().child(distributionService.expId).child('lists');
			ref.push({"filename":list});
			return(def.promise);
		}

		return distributionService;
	}
})();
|
/*
 * @Author: dang
 * @Date: 2021-04-14 23:37:30
 * @LastEditTime: 2021-09-04 17:34:19
 * @LastEditors: Please set LastEditors
 * @Description: API bindings for the reservoir (shuiku) workbench views.
 * @FilePath: \iot_prewarning_dz\src\views\shuiku\workbench\api.js
 */
import request from "@/utils/request";

const BASE_API_2 = process.env.VUE_APP_BASE_API_2; // O&M backend (unused here)
const BASE_API_6 = process.env.VUE_APP_BASE_API_6; // O&M backend
export const previewUrl = BASE_API_6;

// Ledger (standing book) briefing.
export function standing(params) {
  return request({
    url: "/brief/standing",
    baseURL: BASE_API_6,
    method: "GET",
    params: params
  });
}
// getPatrolDetailById: details of a single patrol record.
export function getPatrolDetailById(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/app/getPatrolDetailById",
    method: "post",
    data: query
  });
}
// /web/point/getReservoirImage: query image information by reservoir ID.
export function getReservoirImage(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/web/point/getReservoirImage",
    method: "get",
    params: query
  });
}
// Patrol briefing: /order/generateAssignedOrder returns the document address.
export function generateAssignedOrder(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/order/generateAssignedOrder",
    method: "post",
    data: query
  });
}
// Preview URL (HTML): /app/docToHtml?docPath=...
export function getDocToHtml(query) {
  return request({
    baseURL: BASE_API_6,
    url: `/app/docToHtml?docPath=${query}`,
    method: "get"
  });
}
// Patrol & maintenance: monthly left-hand list (/ledger/monthStatisticsList).
export function monthStatisticsList(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/ledger/monthStatisticsList",
    method: "post",
    data: query
  });
}
// Patrol details (/ledger/monthStatisticsListDetail).
export function monthStatisticsListDetail(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/ledger/monthStatisticsListDetail",
    method: "post",
    data: query
  });
}
// Maintenance details (monthStatisticsYHListDetail).
export function monthStatisticsYHListDetail(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/ledger/monthStatisticsYHListDetail",
    method: "post",
    data: query
  });
}
// Inspection list by user id (/app/getPatrolListByUid).
export function getPatrolListByUid(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/app/getPatrolListByUid",
    method: "post",
    data: query
  });
}
// Ledger statistics.
// NOTE(review): this hits the same URL as getPatrolListByUid — looks like a
// copy-paste; confirm the intended endpoint.
export function getStatisticsList(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/app/getPatrolListByUid",
    method: "post",
    data: query
  });
}
// Get a patrol officer's route by time and user id.
export function queryUserPatrolByTmAndUserId(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/app/queryUserPatrolByTmAndUserId",
    method: "get",
    params: query
  });
}
// Add a remark.
export function addRemark(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/ledger/addRemark",
    method: "post",
    data: query
  });
}
// Update a remark (original comment said "add" — copy-paste).
export function updateRemark(query) {
  return request({
    baseURL: BASE_API_6,
    url: "/ledger/updateRemark",
    method: "post",
    data: query
  });
}
|
//A Basic Algorithm to find the shortest path
//Create a data structure to store the edges of the graph
HashMap<String, HashMap<String, Integer>> graph;
//A utility function to find the minimum distance of the cities
int findMinDistance(String start, String end){
// Check if the start city exists in the graph
if(!graph.containsKey(start)) return -1;
//Create a HashMap to store the minimum distance of the cities
HashMap<String, Integer> minDistance = new HashMap<>();
//Initialize the minDistance HashMap by setting all the values to infinity
for(String city: graph.keySet()){
minDistance.put(city, Integer.MAX_VALUE);
}
//Set the initial minimum distance of the start city to 0
minDistance.put(start, 0);
// Create a set to store all the cities which have already been visited
Set<String> visitedCities = new HashSet<>();
// while the visited Cities set is not empty
while(!visitedCities.isEmpty()){
//Find the city with the minimum distance in the minDistance HashMap
String currentCity = findMinDistanceCity(minDistance, visitedCities);
//Add the city to the visited Cities set
visitedCities.add(currentCity);
//Find all the neighbors of the current City
HashMap<String, Integer> neighbors = graph.get(currentCity);
//Iterate over all the neighbors
for(String neighborCity: neighbors.keySet()){
//Find the new distance to the neighbor
int newDistance = minDistance.get(currentCity) + neighbors.get(neighborCity);
//If the new distance is smaller than the current distance, update the minDistance HashMap
if(newDistance < minDistance.get(neighborCity))
minDistance.put(neighborCity, newDistance);
}
}
// return the minimum distance between the start and end cities
return minDistance.get(end);
}
//A utility function to find the city with the minimum distance in the minDistance HashMap
String findMinDistanceCity (HashMap<String, Integer> minDistance, Set<String> visitedCities){
int minDistance = Integer.MAX_VALUE;
String minDistanceCity = null;
for(String city: minDistance.keySet()){
if(minDistance.get(city) < minDistance && !visitedCities.contains(city)){
minDistanceCity = city;
minDistance = minDistance.get(city);
}
}
return minDistanceCity;
} |
<filename>httperr_test.go
package httperr_test
import (
"fmt"
"net/http"
"net/url"
"regexp"
"strings"
"testing"
"github.com/facebookgo/httperr"
)
// Test fixtures: every occurrence of `question` is expected to be
// rewritten to `answer` by the redactor below.
const question = "world"
const answer = "42"
// redactor replaces `question` with `answer` in error strings under test.
var redactor = strings.NewReplacer(question, answer)
// TestRedactError verifies that RedactError rewrites the error message
// through the redactor while keeping a reference to the original error.
func TestRedactError(t *testing.T) {
	t.Parallel()
	inner := fmt.Errorf("hello %s", question)
	redacted := httperr.RedactError(inner, redactor)
	want := fmt.Sprintf("hello %s", answer)
	if got := redacted.Error(); got != want {
		t.Fatalf(`was expecting "%s" but got "%s"`, want, got)
	}
	if redacted.Actual() != inner {
		t.Fatal("did not get expected Actual reference")
	}
}
// TestWrapWithoutResponse checks the wrapped error string when no HTTP
// response is attached, and that the actual error and request references
// are preserved by the wrapper.
func TestWrapWithoutResponse(t *testing.T) {
	t.Parallel()
	inner := fmt.Errorf("hello %s", question)
	req := &http.Request{
		Method: "GET",
		URL: &url.URL{
			Scheme: "https",
			Host:   "daaku.org",
			Path:   "/bar/",
		},
	}
	wrapped := httperr.NewError(inner, redactor, req, nil)
	const want = `GET https://daaku.org/bar/ failed with hello 42`
	if got := wrapped.Error(); got != want {
		t.Fatalf(`was expecting "%s" but got "%s"`, want, got)
	}
	if wrapped.Actual() != inner {
		t.Fatal("did not get expected Actual reference")
	}
	if wrapped.Request() != req {
		t.Fatal("did not get expected Request reference")
	}
}
// TestWrapWithResponse checks the wrapped error string when a response is
// attached: the status text is folded into the message, and all wrapped
// references (actual error, request, response) survive.
func TestWrapWithResponse(t *testing.T) {
	t.Parallel()
	inner := fmt.Errorf("hello %s", question)
	req := &http.Request{
		Method: "GET",
		URL: &url.URL{
			Scheme: "https",
			Host:   "daaku.org",
			Path:   "/bar/",
		},
	}
	res := &http.Response{
		Status: http.StatusText(http.StatusBadGateway),
	}
	wrapped := httperr.NewError(inner, redactor, req, res)
	const want = `GET https://daaku.org/bar/ got Bad Gateway failed with hello 42`
	if got := wrapped.Error(); got != want {
		t.Fatalf(`was expecting "%s" but got "%s"`, want, got)
	}
	if wrapped.Actual() != inner {
		t.Fatal("did not get expected Actual reference")
	}
	if wrapped.Request() != req {
		t.Fatal("did not get expected Request reference")
	}
	if wrapped.Response() != res {
		t.Fatal("did not get expected Response reference")
	}
}
// TestRedactNoOp verifies the no-op redactor leaves its input unchanged.
func TestRedactNoOp(t *testing.T) {
	t.Parallel()
	if got := httperr.RedactNoOp().Replace(answer); got != answer {
		t.Fatal("no op did something")
	}
}
// TestRedactRegexp verifies regexp-based redaction: the token value is
// replaced while the parameter name (capture group 1) is kept.
func TestRedactRegexp(t *testing.T) {
	t.Parallel()
	tokenRedactor := httperr.RedactRegexp(
		regexp.MustCompile("(access_token|client_secret)=([^&]*)"),
		"$1=-- XX -- REDACTED -- XX --",
	)
	const input = "foo&access_token=1"
	const want = "foo&access_token=-- XX -- REDACTED -- XX --"
	if got := tokenRedactor.Replace(input); got != want {
		t.Fatalf(`expected "%s" actual "%s"`, want, got)
	}
}
|
#!/bin/bash
mlExtension=".moonlight"
function isIp {
    # Succeed (status 0) when the argument looks like a dotted-quad IPv4
    # address; fail otherwise. Octet ranges are not validated.
    if [[ $1 =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        return 0
    fi
    return 1
}
function ipToAvahi {
# Resolve an IPv4 address to its Avahi hostname.
# Prints field 2 of avahi-resolve-address output (presumably the hostname —
# depends on the avahi-utils output format; verify on target system).
# NOTE(review): the pipeline's status is cut's, so a failed resolution may
# still return 0 with empty output — confirm whether callers rely on this.
isIp $1 && avahi-resolve-address $1 | cut -f 2 && return 0
return 1
}
function avahiHasLocal {
    # Succeed when the hostname ends in ".local".
    # BUG FIX: the original pattern '.local$' used an unescaped dot, so any
    # character before "local" matched (e.g. "hostXlocal"). The dot is now
    # escaped, and the function returns grep's status directly instead of
    # the fragile `return $(...)` form (grep -q prints nothing, so the
    # substitution was always empty).
    echo "$1" | grep -q '\.local$'
}
function avahiAddLocal {
    # Print "<name>.local" for a bare hostname; fail (printing nothing)
    # when the argument is an IP address or already carries ".local".
    if ! isIp $1 && ! avahiHasLocal $1; then
        echo "$1.local"
        return 0
    fi
    return 1
}
function avahiToIp {
# Resolve a hostname (with or without the ".local" suffix) to an IPv4
# address via avahi-resolve-host-name.
# NOTE(review): `hn` is not declared `local`, so it leaks into the caller's
# environment — confirm nothing depends on that before changing it.
hn=$1
# IP input is rejected: this function converts names to addresses only.
isIp $hn && return 1
# Append ".local" when missing so avahi can resolve the name.
! avahiHasLocal $hn && hn=$(avahiAddLocal $hn)
# -4 forces an IPv4 answer; field 2 of the output is presumably the
# address — verify against the avahi-utils output format.
avahi-resolve-host-name -4 $hn | cut -f 2 && return 0
}
function hostDisplay {
    # Wrap the host name in parentheses for display purposes.
    printf '(%s)\n' "$1"
}
function gameShortNameToFileName {
    # Compose the game-list file name: "<game-short-name><host-extension>",
    # where $1 is the game short name and $2 is the host.
    local ext
    ext=$(hostExtenstion $2)
    echo "$1$ext"
}
function hostExtenstion {
# Build the per-host file suffix: "_<host>.moonlight".
# NOTE(review): "Extenstion" is a typo, but the name is part of this
# script's interface (gameShortNameToFileName calls it), so it is kept.
echo "_$1$mlExtension"
}
// Define the ToolGenerateArgs class
// Data-transfer object handed to IPlugin.Generate. Carries everything a
// plugin receives: the raw input, configuration, and project context —
// all modeled as plain strings here.
public class ToolGenerateArgs
{
// Define properties for input parameters, configuration settings, and project context
public string InputParameters { get; set; }
public string ConfigurationSettings { get; set; }
public string ProjectContext { get; set; }
}
// Define the IPlugin interface
// Contract for code-generating plugins: given the generation arguments,
// return the generated source text as a string.
public interface IPlugin
{
string Generate(ToolGenerateArgs args);
}
// Implement a sample plugin that generates code based on the input arguments.
public class SamplePlugin : IPlugin
{
    // Echoes the supplied input parameters and project context into a
    // single generated comment line.
    public string Generate(ToolGenerateArgs args)
    {
        return $"// Generated code based on input: {args.InputParameters}, context: {args.ProjectContext}";
    }
}
// Usage of the sample plugin: build the arguments, run Generate, and print
// the produced code to the console.
public class PluginUsage
{
    public void UseSamplePlugin()
    {
        // Plugin under demonstration.
        IPlugin samplePlugin = new SamplePlugin();
        // Arguments describing what to generate and for which project.
        ToolGenerateArgs generateArgs = new ToolGenerateArgs
        {
            InputParameters = "SampleInput",
            ConfigurationSettings = "SampleConfig",
            ProjectContext = "SampleProject"
        };
        string output = samplePlugin.Generate(generateArgs);
        Console.WriteLine(output);
    }
}
#!/bin/bash
# Finalizes the Python interpreters in a manylinux image: normalizes
# python/pip entry points, pins pip per interpreter, exposes ABI-tagged
# symlinks under /opt/python, sets up a tools venv (auditwheel/certifi/pipx),
# and (local MOD) installs valhalla/osrm build dependencies.
# Stop at any error, show all commands
set -exuo pipefail
# Get script directory
MY_DIR=$(dirname "${BASH_SOURCE[0]}")
# Get build utilities
source $MY_DIR/build_utils.sh
# /opt/python will hold one ABI-tagged symlink per interpreter installed below.
mkdir /opt/python
for PREFIX in $(find /opt/_internal/ -mindepth 1 -maxdepth 1 \( -name 'cpython*' -o -name 'pypy*' \)); do
# Some python's install as bin/python3. Make them available as
# bin/python.
if [ -e ${PREFIX}/bin/python3 ] && [ ! -e ${PREFIX}/bin/python ]; then
ln -s python3 ${PREFIX}/bin/python
fi
${PREFIX}/bin/python -m ensurepip
if [ -e ${PREFIX}/bin/pip3 ] && [ ! -e ${PREFIX}/bin/pip ]; then
ln -s pip3 ${PREFIX}/bin/pip
fi
# PY_VER is "major.minor", used to select the matching pinned requirements file.
PY_VER=$(${PREFIX}/bin/python -c "import sys; print('.'.join(str(v) for v in sys.version_info[:2]))")
# Since we fall back on a canned copy of pip, we might not have
# the latest pip and friends. Upgrade them to make sure.
${PREFIX}/bin/pip install -U --require-hashes -r ${MY_DIR}/requirements${PY_VER}.txt
# Create a symlink to PREFIX using the ABI_TAG in /opt/python/
ABI_TAG=$(${PREFIX}/bin/python ${MY_DIR}/python-tag-abi-tag.py)
ln -s ${PREFIX} /opt/python/${ABI_TAG}
# Make versioned python commands available directly in environment.
if [[ "${PREFIX}" == *"/pypy"* ]]; then
ln -s ${PREFIX}/bin/python /usr/local/bin/pypy${PY_VER}
else
ln -s ${PREFIX}/bin/python /usr/local/bin/python${PY_VER}
fi
done
# Create venv for auditwheel & certifi
TOOLS_PATH=/opt/_internal/tools
/opt/python/cp39-cp39/bin/python -m venv $TOOLS_PATH
source $TOOLS_PATH/bin/activate
# Install default packages
pip install -U --require-hashes -r $MY_DIR/requirements3.9.txt
# Install certifi and pipx
pip install -U --require-hashes -r $MY_DIR/requirements-base-tools.txt
# Make pipx available in PATH,
# Make sure when root installs apps, they're also in the PATH
# (the heredoc below writes a wrapper script; \$ escapes are evaluated at
# wrapper run time, ${TOOLS_PATH} is expanded now).
cat <<EOF > /usr/local/bin/pipx
#!/bin/bash
set -euo pipefail
if [ \$(id -u) -eq 0 ]; then
export PIPX_HOME=/opt/_internal/pipx
export PIPX_BIN_DIR=/usr/local/bin
fi
${TOOLS_PATH}/bin/pipx "\$@"
EOF
chmod 755 /usr/local/bin/pipx
# Our openssl doesn't know how to find the system CA trust store
# (https://github.com/pypa/manylinux/issues/53)
# And it's not clear how up-to-date that is anyway
# So let's just use the same one pip and everyone uses
ln -s $(python -c 'import certifi; print(certifi.where())') /opt/_internal/certs.pem
# If you modify this line you also have to modify the versions in the Dockerfiles:
export SSL_CERT_FILE=/opt/_internal/certs.pem
# Deactivate the tools virtual environment
deactivate
# install other tools with pipx
pushd $MY_DIR/requirements-tools
for TOOL_PATH in $(find . -type f); do
TOOL=$(basename ${TOOL_PATH})
pipx install --pip-args="--require-hashes -r" ${TOOL}
done
popd
# We do not need the precompiled .pyc and .pyo files.
clean_pyc /opt/_internal
# remove cache
rm -rf /root/.cache
# /MOD START: install valhalla and osrm dependencies
# Pick the distro package manager and dependency list from the manylinux policy.
if [ "${AUDITWHEEL_POLICY}" == "manylinux2010" ] || [ "${AUDITWHEEL_POLICY}" == "manylinux2014" ]; then
PACKAGE_MANAGER=yum
COMPILE_DEPS="boost-devel sqlite-devel libspatialite-devel protobuf-devel libcurl-devel luajit-devel geos-devel"
elif [ "${AUDITWHEEL_POLICY}" == "manylinux_2_24" ]; then
PACKAGE_MANAGER=apt
# valhalla
COMPILE_DEPS="libspatialite-dev libprotobuf-dev libgeos-dev libluajit-5.1-dev libcurl4-openssl-dev libgeos++-dev protobuf-compiler"
else
echo "Unsupported policy: '${AUDITWHEEL_POLICY}'"
exit 1
fi
if [ "${PACKAGE_MANAGER}" == "yum" ]; then
yum -y install ${COMPILE_DEPS}
yum clean all
rm -rf /var/cache/yum
elif [ "${PACKAGE_MANAGER}" == "apt" ]; then
export DEBIAN_FRONTEND=noninteractive
apt-get update -qq
apt-get install -qq -y --no-install-recommends ${COMPILE_DEPS}
apt-get clean -qq
rm -rf /var/lib/apt/lists/*
else
echo "Not implemented"
exit 1
fi
# Deduplicate identical files to shrink the final image.
hardlink -cv /opt/_internal
# update system packages
LC_ALL=C ${MY_DIR}/update-system-packages.sh
|
# This file is using unix file endings
# Clears the terminal, rebuilds src/w01 from a clean tree, then runs four
# smoke tests that pipe fixed input sequences into the binary.
printf "\33c"
set -e
echo =====================
echo Setting up...
echo =====================
make clean
echo
echo Clean complete
echo
make src/w01
echo =====================
echo Compilation complete
echo =====================
echo
# Typo fix: "Expcted" -> "Expected" in the four test banners below.
echo Test 1: Increasing numbers - Expected result 4
echo "5 0 1 2 3 4" | ./src/w01
echo Test 2: Decreasing numbers - Expected result 0
echo "5 4 3 2 1 0" | ./src/w01
echo Test 3: Toppoint in the middle - Expected result 2
echo "5 0 1 2 1 0" | ./src/w01
echo Test 4: Flat middle - Expected result 1
echo "5 0 1 1 1 0" | ./src/w01
|
<gh_stars>0
/**
* WeChat authenticated login.
*/
/**
* @author root
*
*/
package cn.cerc.oauth.wx;
def get_index_position(s, c):
    """Return the index of the first occurrence of ``c`` in ``s``.

    Follows the ``str.find`` contract: returns -1 when ``c`` is absent.
    """
    position = s.find(c)
    return position
# Demo: the first "W" in "Hello World" is at index 6.
result = get_index_position("Hello World","W")
print(result)
<gh_stars>1-10
//
// USRuntimeHelper.h
// USRuntimeHelper
//
// Created by <NAME> on 9/25/14.
// Copyright (c) 2014 ufosky.<EMAIL>. All rights reserved.
//
#import <Foundation/Foundation.h>
// Dynamic invocation helpers: invoke a method looked up by selector name.
// Presumably `arguments` holds boxed argument values and the result is
// written through `returnValue` — confirm against the implementation.
void US_invokeInstanceMethod(id obj, NSString *selectorName, NSArray *arguments, void *returnValue);
void US_invokeClassMethod(Class cls, NSString *selectorName, NSArray *arguments, void *returnValue);
void US_invokeClassMethodByName(NSString *className, NSString *selectorName, NSArray *arguments, void *returnValue);
// Swizzling helpers for instance/class methods.
// NOTE(review): the parameter name 'new' is a keyword in C++/Objective-C++;
// this header would fail to compile if included from a .mm file — consider
// renaming the parameter (declaration-only, so callers are unaffected).
void US_swizzleInstanceMethod(Class c, SEL orig, SEL new);
void US_swizzleClassMethod(Class c, SEL orig, SEL new);
// Replacement helpers: install `imp` for `sel`, keeping the previous
// implementation reachable under the `backup` selector.
void US_replaceClassMethod(Class c, SEL sel, SEL backup, IMP imp);
void US_replaceClassMethodByName(Class c, NSString *selName, NSString *backupName, IMP imp);
void US_replaceInstanceMethod(Class c, SEL sel, SEL backup, IMP imp);
void US_replaceInstanceMethodByName(Class c, NSString *selName, NSString *backupName, IMP imp);
import math
from bisect import bisect_right
class CustomLRScheduler:
    """Multi-step learning-rate schedule with an optional warmup phase.

    For epochs below ``warmup_iter`` the learning rate ramps up according to
    the ``warmup`` mode ('linear' or 'exp'); afterwards it decays by
    ``gamma`` for every milestone already passed (standard multi-step).
    """

    def __init__(
        self,
        optimizer,
        milestones: list,
        gamma=0.1,
        warmup_iter=500,
        warmup_ratio=5e-4,
        warmup='exp',
        last_epoch=-1,
    ):
        """
        Args:
            optimizer: object exposing ``param_groups`` (list of dicts with 'lr').
            milestones: sorted epoch numbers at which the LR decays by ``gamma``.
            gamma: multiplicative decay factor per passed milestone.
            warmup_iter: number of initial epochs treated as warmup.
            warmup_ratio: scale factor for the 'exp' warmup curve.
            warmup: warmup mode, 'linear' or 'exp'.
            last_epoch: kept for API compatibility; not used internally.
        """
        self.optimizer = optimizer
        self.milestones = milestones
        self.gamma = gamma
        self.warmup_iter = warmup_iter
        self.warmup_ratio = warmup_ratio
        self.warmup = warmup
        self.last_epoch = last_epoch
        self.current_epoch = 0
        # Snapshot of the initial LRs; all later values derive from these.
        self.base_lr = [group['lr'] for group in optimizer.param_groups]

    def step(self, epoch):
        """Set every param group's learning rate for the given epoch.

        Raises:
            ValueError: if ``warmup`` is neither 'linear' nor 'exp' while in
                the warmup phase. (BUG FIX: previously an unknown mode left
                ``new_lr`` undefined and crashed with UnboundLocalError.)
        """
        self.current_epoch = epoch
        if self.current_epoch < self.warmup_iter:
            if self.warmup == 'linear':
                # Ramp linearly from 0 toward base_lr; min() caps at base_lr.
                warmup_factor = 1.0 / self.warmup_iter
                new_lr = [
                    min(self.base_lr[i] + self.current_epoch * warmup_factor * self.base_lr[i], self.base_lr[i])
                    for i in range(len(self.base_lr))
                ]
            elif self.warmup == 'exp':
                # alpha scales the exponential ramp; derived from the first
                # group's base LR (original behavior, kept as-is).
                alpha = self.warmup_ratio * self.base_lr[0]
                new_lr = [
                    self.base_lr[i] * (1 - math.exp(-self.current_epoch / alpha))
                    for i in range(len(self.base_lr))
                ]
            else:
                raise ValueError("unknown warmup mode: %r" % (self.warmup,))
        else:
            # Multi-step decay: gamma^(number of milestones already passed).
            new_lr = [
                self.base_lr[i] * (self.gamma ** bisect_right(self.milestones, self.current_epoch))
                for i in range(len(self.base_lr))
            ]
        for i, param_group in enumerate(self.optimizer.param_groups):
            param_group['lr'] = new_lr[i]
<gh_stars>1-10
# Rake tasks for RapiDoc: `rapi_doc:setup` copies template config files into
# the project, `rapi_doc:generate` renders the API documentation from them.
include RapiDoc::RapiConfig
namespace :rapi_doc do
desc "Generate the config files"
task :setup do
# Create the config directory unless a previous setup already made it.
if File.directory?(config_dir)
puts "#{BASE_DIR}/#{File.basename(config_dir)} exists"
else
FileUtils.mkdir(config_dir)
end
# Copy each template into config_dir, skipping files the user already has
# (so local edits are never overwritten).
%w(config_file layout_file class_layout_file frameset_file main_file).each do |type_file|
target_file = send(type_file, :target)
template_file = send(type_file, :template)
if File.exist? target_file
puts "#{BASE_DIR}/#{File.basename(target_file)} exists"
else
FileUtils.cp template_file, config_dir
puts "Generated #{BASE_DIR}/#{File.basename(template_file)}" # TODO Add instructions for users to update the config file
end
end
end
desc "Generate the API Documentation"
task :generate do
begin
yml = YAML::load(File.open(config_file(:target)))
rescue
# NOTE(review): this bare rescue also hides YAML syntax errors, in which
# case the message below wrongly suggests the file is missing — consider
# rescuing specific exception classes.
puts "It seems that you don't have the config files yet. Please run rake rapi_doc:setup"
end
# Generating documentations
if yml
resources = []
# One ResourceDoc per top-level key in the YAML config.
yml.keys.each do |key|
resources << RapiDoc::ResourceDoc.new(key, yml[key]["location"], yml[key]["doc_path"])
end
# generate the apidoc
RapiDoc::RAPIDoc.new(resources)
end
end
end
|
<gh_stars>0
package com.go2group.jira.webwork;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.atlassian.jira.component.ComponentAccessor;
import com.atlassian.jira.project.Project;
import com.atlassian.jira.web.action.JiraWebActionSupport;
import com.go2group.hipchat.HipChatProxyClient;
import com.go2group.hipchat.HipChatProxyClient.Room;
import com.go2group.hipchat.components.ConfigurationManager;
// JIRA webwork action that posts an announcement message to HipChat rooms.
// doDefault() populates the room list for the input form; doExecute() sends
// the message to the rooms selected via roomOption / roomsToNotify.
public class AnnouncementAction extends JiraWebActionSupport {
private static final long serialVersionUID = -7422430977881201919L;
// private static final Logger log = LoggerFactory.getLogger(AnnouncementAction.class);
// Client used to talk to the HipChat API (via proxy).
private final HipChatProxyClient hipChatApiClient;
// Source of the stored HipChat API token and per-project room subscriptions.
private final ConfigurationManager configurationManager;
// Rooms fetched from HipChat, shown in the form.
private Collection<Room> rooms;
// Room ids explicitly chosen by the user (used when roomOption is neither
// "all" nor "subscribed").
private String[] roomsToNotify;
// True once the message has been dispatched; read by the result view.
private boolean messagePosted;
// Form fields bound by webwork.
private String message;
private String color;
private String format;
private String roomOption;
private String notify;
public AnnouncementAction(HipChatProxyClient hipChatApiClient, ConfigurationManager configurationManager) {
this.hipChatApiClient = hipChatApiClient;
this.configurationManager = configurationManager;
}
// Renders the input form: fetches the current room list from HipChat.
@Override
public String doDefault() throws Exception {
String authToken = configurationManager.getHipChatApiToken();
rooms = this.hipChatApiClient.getRooms(authToken);
return INPUT;
}
// Sends the announcement. Target selection: "all" notifies every room,
// "subscribed" notifies rooms configured on any project, otherwise the
// explicitly selected roomsToNotify are used.
@Override
public String doExecute() throws Exception {
if (message != null) {
String authToken = configurationManager.getHipChatApiToken();
if (authToken != null) {
rooms = this.hipChatApiClient.getRooms(authToken);
//message = getMessageWithUrls(message);
// Optimistically mark as posted; reset below when no target matched.
this.messagePosted = true;
if ("all".equals(roomOption)) {
for (Room room : rooms) {
this.hipChatApiClient.notifyRoom(authToken, room.getRoomId().toString(), message, color, format, getNotify());
}
} else if ("subscribed".equals(roomOption)) {
// Union of the rooms every project is configured to notify
// (shadows the `rooms` field deliberately or not — beware).
Set<String> rooms = new HashSet<String>();
List<Project> projects = ComponentAccessor.getProjectManager().getProjectObjects();
for (Project project : projects) {
List<String> roomsToNotify = this.configurationManager.getHipChatRooms(project.getKey());
rooms.addAll(roomsToNotify);
}
for (String room : rooms) {
this.hipChatApiClient.notifyRoom(authToken, room, message, color, format, getNotify());
}
} else if (roomsToNotify != null) {
for (String room : roomsToNotify) {
this.hipChatApiClient.notifyRoom(authToken, room, message, color, format, getNotify());
}
} else {
this.messagePosted = false;
}
}
}
return SUCCESS;
}
// Plain webwork accessors below.
public Collection<Room> getRooms() {
return rooms;
}
public void setRooms(Collection<Room> rooms) {
this.rooms = rooms;
}
public String[] getRoomsToNotify() {
return roomsToNotify;
}
public void setRoomsToNotify(String[] roomsToNotify) {
this.roomsToNotify = roomsToNotify;
}
public boolean isMessagePosted() {
return messagePosted;
}
public void setMessagePosted(boolean messagePosted) {
this.messagePosted = messagePosted;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public String getColor() {
return color;
}
public void setColor(String color) {
this.color = color;
}
public String getRoomOption() {
return roomOption;
}
public void setRoomOption(String roomOption) {
this.roomOption = roomOption;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public String getNotify() {
return notify;
}
public void setNotify(String notify) {
this.notify = notify;
}
}
|
/// Flattens an array of band member lists into one list of members,
/// preserving band order and member order within each band.
func flattenBands(_ bands: [[String]]) -> [String] {
    var members: [String] = []
    for band in bands {
        members.append(contentsOf: band)
    }
    return members
}
// Test
// Three bands of differing sizes exercise order preservation across the
// flattened result.
let bands = [
["BandA_member1", "BandA_member2", "BandA_member3"],
["BandB_member1", "BandB_member2"],
["BandC_member1", "BandC_member2", "BandC_member3", "BandC_member4"]
]
let flattenedBands = flattenBands(bands)
print(flattenedBands) // Output: ["BandA_member1", "BandA_member2", "BandA_member3", "BandB_member1", "BandB_member2", "BandC_member1", "BandC_member2", "BandC_member3", "BandC_member4"]
import time

# Measure how long it takes to print 0..999.
# Uses time.perf_counter() instead of time.time(): perf_counter is a
# monotonic, high-resolution clock, so the measured duration cannot be
# distorted by system clock adjustments.
start_time = time.perf_counter()
for i in range(1000):
    print(i)
end_time = time.perf_counter()
print(f"Total time: {end_time - start_time}s")
package mezz.jei.input;
import javax.annotation.Nullable;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import mezz.jei.input.click.ClickFocusHandler;
import mezz.jei.input.click.GuiAreaClickHandler;
import mezz.jei.input.click.MouseClickState;
import net.minecraftforge.client.event.GuiScreenEvent;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.screen.Screen;
import net.minecraft.client.gui.screen.inventory.ContainerScreen;
import net.minecraft.client.gui.widget.TextFieldWidget;
import net.minecraft.client.util.InputMappings;
import mezz.jei.api.gui.handlers.IGuiClickableArea;
import mezz.jei.api.ingredients.IIngredientHelper;
import mezz.jei.api.recipe.IFocus;
import mezz.jei.api.runtime.IIngredientManager;
import mezz.jei.bookmarks.BookmarkList;
import mezz.jei.config.IEditModeConfig;
import mezz.jei.config.IWorldConfig;
import mezz.jei.config.IngredientBlacklistType;
import mezz.jei.config.KeyBindings;
import mezz.jei.events.EventBusHelper;
import mezz.jei.gui.Focus;
import mezz.jei.gui.GuiScreenHelper;
import mezz.jei.gui.overlay.IngredientListOverlay;
import mezz.jei.gui.overlay.bookmarks.LeftAreaDispatcher;
import mezz.jei.gui.recipes.RecipesGui;
import mezz.jei.ingredients.IngredientFilter;
import mezz.jei.ingredients.IngredientManager;
import mezz.jei.util.ReflectionUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
// Central input dispatcher for JEI: listens to Forge GUI keyboard/mouse
// events, routes them through the overlay/bookmark/recipe GUIs, and handles
// the global and per-ingredient keybinds. Clicks are processed twice
// (SIMULATE in the MouseClicked phase, EXECUTE in MouseReleased).
public class InputHandler {
private static final Logger LOGGER = LogManager.getLogger();
private final IIngredientManager ingredientManager;
// Held weakly so this handler does not keep a stale filter alive across reloads.
private final WeakReference<IngredientFilter> weakIngredientFilter;
private final RecipesGui recipesGui;
private final IngredientListOverlay ingredientListOverlay;
private final IEditModeConfig editModeConfig;
private final IWorldConfig worldConfig;
private final GuiScreenHelper guiScreenHelper;
private final LeftAreaDispatcher leftAreaDispatcher;
private final BookmarkList bookmarkList;
// GUIs queried (in registration order) for the ingredient under the mouse.
private final List<IShowsRecipeFocuses> showsRecipeFocuses = new ArrayList<>();
// Click handlers tried in order until one consumes the click.
private final CombinedMouseHandler clickHandlers;
public InputHandler(
RecipesGui recipesGui,
IngredientFilter ingredientFilter,
IngredientManager ingredientManager,
IngredientListOverlay ingredientListOverlay,
IEditModeConfig editModeConfig,
IWorldConfig worldConfig,
GuiScreenHelper guiScreenHelper,
LeftAreaDispatcher leftAreaDispatcher,
BookmarkList bookmarkList
) {
this.ingredientManager = ingredientManager;
this.weakIngredientFilter = new WeakReference<>(ingredientFilter);
this.recipesGui = recipesGui;
this.ingredientListOverlay = ingredientListOverlay;
this.editModeConfig = editModeConfig;
this.worldConfig = worldConfig;
this.guiScreenHelper = guiScreenHelper;
this.leftAreaDispatcher = leftAreaDispatcher;
this.bookmarkList = bookmarkList;
this.showsRecipeFocuses.add(recipesGui);
this.showsRecipeFocuses.add(ingredientListOverlay);
this.showsRecipeFocuses.add(leftAreaDispatcher);
this.showsRecipeFocuses.add(new GuiContainerWrapper(guiScreenHelper));
// NOTE(review): handler order looks deliberate (edit mode before focus
// clicks, gui-area handling last) — confirm before reordering.
this.clickHandlers = new CombinedMouseHandler(
new ClickEditHandler(),
ingredientListOverlay.getMouseHandler(),
leftAreaDispatcher.getMouseHandler(),
new ClickFocusHandler(this, recipesGui),
new ClickGlobalHandler(),
new GuiAreaClickHandlerGenerator()
);
}
// Subscribes weakly to the Forge GUI events; weak registration lets this
// handler be garbage-collected without explicit unregistration.
public void registerToEventBus() {
EventBusHelper.registerWeakListener(this, GuiScreenEvent.KeyboardKeyPressedEvent.Pre.class, InputHandler::onGuiKeyPressedEvent);
EventBusHelper.registerWeakListener(this, GuiScreenEvent.KeyboardCharTypedEvent.Pre.class, InputHandler::onGuiCharTypedEvent);
EventBusHelper.registerWeakListener(this, GuiScreenEvent.KeyboardKeyPressedEvent.Post.class, InputHandler::onGuiKeyboardEvent);
EventBusHelper.registerWeakListener(this, GuiScreenEvent.KeyboardCharTypedEvent.Post.class, InputHandler::onGuiCharTypedEvent);
EventBusHelper.registerWeakListener(this, GuiScreenEvent.MouseClickedEvent.Pre.class, InputHandler::onGuiMouseEvent);
EventBusHelper.registerWeakListener(this, GuiScreenEvent.MouseReleasedEvent.Pre.class, InputHandler::onGuiMouseEvent);
EventBusHelper.registerWeakListener(this, GuiScreenEvent.MouseScrollEvent.Pre.class, InputHandler::onGuiMouseEvent);
}
/**
* When we have keyboard focus, use Pre
*/
public void onGuiKeyPressedEvent(GuiScreenEvent.KeyboardKeyPressedEvent.Pre event) {
if (hasKeyboardFocus()) {
handleKeyEvent(event.getKeyCode(), event.getScanCode(), event.getModifiers());
event.setCanceled(true);
}
}
/**
* When we have keyboard focus, use Pre
*/
public void onGuiCharTypedEvent(GuiScreenEvent.KeyboardCharTypedEvent.Pre event) {
if (hasKeyboardFocus() && handleCharTyped(event.getCodePoint(), event.getModifiers())) {
event.setCanceled(true);
}
}
/**
* Without keyboard focus, use Post
*/
public void onGuiKeyboardEvent(GuiScreenEvent.KeyboardKeyPressedEvent.Post event) {
if (!hasKeyboardFocus() && handleKeyEvent(event.getKeyCode(), event.getScanCode(), event.getModifiers())) {
event.setCanceled(true);
}
}
/**
* Without keyboard focus, use Post
*/
public void onGuiCharTypedEvent(GuiScreenEvent.KeyboardCharTypedEvent.Post event) {
if (!hasKeyboardFocus() && handleCharTyped(event.getCodePoint(), event.getModifiers())) {
event.setCanceled(true);
}
}
// Mouse press: probe handlers in SIMULATE mode; cancel the vanilla event
// when any handler would consume the click.
public void onGuiMouseEvent(GuiScreenEvent.MouseClickedEvent.Pre event) {
int mouseButton = event.getButton();
if (mouseButton > -1) {
Screen screen = event.getGui();
double mouseX = event.getMouseX();
double mouseY = event.getMouseY();
IMouseHandler handler = this.clickHandlers.handleClick(screen, mouseX, mouseY, mouseButton, MouseClickState.SIMULATE);
if (handler != null) {
event.setCanceled(true);
}
}
}
// Mouse release: actually EXECUTE the click that was simulated on press.
public void onGuiMouseEvent(GuiScreenEvent.MouseReleasedEvent.Pre event) {
int mouseButton = event.getButton();
if (mouseButton > -1) {
IMouseHandler handled = this.clickHandlers.handleClick(event.getGui(), event.getMouseX(), event.getMouseY(), mouseButton, MouseClickState.EXECUTE);
if (handled != null) {
event.setCanceled(true);
}
}
}
// Scroll events go to the ingredient overlay first, then the left area.
public void onGuiMouseEvent(GuiScreenEvent.MouseScrollEvent.Pre event) {
double dWheel = event.getScrollDelta();
double mouseX = event.getMouseX();
double mouseY = event.getMouseY();
IMouseHandler overlayMouseHandler = ingredientListOverlay.getMouseHandler();
if (overlayMouseHandler.handleMouseScrolled(mouseX, mouseY, dWheel) ||
leftAreaDispatcher.handleMouseScrolled(mouseX, mouseY, dWheel)) {
event.setCanceled(true);
}
}
// Finds the clicked ingredient, honoring each GUI's canSetFocusWithMouse().
@Nullable
public IClickedIngredient<?> getFocusUnderMouseForClick(double mouseX, double mouseY) {
for (IShowsRecipeFocuses gui : showsRecipeFocuses) {
if (gui.canSetFocusWithMouse()) {
IClickedIngredient<?> clicked = gui.getIngredientUnderMouse(mouseX, mouseY);
if (clicked != null) {
return clicked;
}
}
}
return null;
}
// Keyboard variant: ignores canSetFocusWithMouse() and asks every GUI.
@Nullable
private IClickedIngredient<?> getIngredientUnderMouseForKey(double mouseX, double mouseY) {
for (IShowsRecipeFocuses gui : showsRecipeFocuses) {
IClickedIngredient<?> clicked = gui.getIngredientUnderMouse(mouseX, mouseY);
if (clicked != null) {
return clicked;
}
}
return null;
}
// In edit mode, clicking an ingredient toggles it on/off the config
// blacklist (ctrl-click uses wildcard matching).
public class ClickEditHandler implements IMouseHandler {
@Override
public IMouseHandler handleClick(Screen screen, double mouseX, double mouseY, int mouseButton, MouseClickState clickState) {
if (!worldConfig.isEditModeEnabled()) {
return null;
}
IClickedIngredient<?> clicked = getFocusUnderMouseForClick(mouseX, mouseY);
if (clicked == null) {
return null;
}
if (!clickState.isSimulate()) {
handler(clicked);
}
return this;
}
private <V> void handler(IClickedIngredient<V> clicked) {
V ingredient = clicked.getValue();
IngredientBlacklistType blacklistType = Screen.hasControlDown() ? IngredientBlacklistType.WILDCARD : IngredientBlacklistType.ITEM;
IIngredientHelper<V> ingredientHelper = ingredientManager.getIngredientHelper(ingredient);
IngredientFilter ingredientFilter = weakIngredientFilter.get();
if (ingredientFilter == null) {
LOGGER.error("Can't edit the config blacklist, the ingredient filter is null");
} else {
if (editModeConfig.isIngredientOnConfigBlacklist(ingredient, ingredientHelper)) {
editModeConfig.removeIngredientFromConfigBlacklist(ingredientFilter, ingredientManager, ingredient, blacklistType, ingredientHelper);
} else {
editModeConfig.addIngredientToConfigBlacklist(ingredientFilter, ingredientManager, ingredient, blacklistType, ingredientHelper);
}
}
}
}
// Lets mouse buttons bound to global JEI keybinds trigger them.
public class ClickGlobalHandler implements IMouseHandler {
@Override
public IMouseHandler handleClick(Screen screen, double mouseX, double mouseY, int mouseButton, MouseClickState clickState) {
InputMappings.Input input = InputMappings.Type.MOUSE.getOrCreate(mouseButton);
if (handleGlobalKeybinds(input, clickState)) {
return this;
}
return null;
}
}
// Produces a click handler when the cursor is over a registered clickable
// area of a container GUI (e.g. the crafting arrow).
public class GuiAreaClickHandlerGenerator implements IMouseHandler {
@Nullable
@Override
public IMouseHandler handleClick(Screen screen, double mouseX, double mouseY, int mouseButton, MouseClickState clickState) {
if (screen instanceof ContainerScreen) {
ContainerScreen<?> guiContainer = (ContainerScreen<?>) screen;
IGuiClickableArea clickableArea = guiScreenHelper.getGuiClickableArea(guiContainer, mouseX - guiContainer.getGuiLeft(), mouseY - guiContainer.getGuiTop());
if (clickableArea != null) {
return new GuiAreaClickHandler(recipesGui, clickableArea, guiContainer);
}
}
return null;
}
}
private boolean hasKeyboardFocus() {
return ingredientListOverlay.hasKeyboardFocus();
}
private boolean handleCharTyped(char codePoint, int modifiers) {
return ingredientListOverlay.onCharTyped(codePoint, modifiers);
}
// Key dispatch order: overlay focus handling, then global keybinds, then
// per-ingredient keybinds (only when no text field has focus).
private boolean handleKeyEvent(int keyCode, int scanCode, int modifiers) {
InputMappings.Input input = InputMappings.getKey(keyCode, scanCode);
if (ingredientListOverlay.hasKeyboardFocus()) {
if (KeyBindings.isInventoryCloseKey(input) || KeyBindings.isEnterKey(keyCode)) {
ingredientListOverlay.clearKeyboardFocus();
return true;
} else if (ingredientListOverlay.onKeyPressed(keyCode, scanCode, modifiers)) {
return true;
}
}
if (handleGlobalKeybinds(input, MouseClickState.VANILLA)) {
return true;
}
if (!isContainerTextFieldFocused() && !ingredientListOverlay.hasKeyboardFocus()) {
IClickedIngredient<?> clicked = getIngredientUnderMouseForKey(MouseUtil.getX(), MouseUtil.getY());
if (clicked != null && handleFocusKeybinds(clicked, input, MouseClickState.VANILLA)) {
ingredientListOverlay.clearKeyboardFocus();
return true;
}
return ingredientListOverlay.onKeyPressed(keyCode, scanCode, modifiers);
}
return false;
}
// Toggle keybinds that work regardless of what is under the mouse.
private boolean handleGlobalKeybinds(InputMappings.Input input, MouseClickState clickState) {
if (KeyBindings.toggleOverlay.isActiveAndMatches(input)) {
if (!clickState.isSimulate()) {
worldConfig.toggleOverlayEnabled();
}
return true;
}
if (KeyBindings.toggleBookmarkOverlay.isActiveAndMatches(input)) {
if (!clickState.isSimulate()) {
worldConfig.toggleBookmarkEnabled();
}
return true;
}
return ingredientListOverlay.onGlobalKeyPressed(input, clickState);
}
// Per-ingredient keybinds: show recipes/uses, or toggle a bookmark.
public boolean handleFocusKeybinds(IClickedIngredient<?> clicked, InputMappings.Input input, MouseClickState clickState) {
final boolean showRecipe = KeyBindings.showRecipe.isActiveAndMatches(input);
final boolean showUses = KeyBindings.showUses.isActiveAndMatches(input);
if (showRecipe || showUses) {
if (!clickState.isSimulate()) {
IFocus.Mode mode = showRecipe ? IFocus.Mode.OUTPUT : IFocus.Mode.INPUT;
recipesGui.show(new Focus<Object>(mode, clicked.getValue()));
}
this.ingredientListOverlay.clearKeyboardFocus();
return true;
} else if (KeyBindings.bookmark.isActiveAndMatches(input)) {
if (!clickState.isSimulate()) {
// remove() succeeding means it was bookmarked: a toggle-off;
// otherwise add it and make sure the bookmark overlay is visible.
if (bookmarkList.remove(clicked.getValue())) {
if (bookmarkList.isEmpty()) {
worldConfig.setBookmarkEnabled(false);
}
return true;
} else {
worldConfig.setBookmarkEnabled(true);
return bookmarkList.add(clicked.getValue());
}
}
this.ingredientListOverlay.clearKeyboardFocus();
return true;
}
return false;
}
// True when the current screen has a visible, focused vanilla text field
// (found reflectively), so typing should not trigger JEI keybinds.
private boolean isContainerTextFieldFocused() {
Minecraft minecraft = Minecraft.getInstance();
Screen screen = minecraft.screen;
if (screen == null) {
return false;
}
TextFieldWidget textField = ReflectionUtil.getFieldWithClass(screen, TextFieldWidget.class);
return textField != null && textField.isVisible() && textField.isFocused();
}
}
|
<reponame>Damian070/pimp-my-pr
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ResolveHoursPipe } from './pipes/resolve-hours.pipe';
// Shared utility module: declares and re-exports ResolveHoursPipe so
// feature modules can import a single module to use it.
@NgModule({
imports: [CommonModule],
declarations: [ResolveHoursPipe],
exports: [ResolveHoursPipe]
})
export class PmpWebSharedUtilModule {}
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.load.sync;
import org.apache.doris.analysis.CreateDataSyncJobStmt;
import org.apache.doris.analysis.PauseSyncJobStmt;
import org.apache.doris.analysis.ResumeSyncJobStmt;
import org.apache.doris.analysis.StopSyncJobStmt;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.io.Writable;
import org.apache.doris.load.sync.SyncJob.JobState;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;
public class SyncJobManager implements Writable {
private static final Logger LOG = LogManager.getLogger(SyncJobManager.class);
// All sync jobs keyed by job id.
private Map<Long, SyncJob> idToSyncJob;
// dbId -> (job name -> jobs); one name can map to several historical jobs.
private Map<Long, Map<String, List<SyncJob>>> dbIdToJobNameToSyncJobs;
// Guards the two maps above (fair mode).
private ReentrantReadWriteLock lock;
// Creates an empty manager: concurrent maps plus a fair read/write lock.
public SyncJobManager() {
idToSyncJob = Maps.newConcurrentMap();
dbIdToJobNameToSyncJobs = Maps.newConcurrentMap();
lock = new ReentrantReadWriteLock(true);
}
// Creates a SyncJob from the DDL statement, registers it in both lookup
// maps, and records the creation in the edit log (so the job is replayed
// on metadata recovery / failover).
public void addDataSyncJob(CreateDataSyncJobStmt stmt) throws DdlException {
long jobId = Catalog.getCurrentCatalog().getNextId();
SyncJob syncJob = SyncJob.fromStmt(jobId, stmt);
writeLock();
try {
unprotectedAddSyncJob(syncJob);
// Logged while still holding the write lock, keeping the in-memory
// state and the edit log in the same order.
Catalog.getCurrentCatalog().getEditLog().logCreateSyncJob(syncJob);
} finally {
writeUnlock();
}
LOG.info("add sync job. {}", syncJob);
}
private void unprotectedAddSyncJob(SyncJob syncJob) {
idToSyncJob.put(syncJob.getId(), syncJob);
long dbId = syncJob.getDbId();
if (!dbIdToJobNameToSyncJobs.containsKey(dbId)) {
dbIdToJobNameToSyncJobs.put(syncJob.getDbId(), Maps.newConcurrentMap());
}
Map<String, List<SyncJob>> map = dbIdToJobNameToSyncJobs.get(dbId);
if (!map.containsKey(syncJob.getJobName())) {
map.put(syncJob.getJobName(), Lists.newArrayList());
}
map.get(syncJob.getJobName()).add(syncJob);
}
public void pauseSyncJob(PauseSyncJobStmt stmt) throws DdlException {
String dbName = stmt.getDbFullName();
String jobName = stmt.getJobName();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
throw new DdlException("Db does not exist. name: " + dbName);
}
List<SyncJob> syncJobs = Lists.newArrayList();
readLock();
try {
List<SyncJob> matchJobs = getSyncJobsByDbAndJobName(db.getId(), jobName);
if (matchJobs.isEmpty()) {
throw new DdlException("Load job does not exist");
}
List<SyncJob> runningSyncJob = matchJobs.stream().filter(entity -> entity.isRunning())
.collect(Collectors.toList());
if (runningSyncJob.isEmpty()) {
throw new DdlException("There is no running job with jobName `"
+ stmt.getJobName() + "` to pause");
}
syncJobs.addAll(runningSyncJob);
} finally {
readUnlock();
}
for (SyncJob syncJob : syncJobs) {
syncJob.pause();
}
}
public void resumeSyncJob(ResumeSyncJobStmt stmt) throws DdlException {
String dbName = stmt.getDbFullName();
String jobName = stmt.getJobName();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
throw new DdlException("Db does not exist. name: " + dbName);
}
List<SyncJob> syncJobs = Lists.newArrayList();
readLock();
try {
List<SyncJob> matchJobs = getSyncJobsByDbAndJobName(db.getId(), jobName);
if (matchJobs.isEmpty()) {
throw new DdlException("Load job does not exist");
}
List<SyncJob> pausedSyncJob = matchJobs.stream().filter(entity -> entity.isPaused())
.collect(Collectors.toList());
if (pausedSyncJob.isEmpty()) {
throw new DdlException("There is no paused job with jobName `"
+ stmt.getJobName() + "` to resume");
}
syncJobs.addAll(pausedSyncJob);
} finally {
readUnlock();
}
for (SyncJob syncJob : syncJobs) {
syncJob.resume();
}
}
public void stopSyncJob(StopSyncJobStmt stmt) throws DdlException {
String dbName = stmt.getDbFullName();
String jobName = stmt.getJobName();
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
throw new DdlException("Db does not exist. name: " + dbName);
}
// List of sync jobs waiting to be cancelled
List<SyncJob> syncJobs = Lists.newArrayList();
readLock();
try {
List<SyncJob> matchJobs = getSyncJobsByDbAndJobName(db.getId(), jobName);
if (matchJobs.isEmpty()) {
throw new DdlException("Load job does not exist");
}
List<SyncJob> uncompletedSyncJob = matchJobs.stream().filter(entity -> !entity.isCompleted())
.collect(Collectors.toList());
if (uncompletedSyncJob.isEmpty()) {
throw new DdlException("There is no uncompleted job with jobName `"
+ stmt.getJobName() + "`");
}
syncJobs.addAll(uncompletedSyncJob);
} finally {
readUnlock();
}
for (SyncJob syncJob : syncJobs) {
syncJob.cancel(SyncFailMsg.MsgType.USER_CANCEL, "user cancel");
}
}
// caller should hold the db lock
private List<SyncJob> getSyncJobsByDbAndJobName(long dbId, String jobName) {
List<SyncJob> syncJobs = Lists.newArrayList();
Map<String, List<SyncJob>> jobNameToSyncJobs = dbIdToJobNameToSyncJobs.get(dbId);
if (jobNameToSyncJobs != null) {
if (jobNameToSyncJobs.containsKey(jobName)) {
syncJobs.addAll(jobNameToSyncJobs.get(jobName));
}
}
return syncJobs;
}
public List<List<Comparable>> getSyncJobsInfoByDbId(long dbId) {
LinkedList<List<Comparable>> syncJobInfos = new LinkedList<List<Comparable>>();
readLock();
try {
if (!dbIdToJobNameToSyncJobs.containsKey(dbId)) {
return syncJobInfos;
}
Map<String, List<SyncJob>> jobNameToLoadJobs = dbIdToJobNameToSyncJobs.get(dbId);
List<SyncJob> syncJobs = Lists.newArrayList();
syncJobs.addAll(jobNameToLoadJobs.values()
.stream().flatMap(Collection::stream).collect(Collectors.toList()));
for (SyncJob syncJob : syncJobs) {
syncJobInfos.add(syncJob.getShowInfo());
}
return syncJobInfos;
} finally {
readUnlock();
}
}
public List<SyncJob> getSyncJobs(SyncJob.JobState state) {
List<SyncJob> result = Lists.newArrayList();
readLock();
try {
for (SyncJob job : idToSyncJob.values()) {
if (job.getJobState() == state) {
result.add(job);
}
}
} finally {
readUnlock();
}
return result;
}
public boolean isJobNameExist(String dbName, String jobName) throws DdlException {
Database db = Catalog.getCurrentCatalog().getDb(dbName);
if (db == null) {
throw new DdlException("Db does not exist. name: " + dbName);
}
boolean result = false;
readLock();
try {
Map<String, List<SyncJob>> jobNameToSyncJobs = dbIdToJobNameToSyncJobs.get(db.getId());
if (jobNameToSyncJobs != null && jobNameToSyncJobs.containsKey(jobName)) {
List<SyncJob> matchJobs = jobNameToSyncJobs.get(jobName);
for(SyncJob syncJob : matchJobs) {
if (!syncJob.isCancelled()) {
result = true;
}
}
}
} finally {
readUnlock();
}
return result;
}
public SyncJob getSyncJobById(long jobId) {
return idToSyncJob.get(jobId);
}
public void readLock() {
lock.readLock().lock();
}
public void readUnlock() {
lock.readLock().unlock();
}
private void writeLock() {
lock.writeLock().lock();
}
private void writeUnlock() {
lock.writeLock().unlock();
}
@Override
public void write(DataOutput out) throws IOException {
Collection<SyncJob> syncJobs = idToSyncJob.values();
out.writeInt(syncJobs.size());
for (SyncJob syncJob : syncJobs) {
syncJob.write(out);
}
}
public void readField(DataInput in) throws IOException {
int size = in.readInt();
for (int i = 0; i < size; i++) {
SyncJob syncJob = SyncJob.read(in);
if (!syncJob.isCompleted()) {
syncJob.updateState(JobState.PENDING, true);
}
unprotectedAddSyncJob(syncJob);
}
}
public void replayAddSyncJob(SyncJob syncJob) {
writeLock();
try {
unprotectedAddSyncJob(syncJob);
} finally {
writeUnlock();
}
}
public void replayUpdateSyncJobState(SyncJob.SyncJobUpdateStateInfo info) {
writeLock();
try {
long jobId = info.getId();
SyncJob job = idToSyncJob.get(jobId);
if (job == null) {
LOG.warn("replay update sync job state failed. Job was not found, id: {}", jobId);
return;
}
job.replayUpdateSyncJobState(info);
} finally {
writeUnlock();
}
}
} |
# frozen_string_literal: true
module GraphQL
  # An Interface contains a collection of types which implement some of the same fields.
  #
  # Interfaces can have fields, defined with `field`, just like an object type.
  #
  # Objects which implement this field _inherit_ field definitions from the interface.
  # An object type can override the inherited definition by redefining that field.
  #
  # @example An interface with three fields
  #   DeviceInterface = GraphQL::InterfaceType.define do
  #     name("Device")
  #     description("Hardware devices for computing")
  #
  #     field :ram, types.String
  #     field :processor, ProcessorType
  #     field :release_year, types.Int
  #   end
  #
  # @example Implementing an interface with an object type
  #   Laptoptype = GraphQL::ObjectType.define do
  #     interfaces [DeviceInterface]
  #   end
  #
  class InterfaceType < GraphQL::BaseType
    # Wire `fields`/`field` into the `.define` DSL.
    accepts_definitions :fields, field: GraphQL::Define::AssignObjectField

    # @return [Hash{String => GraphQL::Field}] fields by name
    attr_accessor :fields
    # Accessing `fields` forces lazy definition to run first.
    ensure_defined :fields

    # Start with no fields; they are added via the definition DSL.
    def initialize
      super
      @fields = {}
    end

    # Duplicate the fields hash so a copy can diverge without
    # mutating the original's field set.
    def initialize_copy(other)
      super
      @fields = other.fields.dup
    end

    def kind
      GraphQL::TypeKinds::INTERFACE
    end

    # @return [GraphQL::Field] The defined field for `field_name`
    def get_field(field_name)
      fields[field_name]
    end

    # @return [Array<GraphQL::Field>] All fields on this type
    def all_fields
      fields.values
    end
  end
end
|
<gh_stars>10-100
package io.opensphere.mantle.crust;
import java.awt.Color;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import io.opensphere.core.Toolbox;
import io.opensphere.core.data.DataRegistry;
import io.opensphere.core.data.util.DataModelCategory;
import io.opensphere.core.model.Altitude.ReferenceLevel;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.order.OrderManager;
import io.opensphere.core.order.OrderManagerRegistry;
import io.opensphere.core.order.impl.DefaultOrderCategory;
import io.opensphere.core.order.impl.DefaultOrderParticipantKey;
import io.opensphere.core.util.Constants;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.MantleToolbox;
import io.opensphere.mantle.controller.DataTypeController;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.LoadsTo;
import io.opensphere.mantle.data.MapVisualizationType;
import io.opensphere.mantle.data.MetaDataInfo;
import io.opensphere.mantle.data.element.DataElement;
import io.opensphere.mantle.data.element.impl.SimpleDataElementProvider;
import io.opensphere.mantle.data.impl.DefaultBasicVisualizationInfo;
import io.opensphere.mantle.data.impl.DefaultDataGroupInfo;
import io.opensphere.mantle.data.impl.DefaultDataGroupInfoAssistant;
import io.opensphere.mantle.data.impl.DefaultDataTypeInfo;
import io.opensphere.mantle.data.impl.DefaultMapFeatureVisualizationInfo;
/**
* Bundle together all of the things that comprise a loaded data set, and
* provide methods for managing the data. The containing class maintains a map
* of these, indexed by name.
*/
public class GenericDataSet
{
    /** Layer name. */
    private final String name;

    /** Key for accessing the DataRegistry. */
    private final DataModelCategory myDataModelCategory;

    /** Containing data group (one per DataTypeInfo). */
    private DefaultDataGroupInfo group;

    /** The data type (one per request). */
    private DataTypeInfo myType;

    /** IDs in Mantle. We need these to delete records from Mantle. */
    private long[] mantleIds;

    /** Name of the latitude field. */
    private String myLatitudeFieldName;

    /** Name of the longitude field. */
    private String myLongitudeFieldName;

    /** The controller through which data types are accessed. */
    private final DataTypeController myDataTypeController;

    /** The registry through which data is accessed. */
    private final DataRegistry myDataRegistry;

    /** The supplier used to get the root name of the data set. */
    private final Supplier<String> myRootNameSupplier;

    /** The toolbox through which application state is accessed. */
    private final Toolbox myToolbox;

    /** The registry used to manage the z-order of the data layer. */
    private final OrderManagerRegistry myOrderManagerRegistry;

    /** The manager instance used to maintain named data sets. */
    private final NamedDataSetManager myNamedDataSetManager;

    /**
     * Pass in the basic components.
     *
     * @param namedDataSetManager The manager instance used to maintain named
     *            data sets.
     * @param toolbox the toolbox through which application state is accessed.
     * @param dataModelCategory the category to which the data set will be
     *            assigned.
     * @param typeId name of the layer
     * @param rootNameSupplier the supplier used to get the root name of the
     *            data set.
     */
    public GenericDataSet(NamedDataSetManager namedDataSetManager, Toolbox toolbox, DataModelCategory dataModelCategory,
            String typeId, Supplier<String> rootNameSupplier)
    {
        myNamedDataSetManager = namedDataSetManager;
        myToolbox = toolbox;
        myDataRegistry = toolbox.getDataRegistry();
        myOrderManagerRegistry = myToolbox.getOrderManagerRegistry();
        MantleToolbox mantleTools = toolbox.getPluginToolboxRegistry().getPluginToolbox(MantleToolbox.class);
        myDataTypeController = mantleTools.getDataTypeController();
        // NOTE(review): return value is discarded — this call appears to have no
        // useful effect here; confirm whether it is needed for lazy initialization.
        mantleTools.getDataGroupController();
        myDataModelCategory = dataModelCategory;
        name = typeId;
        myRootNameSupplier = rootNameSupplier;
    }

    /**
     * Gets the value of the {@link #name} field.
     *
     * @return the value stored in the {@link #name} field.
     */
    public String getName()
    {
        return name;
    }

    /**
     * Specify the names of latitude and longitude fields.
     *
     * @param latitudeFieldName the name of the latitude field.
     * @param longitudeFieldName the name of the longitude field.
     */
    public void setMapSupport(String latitudeFieldName, String longitudeFieldName)
    {
        myLatitudeFieldName = latitudeFieldName;
        myLongitudeFieldName = longitudeFieldName;
    }

    /**
     * Construct and register Mantle support, including a data group and a data
     * type.
     *
     * @param parent the parent group
     * @param meta the object in which metadata structures are defined.
     */
    public void create(DataGroupInfo parent, MetaDataInfo meta)
    {
        // Group and type share the layer name in this variant.
        create(parent, name, name, meta);
    }

    /**
     * Alternate create, allowing group and type names to differ.
     *
     * @param parent the parent group
     * @param groupId the group name
     * @param typeId the layer name
     * @param meta the object in which metadata structures are defined.
     */
    protected void create(DataGroupInfo parent, String groupId, String typeId, MetaDataInfo meta)
    {
        // Order of the operations in the implementation of this method is
        // deceptively important. Moving things around here can cause things
        // to stop working. Here are highlights (in no particular order):
        // - DataGroupInfo and DataTypeInfo are created
        // - the type is added to the group, which is added to its own parent
        // - the DataTypeController is notified of the new DataTypeInfo
        group = createDataGroup(parent, groupId);
        group.setAssistant(new DefaultAssistant(myNamedDataSetManager));
        myType = generateDataType(typeId, groupId, myRootNameSupplier.get(), meta);
        group.addMember(myType, this);
        parent.addChild(group, this);
        group.activationProperty().setActive(true);
        group.setGroupVisible(null, true, false, this);
        // GBDXMantleController listens for deactivation and removes data
        // group.activationProperty().addListener(myActivationListener);
        // NOTE(review): "Your"/"Mom" look like placeholder source/category
        // arguments left in by accident — confirm the intended values for
        // addDataType before shipping.
        myDataTypeController.addDataType("Your", "Mom", myType, this);
    }

    /**
     * Creates a new data type, storing it in the order manager registry, and
     * registering it with the supplied types.
     *
     * @param name layer name
     * @param parent parent group name
     * @param rootName I don't know what this is
     * @param meta the object in which metadata structures are defined.
     * @return the constructed DataTypeInfo instance
     */
    protected DataTypeInfo generateDataType(String name, String parent, String rootName, MetaDataInfo meta)
    {
        // construct the type as a DefaultDataTypeInfo and endow it with stuff
        DefaultDataTypeInfo type = new DefaultDataTypeInfo(myToolbox, rootName, parent, name, name, true, meta);
        type.setBasicVisualizationInfo(new DefaultBasicVisualizationInfo(LoadsTo.STATIC,
                DefaultBasicVisualizationInfo.LOADS_TO_STATIC_ONLY, Color.ORANGE, true));
        type.setMapVisualizationInfo(new DefaultMapFeatureVisualizationInfo(MapVisualizationType.POINT_ELEMENTS, true));
        type.setOrderKey(new DefaultOrderParticipantKey(DefaultOrderCategory.DEFAULT_FEATURE_LAYER_FAMILY,
                DefaultOrderCategory.FEATURE_CATEGORY, type.getTypeKey()));
        // do the z-order thing
        OrderManager manager = myOrderManagerRegistry.getOrderManager(type.getOrderKey());
        int zorder = manager.activateParticipant(type.getOrderKey());
        type.getMapVisualizationInfo().setZOrder(zorder, null);
        return type;
    }

    /**
     * Create a group, with the new instance registered as a child of the
     * supplied parent.
     *
     * @param parent parent group
     * @param groupId its ID
     * @return the group
     */
    protected DefaultDataGroupInfo createDataGroup(DataGroupInfo parent, String groupId)
    {
        return new DataGroup(parent, groupId, myToolbox);
    }

    /**
     * Raw data are converted for participation in the Mantle. To wit:
     * <ul>
     * <li>Wrap each basic record as a DataElement with reference to the
     * resident DataTypeInfo</li>
     * <li>Create a DataElementProvider, which is a glorified Iterator, to
     * contain/introduce the DataElements</li>
     * <li>Pass the DataElementProvider to the DataTypeController, causing the
     * DataElements to be inserted and assigned unique IDs; those IDs are
     * returned and can be kept for future management</li>
     * </ul>
     *
     * @param recs the List of data records
     * @param cols the set of field (column) names
     */
    public void populate(List<Map<String, Object>> recs, Set<String> cols)
    {
        List<DataElement> elements = New.list();
        // Each element is paired with its predecessor so polyline segments can
        // be formed; the first record has no predecessor.
        Map<String, Object> previousData = null;
        for (Map<String, Object> record : recs)
        {
            DataElement element = createElement(record, cols, previousData);
            elements.add(element);
            previousData = record;
        }
        List<Long> idList = myDataTypeController.addDataElements(new SimpleDataElementProvider(myType, elements.iterator()), null,
                null, this);
        if (idList == null)
        {
            return;
        }
        // Keep the assigned Mantle IDs so the records can be removed later.
        mantleIds = new long[idList.size()];
        int i = 0;
        for (Long id : idList)
        {
            mantleIds[i++] = id.longValue();
        }
    }

    /**
     * Create a {@link DataElement} from the specified data.
     *
     * @param data map of field name to field value
     * @param cols all field names (including those with null value)
     * @param previousData the previous element we created in order to make a
     *            polyline, or null if data is the first point.
     * @return a DataElement generated from the specified data.
     */
    public DataElement createElement(Map<String, Object> data, Set<String> cols, Map<String, Object> previousData)
    {
        // Without configured lat/lon fields, fall back to a non-map element.
        if (myLatitudeFieldName == null || myLongitudeFieldName == null)
        {
            return DataUtil.createDataElement(data, cols);
        }
        Object latObj = data.get(myLatitudeFieldName);
        Object lonObj = data.get(myLongitudeFieldName);
        if (!(latObj instanceof Double && lonObj instanceof Double))
        {
            return DataUtil.createDataElement(data, cols);
        }
        // NOTE(review): assumes every record carries a non-null Double "Altitude"
        // field (in feet); a missing value will NPE here — confirm upstream
        // guarantees. Also note the second point reuses the CURRENT record's
        // altitude rather than previousData's — verify that is intentional.
        LatLonAlt firstPoint = LatLonAlt.createFromDegreesMeters(((Double)latObj).doubleValue(), ((Double)lonObj).doubleValue(),
                Constants.METERS_PER_FEET * ((Double)data.get("Altitude")).doubleValue(), ReferenceLevel.ELLIPSOID);
        LatLonAlt secondPoint = null;
        if (previousData != null)
        {
            secondPoint = LatLonAlt.createFromDegreesMeters(((Double)previousData.get(myLatitudeFieldName)).doubleValue(),
                    ((Double)previousData.get(myLongitudeFieldName)).doubleValue(),
                    Constants.METERS_PER_FEET * ((Double)data.get("Altitude")).doubleValue(), ReferenceLevel.ELLIPSOID);
        }
        return DataUtil.createMapDataElement(new SimpleMetaDataProvider(data, cols), myType, firstPoint, secondPoint);
    }

    /**
     * Listener for an event indicating that the user has requested to delete
     * this dataset.
     */
    private class DefaultAssistant extends DefaultDataGroupInfoAssistant
    {
        /**
         * The manager instance used to handle named data sets.
         */
        private final NamedDataSetManager namedDataSetManager;

        /**
         * Creates a new default assistant, populated with the supplied manager.
         *
         * @param pNamedDataSetManager The manager instance used to handle named
         *            data sets.
         */
        public DefaultAssistant(NamedDataSetManager pNamedDataSetManager)
        {
            namedDataSetManager = pNamedDataSetManager;
        }

        @Override
        public boolean canDeleteGroup(DataGroupInfo dgi)
        {
            // This data set is always user-deletable.
            return true;
        }

        /**
         * {@inheritDoc}
         *
         * @see DefaultDataGroupInfoAssistant#deleteGroup(io.opensphere.mantle.data.DataGroupInfo,
         *      java.lang.Object)
         */
        @Override
        public void deleteGroup(DataGroupInfo dgi, Object source)
        {
            // Tear down in the reverse order of creation: unregister the name,
            // deactivate and detach the group, then purge registry and Mantle.
            namedDataSetManager.removeDataSet(name);
            group.activationProperty().setActive(false);
            group.getParent().removeChild(group, null);
            myDataRegistry.removeModels(myDataModelCategory, false);
            if (mantleIds != null)
            {
                myDataTypeController.removeDataElements(myType, mantleIds);
            }
            myDataTypeController.removeDataType(myType, null);
        }
    }
}
|
using System;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Minimal forward-only enumerator over an <see cref="IList{T}"/>.
/// Tolerates a null list (enumerates nothing) and supports <see cref="Reset"/>.
/// </summary>
public class EnumeratorIList<T> : IEnumerator<T>
{
    private IList<T> _list;
    private int _index;

    public EnumeratorIList(IList<T> list)
    {
        _list = list;
        _index = -1;
    }

    /// <summary>Element under the cursor; only valid after a successful MoveNext().</summary>
    public T Current => _list[_index];

    object IEnumerator.Current => Current;

    /// <summary>Advances the cursor; returns false once the list (or a null list) is exhausted.</summary>
    public bool MoveNext() => ++_index < (_list?.Count ?? 0);

    /// <summary>Rewinds the cursor to just before the first element.</summary>
    public void Reset() => _index = -1;

    /// <summary>No resources to release.</summary>
    public void Dispose()
    {
    }
}
<gh_stars>1-10
import React from 'react'
import ReactDOM from 'react-dom'
import './css/common.css'
class App extends React.Component {
state = {
comments: [
{ id: 1, name: 'Jack', content: '沙发!!' },
{ id: 2, name: 'rose', content: '板凳~~' },
{ id: 3, name: 'Tom', content: '楼主好人' }
],
username: '',
userContent: ''
}
// 处理事件
handleForm = e => {
const { name, value } = e.target
this.setState({
[name]: value
})
}
// 提交数据
addComment = () => {
const { comments, username, userContent } = this.state
if(username.trim() === '' || userContent.trim() === '') {
alert('请输入用户名或评论内容')
return
}
// 将数据添加到数组
const newComments = [{
id: Math.random(),
name: username,
content: userContent
}, ...comments]
// 更新状态与清空我文本框
this.setState({
comments: newComments,
username: '',
userContent: ''
})
}
// 渲染评论列表
rebderList() {
const { comments } = this.state
if (comments.length === 0) {
return <div className="box1-3">暂无评论,快去评论吧~</div>
}
return (<ul>
{
comments.map(item => (
<li key={item.id}>
<h3>{item.name}</h3>
<p>{item.content}</p>
</li>
))
}
</ul>)
}
render() {
const { username, userContent } = this.state
return (
<div className="box">
<div className="box1">
<input type="text" className="box1-1" name="username" value={username} onChange={this.handleForm}></input>
<textarea className="box1-2" name="userContent" value={userContent} onChange={this.handleForm}></textarea>
<button onClick={this.addComment}>发表评论</button>
</div>
{/* 通过条件渲染判断显示内容 */}
{this.rebderList()}
</div>
)
}
}
ReactDOM.render(<App />, document.getElementById('root')) |
<gh_stars>10-100
package com.nolanlawson.keepscore.db;
import java.util.Comparator;
import java.util.List;
import com.nolanlawson.keepscore.util.CollectionUtil;
import com.nolanlawson.keepscore.util.CollectionUtil.Function;
import com.nolanlawson.keepscore.util.Functions;
import com.nolanlawson.keepscore.util.LongUtil;
/**
* Smaller POJO representing a "Game" object. Designed so that the main page, with its list of all games,
* can be very fast.
* @author nolan
*
*/
public class GameSummary {

    /** Database row id; -1 until the game has been persisted. */
    private int id = -1;

    /** Display name of the game. */
    private String name;

    /** Names of the players in this game, in player order. */
    private List<String> playerNames;

    /** Number of rounds played (max history size across players). */
    private int numRounds;

    /** Epoch-millis timestamp of the last save. */
    private long dateSaved;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<String> getPlayerNames() {
        return playerNames;
    }

    public void setPlayerNames(List<String> playerNames) {
        this.playerNames = playerNames;
    }

    public int getNumRounds() {
        return numRounds;
    }

    public void setNumRounds(int numRounds) {
        this.numRounds = numRounds;
    }

    public long getDateSaved() {
        return dateSaved;
    }

    public void setDateSaved(long dateSaved) {
        this.dateSaved = dateSaved;
    }

    /** Hash is derived solely from the id, consistent with equals(). */
    @Override
    public int hashCode() {
        final int prime = 31;
        // Equivalent to the usual prime * 1 + id boilerplate.
        return prime + id;
    }

    /** Two summaries are equal iff they have the same database id. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return id == ((GameSummary) obj).id;
    }

    /**
     * Comparator ordering summaries newest-first by save timestamp.
     *
     * @return a descending-by-dateSaved comparator
     */
    public static Comparator<GameSummary> byRecentlySaved() {
        return new Comparator<GameSummary>() {
            @Override
            public int compare(GameSummary object1, GameSummary object2) {
                // Reversed argument order yields descending (most recent first).
                return LongUtil.compare(object2.getDateSaved(), object1.getDateSaved());
            }
        };
    }

    /**
     * Builds a lightweight summary from a full Game: copies the scalar fields,
     * projects player names, and takes the longest score history as the round
     * count.
     *
     * @param game the full game to summarize
     * @return the new summary
     */
    public static GameSummary fromGame(Game game) {
        GameSummary result = new GameSummary();
        result.setId(game.getId());
        result.setName(game.getName());
        result.setDateSaved(game.getDateSaved());
        result.setPlayerNames(CollectionUtil.transform(game.getPlayerScores(), new Function<PlayerScore, String>(){

            @Override
            public String apply(PlayerScore obj) {
                return obj.getName();
            }
        }));
        result.setNumRounds(CollectionUtil.max(game.getPlayerScores(), Functions.PLAYER_SCORE_TO_HISTORY_SIZE));
        return result;
    }

    /** Projection function extracting the id, for use with CollectionUtil. */
    public static final Function<GameSummary, Integer> GET_ID = new Function<GameSummary, Integer>() {

        @Override
        public Integer apply(GameSummary obj) {
            return obj.getId();
        }
    };
}
|
import React, { useState, useEffect } from "react";
import axios from "axios";
const SearchRestaurants = () => {
const [query, setQuery] = useState("");
const [restaurants, setRestaurants] = useState([]);
useEffect(() => {
const fetchRestaurants = async () => {
const response = await axios.get(
`https://www.someapi.com/restaurants?q=${query}`
);
setRestaurants(response.data.restaurants);
};
fetchRestaurants();
}, [query]);
return (
<>
<h1>Search Restaurants</h1>
<input
type="text"
placeholder="Search Restaurant..."
value={query}
onChange={event => setQuery(event.target.value)}
/>
{restaurants.map(restaurant => (
<div key={restaurant.id}>
<h3>{restaurant.name}</h3>
<p>Rating: {restaurant.rating}</p>
</div>
))}
</>
);
};
export default SearchRestaurants; |
#include <stdio.h>
// Function to reverse the array
/* Reverse arr[start..end] in place using two converging cursors.
 * A no-op when start >= end (empty or single-element range). */
void reverse_array(int arr[], int start, int end)
{
    int lo = start;
    int hi = end;
    int tmp;

    while (lo < hi)
    {
        tmp = arr[lo];
        arr[lo] = arr[hi];
        arr[hi] = tmp;
        lo++;
        hi--;
    }
}
/* Demo driver: reverse a fixed array and print it space-separated. */
int main()
{
    int values[] = {1, 2, 3, 4};
    /* Element count derived from the array's total size. */
    int count = sizeof(values) / sizeof(values[0]);
    int idx;

    reverse_array(values, 0, count - 1);
    for (idx = 0; idx < count; idx++)
        printf("%d ", values[idx]);
    return 0;
}
from groceries.configs.config_types import Settings
# Package-wide settings instance.
settings = Settings(
    # Mixed metric/imperial unit formatting variant.
    unit_formatting_variant='metric_imperial_formatting',
    # Disable small-fraction rendering — presumably quantities print as
    # decimals rather than fractions; confirm against Settings docs.
    small_fractions=False
)
|
#!/bin/bash
set -e

# Absolute path of the directory containing this script.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"

# Run each component installer in order; set -e aborts on the first failure.
for component in zsh vscode git; do
  bash "$DIR/$component/install.sh"
done
|
#!/bin/sh
# Start a virtual framebuffer X server on display :99 in the background.
# -ac disables access control so any client may connect.
Xvfb -ac :99 &
|
def draw_rectangle(height, max_width):
    """Return an ASCII-art rectangle outline of the given dimensions.

    The top and bottom rows are solid asterisks; interior rows are hollow.
    For dimensions smaller than 3x3 an error message string is returned
    (matching the original contract) rather than raising.
    """
    if height < 3 or max_width < 3:
        return "Invalid dimensions for rectangle."
    solid = "*" * max_width
    hollow = "*" + " " * (max_width - 2) + "*"
    rows = [solid] + [hollow] * (height - 2) + [solid]
    return "\n".join(rows)
# Example usage: print a 5-row by 10-column rectangle outline.
HEIGHT = 5
MAX_WIDTH = 10
print(draw_rectangle(HEIGHT, MAX_WIDTH))
#!/usr/bin/env bash
# Make the local alexafsm package importable, then dump the skill-search
# state machine description to machine.txt.
export PYTHONPATH=./alexafsm:$PYTHONPATH
python ./tests/skillsearch/bin/print_machine.py > ./tests/skillsearch/machine.txt
|
<gh_stars>0
package main
import (
"errors"
"fmt"
"go-start/dao"
"log"
"net"
"net/http"
"net/rpc"
"os"
)
// https://studygolang.com/articles/14336
// Arith is the (stateless) receiver type exposing arithmetic operations
// over net/rpc.
type Arith struct {
}
// Multiply stores req.A * req.B in res.Pro. It never fails; the error
// return only satisfies the net/rpc method signature contract.
func (a *Arith) Multiply(req dao.ArithRequest, res *dao.ArithResponse) error {
	res.Pro = req.A * req.B
	return nil
}
// Divide stores the quotient of req.A / req.B in res.Quo and the remainder
// in res.Rem. Returns an error (instead of panicking) when req.B is zero.
func (a *Arith) Divide(req dao.ArithRequest, res *dao.ArithResponse) error {
	if req.B == 0 {
		return errors.New("divide by zero")
	}
	res.Quo = req.A / req.B
	res.Rem = req.A % req.B
	return nil
}
// main registers the Arith service, mounts it over HTTP, and serves forever
// on 127.0.0.1:2181.
func main() {
	// The original ignored this error; a duplicate/invalid registration
	// should abort startup rather than fail silently at call time.
	if err := rpc.RegisterName("Arith", new(Arith)); err != nil {
		log.Fatalln("RegisterName error: ", err)
	}
	rpc.HandleHTTP() // use HTTP as the RPC transport
	listener, err := net.Listen("tcp", "127.0.0.1:2181")
	if err != nil {
		log.Fatalln("ListenTCP error: ", err)
	}
	fmt.Fprintf(os.Stdout, "%s", "start connection")
	// http.Serve blocks; it always returns a non-nil error on shutdown.
	if err := http.Serve(listener, nil); err != nil {
		log.Fatalln("Serve error: ", err)
	}
}
|
const path = require('path');
const chalk = require('chalk');
const { fork } = require('child_process');
/**
 * Wires the TSLint child-process lifecycle into a webpack compiler.
 *
 * @param {Object} options - plugin options; `files` is a glob string or array of globs.
 * @param {Object} compiler - the webpack compiler (v4 `hooks` API or the legacy `plugin` API).
 */
function apply(options, compiler) {
  let linterProcess;

  // Before a new compilation starts, kill any lint run still in flight.
  function compileHook() {
    if (linterProcess && linterProcess.kill) {
      // Exits any outstanding child process if one exists
      linterProcess.kill();
    }
  }

  // After each build, spawn a fresh child process to run the linter.
  function doneHook() {
    const { files = [] } = options;
    if (!files.length) {
      process.stdout.write(chalk.yellow.bold('\n[tslint-plugin] No `files` option specified.\n\n'));
      return;
    }
    options.files = Array.isArray(files) ? files : [files];

    // Spawn a child process to run the linter
    linterProcess = fork(path.resolve(__dirname, 'linter.js'), [JSON.stringify(options)]);

    // Drop the reference once the child exits. The original used
    // `delete linterProcess`, which is a no-op on a local variable
    // (and a SyntaxError in strict mode); assignment is the correct fix.
    linterProcess.once('exit', () => {
      linterProcess = null;
    });
  }

  if (compiler.hooks) {
    // Webpack 4
    compiler.hooks.compile.tap('TSLintWebpackPlugin', compileHook);
    compiler.hooks.done.tap('TSLintWebpackPlugin', doneHook);
  } else {
    // Backwards compatibility
    compiler.plugin('compile', compileHook);
    compiler.plugin('done', doneHook);
  }
}
module.exports = function TSLintWebpackPlugin(options = {}) {
return {
apply: apply.bind(this, options)
};
};
|
#!/bin/bash
# Run dieharder test 205 against generator 50 with a fixed seed so the
# run is reproducible.
dieharder -d 205 -g 50 -S 736015011
|
class Program
{
    // Validates a simple email of the form local@domain.tld where local and
    // domain are alphanumeric and the TLD is 2-3 letters. Inputs shorter
    // than 8 characters are rejected outright.
    // NOTE(review): needs `using System.Text.RegularExpressions;` at the top
    // of the file for Regex — confirm it is present.
    static bool validateEmail(string email)
    {
        if (email.Length < 8) return false;
        string pattern = @"^[a-zA-Z0-9]+@[A-Za-z0-9]+\.[A-Za-z]{2,3}$";
        // Return the match result directly instead of branching to true/false.
        return Regex.IsMatch(email, pattern);
    }

    static void Main(string[] args)
    {
        string email = "example@gmail.com";
        Console.WriteLine(validateEmail(email));
        // Output: true
    }
}
#!/usr/bin/env bash
# Copyright 2019 The KubeOne Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -e exit on error, -u error on unset vars, -o pipefail fail pipelines,
# -x trace every command (output will be verbose).
set -euox pipefail

# Map of supported Kubernetes minor versions to the exact patch release to fetch.
declare -A full_versions
full_versions["1.21"]="v1.21.12"
full_versions["1.22"]="v1.22.9"
full_versions["1.23"]="v1.23.6"
full_versions["1.24"]="v1.24.0"

# Install root and scratch directory (both overridable via environment).
root_dir=${KUBETESTS_ROOT:-"/opt/kube-test"}
tmp_root=${TMP_ROOT:-"/tmp/get-kube"}

# Download and unpack each version that is not already installed, then strip
# out binaries and architectures that the tests do not need.
for version in "${!full_versions[@]}"; do
  full_version="${full_versions[${version}]}"
  directory="${root_dir}/kubernetes-${version}"
  tmp_dir="${tmp_root}/kubernetes-${version}"
  if [[ ! -d "${directory}" ]]; then
    mkdir -p "${tmp_dir}"
    mkdir -p "${directory}"
    curl -L https://gcsweb.k8s.io/gcs/kubernetes-release/release/"${full_version}"/kubernetes.tar.gz -o "${tmp_dir}"/kubernetes.tar.gz
    tar -zxvf "${tmp_dir}"/kubernetes.tar.gz -C "${tmp_dir}"
    mv "${tmp_dir}"/* "${directory}"/
    # get-kube-binaries.sh must run from inside the unpacked tree.
    cd ${directory}/kubernetes
    KUBERNETES_SERVER_ARCH=amd64 KUBE_VERSION="${full_version}" KUBERNETES_DOWNLOAD_TESTS=true KUBERNETES_SKIP_CONFIRM=true ./cluster/get-kube-binaries.sh
    cd -
    # Remove archives, non-amd64 platforms, and unneeded helper binaries to
    # keep the installed footprint small.
    find "${directory}" -name "*.tar.gz" -type f -delete
    rm -rf "${directory}"/kubernetes/platforms/linux/arm
    rm -rf "${directory}"/kubernetes/platforms/linux/arm64
    rm -rf "${directory}"/kubernetes/platforms/linux/ppc64le
    rm -rf "${directory}"/kubernetes/platforms/linux/s390x
    rm "${directory}"/kubernetes/platforms/linux/amd64/gendocs
    rm "${directory}"/kubernetes/platforms/linux/amd64/genkubedocs
    rm "${directory}"/kubernetes/platforms/linux/amd64/genman
    rm "${directory}"/kubernetes/platforms/linux/amd64/genswaggertypedocs
    rm "${directory}"/kubernetes/platforms/linux/amd64/genyaml
    rm "${directory}"/kubernetes/platforms/linux/amd64/kubemark
    rm "${directory}"/kubernetes/platforms/linux/amd64/linkcheck
  fi
done

# Clean up all scratch directories.
rm -rf /tmp/get-kube*
|
#!/bin/bash
# Generates Fabric connection profiles (JSON and YAML) for two peer orgs by
# substituting org number, ports, and certificates into template files.

# Collapse a PEM file onto one line, escaping newlines so the certificate can
# be embedded inside a JSON/YAML string value.
function one_line_pem {
    echo "`awk 'NF {sub(/\\n/, ""); printf "%s\\\\\\\n",$0;}' $1`"
}

# Render the JSON template with: $1=org number, $2=peer port, $3=CA port,
# $4=peer TLS cert path, $5=CA cert path. Uses # as the sed delimiter for the
# PEM blobs because their content contains slashes.
function json_ccp {
    local PP=$(one_line_pem $4)
    local CP=$(one_line_pem $5)
    sed -e "s/\${ORG}/$1/" \
        -e "s/\${P0PORT}/$2/" \
        -e "s/\${CAPORT}/$3/" \
        -e "s#\${PEERPEM}#$PP#" \
        -e "s#\${CAPEM}#$CP#" \
        organizations/ccp-template.json
}

# Same as json_ccp but for the YAML template; the trailing sed re-expands the
# escaped newlines into real indented lines for YAML block formatting.
function yaml_ccp {
    local PP=$(one_line_pem $4)
    local CP=$(one_line_pem $5)
    sed -e "s/\${ORG}/$1/" \
        -e "s/\${P0PORT}/$2/" \
        -e "s/\${CAPORT}/$3/" \
        -e "s#\${PEERPEM}#$PP#" \
        -e "s#\${CAPEM}#$CP#" \
        organizations/ccp-template.yaml | sed -e $'s/\\\\n/\\\n        /g'
}

# Org 1 (CSP1): peer on 7051, CA on 7054.
ORG=1
P0PORT=7051
CAPORT=7054
PEERPEM=organizations/peerOrganizations/CSP1.cloud.com/tlsca/tlsca.CSP1.cloud.com-cert.pem
CAPEM=organizations/peerOrganizations/CSP1.cloud.com/ca/ca.CSP1.cloud.com-cert.pem

echo "$(json_ccp $ORG $P0PORT $CAPORT $PEERPEM $CAPEM)" > organizations/peerOrganizations/CSP1.cloud.com/connection-CSP1.json
echo "$(yaml_ccp $ORG $P0PORT $CAPORT $PEERPEM $CAPEM)" > organizations/peerOrganizations/CSP1.cloud.com/connection-CSP1.yaml

# Org 2 (CSP2): peer on 9051, CA on 8054.
ORG=2
P0PORT=9051
CAPORT=8054
PEERPEM=organizations/peerOrganizations/CSP2.cloud.com/tlsca/tlsca.CSP2.cloud.com-cert.pem
CAPEM=organizations/peerOrganizations/CSP2.cloud.com/ca/ca.CSP2.cloud.com-cert.pem

echo "$(json_ccp $ORG $P0PORT $CAPORT $PEERPEM $CAPEM)" > organizations/peerOrganizations/CSP2.cloud.com/connection-CSP2.json
echo "$(yaml_ccp $ORG $P0PORT $CAPORT $PEERPEM $CAPEM)" > organizations/peerOrganizations/CSP2.cloud.com/connection-CSP2.yaml
|
package com.redislabs.lettusearch.aggregate.reducer;
import com.redislabs.lettusearch.aggregate.Reducer;
import com.redislabs.lettusearch.protocol.RediSearchCommandArgs;
import lombok.Getter;
import lombok.Setter;
/**
 * Base class for aggregation reducers that operate on a single property.
 * Holds the property name (with Lombok-generated accessors) and forwards it
 * to the subclass-specific {@code buildFunction(args, property)} hook when
 * the command arguments are assembled.
 */
@Getter
@Setter
public abstract class AbstractPropertyReducer extends Reducer {

    private String property;

    protected AbstractPropertyReducer(String as, String property) {
        super(as);
        this.property = property;
    }

    @Override
    protected <K, V> void buildFunction(RediSearchCommandArgs<K, V> args) {
        buildFunction(args, this.property);
    }

    /** Subclasses append their reducer-specific arguments for {@code property}. */
    protected abstract <K, V> void buildFunction(RediSearchCommandArgs<K, V> args, String property);
}
|
<filename>src/main/java/com/github/blindpirate/gogradle/core/dependency/produce/external/gpm/GpmDependencyFactory.java
/*
* Copyright 2016-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.blindpirate.gogradle.core.dependency.produce.external.gpm;
import com.github.blindpirate.gogradle.core.dependency.produce.ExternalDependencyFactory;
import javax.inject.Singleton;
import java.io.File;
import java.util.List;
import java.util.Map;
// https://github.com/pote/gpm
/**
 * Dependency factory for projects managed with gpm.
 * A gpm project is identified by a "Godeps" manifest in its root; parsing of
 * that manifest is delegated to {@link GodepsParser}.
 */
@Singleton
public class GpmDependencyFactory extends ExternalDependencyFactory {
    // Never reassigned, so declared final; the parser carries no per-call state here.
    private final GodepsParser parser = new GodepsParser();

    /** @return the manifest file name ("Godeps") that marks a gpm project */
    @Override
    public String identityFileName() {
        return "Godeps";
    }

    /**
     * Converts the given Godeps manifest into dependency notation maps.
     *
     * @param file the Godeps file found in the package root
     * @return one notation map per declared dependency, as produced by the parser
     */
    @Override
    protected List<Map<String, Object>> adapt(File file) {
        return parser.parse(file);
    }
}
|
<gh_stars>0
/*
* Project: FullereneViewer
* Version: 1.0
* Copyright: (C) 2011-14 Dr.Sc.KAWAMOTO,Takuji (Ext)
*/
#ifndef __OBJECTSTRING_H__
#define __OBJECTSTRING_H__
#include "Object.h"
#include "MyString.h"
// ObjectString: a string type that participates in the Object hierarchy.
// Inherits publicly from Object but *privately* from MyString (class
// inheritance defaults to private), so MyString's interface is only
// re-exposed selectively below.
class ObjectString : public Object, MyString {
// friend classes & functions
// members
// private tools
// constructors & the destructor
public:
ObjectString();
ObjectString(const char* value);
ObjectString(const ObjectString& you);
#if 0
// NOTE(review): these assignment operators are compiled out; assignment
// currently falls back to the implicitly generated operator= — confirm
// that is intended before re-enabling.
ObjectString& operator = (const char* value);
ObjectString& operator = (const ObjectString& you);
#endif
~ObjectString();
// comparators
public:
// Delegates to MyString::compare; the cast is needed because MyString is
// a private base of ObjectString.
int compare(const ObjectString* you) const
{ return MyString::compare((const MyString*)you); }
// I/O
// member accessing methods
public:
// Re-expose the privately inherited char* conversion operator.
operator char*() const { return MyString::operator char*(); }
};
#endif /* __OBJECTSTRING_H__ */
/* Local Variables: */
/* mode: c++ */
/* End: */
|
#!/bin/sh
# Fetch a kubeconfig from a remote node through a bastion host, then point
# it at the desired API server address.
#   $1 = node holding /home/alpine/.kube/config (final scp target)
#   $2 = SSH identity file, used for both the bastion hop and the node
#   $3 = bastion host reached first via ProxyCommand
#   $4 = API server host/IP written into the copied admin.conf
# Host key checking is disabled on both hops (throw-away test machines).
scp -i ${2} -o "ProxyCommand ssh -i ${2} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -W %h:%p -o BatchMode=yes alpine@${3}" -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null alpine@${1}:/home/alpine/.kube/config admin.conf
# Rewrite the "server:" line in place to https://$4:6443.
sed -i -e "s/\(server:\).*/\1\ https:\/\/${4}:6443/" admin.conf
|
<reponame>oueya1479/OpenOLAT
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
* <p>
*/
package org.olat.course.nodes.en;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.logging.log4j.Logger;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.olat.basesecurity.BaseSecurity;
import org.olat.basesecurity.GroupRoles;
import org.olat.basesecurity.OrganisationService;
import org.olat.core.commons.persistence.DB;
import org.olat.core.commons.persistence.DBFactory;
import org.olat.core.gui.translator.Translator;
import org.olat.core.gui.util.WindowControlMocker;
import org.olat.core.id.Identity;
import org.olat.core.id.IdentityEnvironment;
import org.olat.core.id.Organisation;
import org.olat.core.id.Roles;
import org.olat.core.logging.Tracing;
import org.olat.core.logging.activity.OlatResourceableType;
import org.olat.core.logging.activity.ThreadLocalUserActivityLogger;
import org.olat.core.logging.activity.ThreadLocalUserActivityLoggerInstaller;
import org.olat.core.util.SessionInfo;
import org.olat.core.util.UserSession;
import org.olat.core.util.Util;
import org.olat.course.CourseFactory;
import org.olat.course.CourseModule;
import org.olat.course.groupsandrights.CourseGroupManager;
import org.olat.course.nodes.ENCourseNode;
import org.olat.course.properties.CoursePropertyManager;
import org.olat.course.run.environment.CourseEnvironment;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.course.run.userview.UserCourseEnvironmentImpl;
import org.olat.group.BusinessGroup;
import org.olat.group.BusinessGroupService;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryEntryStatusEnum;
import org.olat.repository.RepositoryService;
import org.olat.resource.OLATResource;
import org.olat.resource.OLATResourceManager;
import org.olat.test.JunitTestHelper;
import org.olat.test.OlatTestCase;
import org.olat.util.logging.activity.LoggingResourceable;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Description: <BR/>
* Test the enrollment
* <P/> Initial Date: Jul 28, 2004
*
* @author patrick
* @author srosse, <EMAIL>, http://www.frentix.com
*/
// Integration test for EnrollmentManager: sequential enroll/cancel against a
// group with a waiting list, plus a concurrent enrollment stress test.
// All fixtures are static and rebuilt by setup() before every test method.
public class EnrollmentManagerConcurrentTest extends OlatTestCase {
//
private static final Logger log = Tracing.createLoggerFor(EnrollmentManagerConcurrentTest.class);
/*
* ::Test Setup::
*/
private static Identity id1;
// For WaitingGroup tests
private static Identity wg1, wg2,wg3;
private static Roles wg1Roles, wg2Roles, wg3Roles;
// For WaitingGroup tests
private static Translator testTranslator = null;
private static BusinessGroup bgWithWaitingList = null;
@Autowired
private RepositoryService repositoryService;
@Autowired
private OLATResourceManager resourceManager;
@Autowired
private OrganisationService organisationService;
@Autowired
private BusinessGroupService businessGroupService;
@Autowired
private EnrollmentManager enrollmentManager;
@Autowired
private BaseSecurity securityManager;
@Autowired
private DB dbInstance;
/**
* @see junit.framework.TestCase#setUp()
*/
// Creates four identities and a business group limited to 2 participants
// with an auto-closing waiting list, so the third enrollment in
// testEnroll() must land on the waiting list.
@Before public void setup() throws Exception {
// Identities
id1 = JunitTestHelper.createAndPersistIdentityAsUser("id1");
DBFactory.getInstance().closeSession();
// create business-group with waiting-list
String bgWithWaitingListName = "Group with WaitingList";
String bgWithWaitingListDesc = "some short description for Group with WaitingList";
Boolean enableWaitinglist = Boolean.TRUE;
Boolean enableAutoCloseRanks = Boolean.TRUE;
RepositoryEntry resource = JunitTestHelper.createAndPersistRepositoryEntry();
log.info("testAddToWaitingListAndFireEvent: resource=" + resource);
bgWithWaitingList = businessGroupService.createBusinessGroup(id1, bgWithWaitingListName,
bgWithWaitingListDesc, BusinessGroup.BUSINESS_TYPE,
-1, -1, enableWaitinglist, enableAutoCloseRanks, resource);
// Capacity of 2: the limit the waiting-list tests revolve around.
bgWithWaitingList.setMaxParticipants(Integer.valueOf(2));
log.info("TEST bgWithWaitingList=" + bgWithWaitingList);
log.info("TEST bgWithWaitingList.getMaxParticipants()=" + bgWithWaitingList.getMaxParticipants() );
log.info("TEST bgWithWaitingList.getWaitingListEnabled()=" + bgWithWaitingList.getWaitingListEnabled() );
// create mock objects
testTranslator = Util.createPackageTranslator(EnrollmentManagerConcurrentTest.class, new Locale("de"));
// Identities
wg1 = JunitTestHelper.createAndPersistIdentityAsUser("wg1");
wg1Roles = securityManager.getRoles(wg1);
wg2 = JunitTestHelper.createAndPersistIdentityAsUser("wg2");
wg2Roles = securityManager.getRoles(wg2);
wg3 = JunitTestHelper.createAndPersistIdentityAsUser("wg3");
wg3Roles = securityManager.getRoles(wg3);
DBFactory.getInstance().closeSession();
}
// Test for WaitingList
///////////////////////
/**
* Enroll 3 identities (group with max-size=2 and waiting-list).
* Cancel enrollment. Check size after each step.
*/
@Test
public void testEnroll() throws Exception {
log.info("testEnroll: start...");
ENCourseNode enNode = new ENCourseNode();
OLATResource resource = resourceManager.createOLATResourceInstance(CourseModule.class);
Organisation defOrganisation = organisationService.getDefaultOrganisation();
RepositoryEntry addedEntry = repositoryService.create(null, "Ayanami", "-", "Enrollment test course 1", "A JUnit course",
resource, RepositoryEntryStatusEnum.trash, defOrganisation);
CourseEnvironment cenv = CourseFactory.createCourse(addedEntry, "Test", "Test").getCourseEnvironment();
// 1. enroll wg1 user
IdentityEnvironment ienv = new IdentityEnvironment();
ienv.setIdentity(wg1);
UserCourseEnvironment userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
CoursePropertyManager coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
log.info("enrollmentManager=" + enrollmentManager);
log.info("bgWithWaitingList=" + bgWithWaitingList);
assertTrue("bgWithWaitingList is null",bgWithWaitingList != null);
log.info("userCourseEnv=" + userCourseEnv);
log.info("userCourseEnv.getCourseEnvironment()=" + userCourseEnv.getCourseEnvironment());
enrollmentManager.doEnroll(userCourseEnv, wg1Roles, bgWithWaitingList, enNode, coursePropertyManager, new WindowControlMocker(), testTranslator,
new ArrayList<Long>()/*enrollableGroupNames*/, new ArrayList<Long>()/*enrollableAreaNames*/, userCourseEnv.getCourseEnvironment().getCourseGroupManager());
assertTrue("Enrollment failed, user='wg1'", businessGroupService.isIdentityInBusinessGroup(wg1,bgWithWaitingList));
int participantsCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.participant.name());
assertTrue("Wrong number of participants," + participantsCounter , participantsCounter == 1);
// 2. enroll wg2 user
ienv = new IdentityEnvironment();
ienv.setIdentity(wg2);
userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
enrollmentManager.doEnroll(userCourseEnv, wg2Roles, bgWithWaitingList, enNode, coursePropertyManager, new WindowControlMocker(),testTranslator,
new ArrayList<Long>()/*enrollableGroupNames*/, new ArrayList<Long>()/*enrollableAreaNames*/, userCourseEnv.getCourseEnvironment().getCourseGroupManager());
assertTrue("Enrollment failed, user='wg2'", businessGroupService.isIdentityInBusinessGroup(wg2,bgWithWaitingList));
assertTrue("Enrollment failed, user='wg1'", businessGroupService.isIdentityInBusinessGroup(wg1,bgWithWaitingList));
participantsCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.participant.name());
assertTrue("Wrong number of participants," + participantsCounter , participantsCounter == 2);
// 3. enroll wg3 user => list is full => waiting-list
ienv = new IdentityEnvironment();
ienv.setIdentity(wg3);
userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
enrollmentManager.doEnroll(userCourseEnv, wg3Roles, bgWithWaitingList, enNode, coursePropertyManager, new WindowControlMocker(), testTranslator,
new ArrayList<Long>()/*enrollableGroupNames*/, new ArrayList<Long>()/*enrollableAreaNames*/, userCourseEnv.getCourseEnvironment().getCourseGroupManager());
assertFalse("Wrong enrollment, user='wg3' is in PartipiciantGroup, must be on waiting-list", businessGroupService.isIdentityInBusinessGroup(wg3,bgWithWaitingList));
assertFalse("Wrong enrollment, user='wg3' is in PartipiciantGroup, must be on waiting-list", businessGroupService.hasRoles(wg3, bgWithWaitingList, GroupRoles.participant.name()));
assertTrue("Wrong enrollment, user='wg3' must be on waiting-list", businessGroupService.hasRoles(wg3, bgWithWaitingList, GroupRoles.waiting.name()));
assertTrue("Enrollment failed, user='wg2'", businessGroupService.isIdentityInBusinessGroup(wg2,bgWithWaitingList));
assertTrue("Enrollment failed, user='wg1'", businessGroupService.isIdentityInBusinessGroup(wg1,bgWithWaitingList));
participantsCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.participant.name());
assertTrue("Wrong number of participants," + participantsCounter , participantsCounter == 2);
int waitingListCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.waiting.name());
assertTrue("Wrong number of waiting-list, must be 1, is " + waitingListCounter , waitingListCounter == 1);
// cancel enrollment for wg2 => transfer wg3 from waiting-list to participants
// (auto-close-ranks was enabled in setup(), so a freed slot is refilled)
ienv = new IdentityEnvironment();
ienv.setIdentity(wg2);
userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
enrollmentManager.doCancelEnrollment(userCourseEnv,bgWithWaitingList, addedEntry, enNode, coursePropertyManager, new WindowControlMocker(), testTranslator);
assertFalse("Cancel enrollment failed, user='wg2' is still participants.", businessGroupService.isIdentityInBusinessGroup(wg2,bgWithWaitingList));
assertTrue("Enrollment failed, user='wg3'", businessGroupService.isIdentityInBusinessGroup(wg3,bgWithWaitingList));
assertTrue("Enrollment failed, user='wg1'", businessGroupService.isIdentityInBusinessGroup(wg1,bgWithWaitingList));
participantsCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.participant.name());
assertTrue("Wrong number of participants, must be 2, is " + participantsCounter , participantsCounter == 2);
waitingListCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.waiting.name());
assertTrue("Wrong number of waiting-list, must be 0, is " + waitingListCounter , waitingListCounter == 0);
// cancel enrollment for wg1
ienv = new IdentityEnvironment();
ienv.setIdentity(wg1);
userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
enrollmentManager.doCancelEnrollment(userCourseEnv,bgWithWaitingList, addedEntry, enNode, coursePropertyManager, new WindowControlMocker(), testTranslator);
assertFalse("Cancel enrollment failed, user='wg2' is still participants.", businessGroupService.isIdentityInBusinessGroup(wg2,bgWithWaitingList));
assertFalse("Cancel enrollment failed, user='wg1' is still participants.", businessGroupService.isIdentityInBusinessGroup(wg1,bgWithWaitingList));
assertTrue("Enrollment failed, user='wg3'", businessGroupService.isIdentityInBusinessGroup(wg3,bgWithWaitingList));
participantsCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.participant.name());
assertTrue("Wrong number of participants, must be 1, is " + participantsCounter , participantsCounter == 1);
waitingListCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.waiting.name());
assertTrue("Wrong number of waiting-list, must be 0, is " + waitingListCounter , waitingListCounter == 0);
// cancel enrollment for wg3
ienv = new IdentityEnvironment();
ienv.setIdentity(wg3);
userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
enrollmentManager.doCancelEnrollment(userCourseEnv,bgWithWaitingList, addedEntry, enNode, coursePropertyManager, new WindowControlMocker(), testTranslator);
assertFalse("Cancel enrollment failed, user='wg3' is still participants.", businessGroupService.isIdentityInBusinessGroup(wg3,bgWithWaitingList));
assertFalse("Cancel enrollment failed, user='wg2' is still participants.", businessGroupService.isIdentityInBusinessGroup(wg2,bgWithWaitingList));
assertFalse("Cancel enrollment failed, user='wg1' is still participants.", businessGroupService.isIdentityInBusinessGroup(wg1,bgWithWaitingList));
participantsCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.participant.name());
assertTrue("Wrong number of participants, must be 0, is " + participantsCounter , participantsCounter == 0);
waitingListCounter = businessGroupService.countMembers(bgWithWaitingList, GroupRoles.waiting.name());
assertTrue("Wrong number of waiting-list, must be 0, is " + waitingListCounter , waitingListCounter == 0);
log.info("testEnroll: done...");
}
// Races many EnrollThread workers against one group (max 10 participants,
// waiting list enabled) and verifies exactly 10 became participants and the
// rest landed on the waiting list.
@Test
public void testConcurrentEnrollmentWithWaitingList() {
int numOfUsers = isOracleConfigured() ? 12 : 30;
List<Identity> ids = new ArrayList<>(numOfUsers);
for(int i=0; i<numOfUsers; i++) {
Identity id = JunitTestHelper.createAndPersistIdentityAsUser("enroll-a-" + i + "-" + UUID.randomUUID().toString());
ids.add(id);
}
ENCourseNode enNode = new ENCourseNode();
Identity author = JunitTestHelper.createAndPersistIdentityAsAuthor("enroller");
RepositoryEntry addedEntry = JunitTestHelper.deployBasicCourse(author);
CourseEnvironment cenv = CourseFactory.createCourse(addedEntry, "Test-Enroll", "Test").getCourseEnvironment();
BusinessGroup group = businessGroupService.createBusinessGroup(id1, "Enrollment", "Enroll", BusinessGroup.BUSINESS_TYPE,
Integer.valueOf(1), Integer.valueOf(10), true, false, null);
Assert.assertNotNull(group);
dbInstance.commitAndCloseSession();
final CountDownLatch doneSignal = new CountDownLatch(ids.size());
EnrollThread[] threads = new EnrollThread[numOfUsers];
int t = 0;
for(Identity id:ids) {
threads[t++] = new EnrollThread(id, addedEntry, group, enNode, cenv, doneSignal);
}
for(EnrollThread thread:threads) {
thread.start();
}
try {
boolean interrupt = doneSignal.await(360, TimeUnit.SECONDS);
// NOTE(review): the await above allows 360 seconds, but the failure
// message below still says "more than 10s" — the message is stale.
assertTrue("Test takes too long (more than 10s)", interrupt);
} catch (InterruptedException e) {
fail("" + e.getMessage());
}
dbInstance.commitAndCloseSession();
List<Identity> enrolledIds = businessGroupService.getMembers(group, GroupRoles.participant.name());
Assert.assertNotNull(enrolledIds);
Assert.assertEquals(10, enrolledIds.size());
List<Identity> waitingIds = businessGroupService.getMembers(group, GroupRoles.waiting.name());
Assert.assertNotNull(waitingIds);
Assert.assertEquals(ids.size() - 10, waitingIds.size());
}
// Worker thread: enrolls a single identity into the shared group under its
// own user session/activity logger, then counts down doneSignal (always, via
// the finally block) so the test can await completion of all workers.
private class EnrollThread extends Thread {
private final ENCourseNode enNode;
private final Identity identity;
private final CourseEnvironment cenv;
private final BusinessGroup group;
private final RepositoryEntry courseEntry;
private final CountDownLatch doneSignal;
public EnrollThread(Identity identity, RepositoryEntry courseEntry, BusinessGroup group,
ENCourseNode enNode, CourseEnvironment cenv, CountDownLatch doneSignal) {
this.enNode = enNode;
this.group = group;
this.courseEntry = courseEntry;
this.identity = identity;
this.cenv = cenv;
this.doneSignal = doneSignal;
}
@Override
public void run() {
try {
UserSession session = new UserSession();
session.setIdentity(identity);
session.setSessionInfo(new SessionInfo(identity.getKey()));
ThreadLocalUserActivityLoggerInstaller.initUserActivityLogger(session);
IdentityEnvironment ienv = new IdentityEnvironment();
ienv.setIdentity(identity);
UserCourseEnvironment userCourseEnv = new UserCourseEnvironmentImpl(ienv, cenv);
CoursePropertyManager coursePropertyManager = userCourseEnv.getCourseEnvironment().getCoursePropertyManager();
CourseGroupManager courseGroupManager = userCourseEnv.getCourseEnvironment().getCourseGroupManager();
ThreadLocalUserActivityLogger.addLoggingResourceInfo(LoggingResourceable.wrap(courseEntry.getOlatResource(), OlatResourceableType.course));
ThreadLocalUserActivityLogger.addLoggingResourceInfo(LoggingResourceable.wrap(enNode));
ThreadLocalUserActivityLogger.addLoggingResourceInfo(LoggingResourceable.wrap(group));
// Random jitter (0-100ms) so the workers do not all hit doEnroll at
// the exact same instant.
sleep(Math.round(new Random().nextDouble() * 100l));
enrollmentManager.doEnroll(userCourseEnv, Roles.userRoles(), group, enNode, coursePropertyManager, new WindowControlMocker(), testTranslator,
new ArrayList<Long>()/*enrollableGroupNames*/, new ArrayList<Long>()/*enrollableAreaNames*/, courseGroupManager);
DBFactory.getInstance().commit();
} catch (Exception e) {
log.error("", e);
} finally {
ThreadLocalUserActivityLoggerInstaller.resetUserActivityLogger();
DBFactory.getInstance().commitAndCloseSession();
doneSignal.countDown();
}
}
}
}
#!/usr/bin/env bash
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
APPNAME="$(basename "$0")"
VERSION="031220211542-git"
USER="${SUDO_USER:-${USER}}"
HOME="${USER_HOME:-${HOME}}"
SRC_DIR="${BASH_SOURCE%/*}"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#set opts
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
##@Version : 031220211542-git
# @Author : Jason Hempstead
# @Contact : jason@casjaysdev.com
# @License : LICENSE.md
# @ReadME : template --help
# @Copyright : Copyright: (c) 2021 Jason Hempstead, CasjaysDev
# @Created : Friday, Apr 02, 2021 13:11 EDT
# @File : template
# @Description : Linux setup script
# @TODO : Refactor the code
# @Other :
# @Resource :
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/usr/local/sbin:/usr/sbin:/sbin
#Modify and set if using the auth token
AUTHTOKEN=""
# either http https or git
GITPROTO="https://"
#Your git repo
GITREPO="github.com/casjay-systems/linux"
# Git Command - Private Repo
#GITURL="$GITPROTO$AUTHTOKEN:x-oauth-basic@$GITREPO"
#Public Repo
GITURL="$GITPROTO$GITREPO"
# Default NTP Server
NTPSERVER="0.casjay.pool.ntp.org"
# Set the temp directory
DOTTEMP="/tmp/dotfiles-desktop-$USER"
# Default dotfiles dir
# Set primary dir - not used
DOTFILES="$HOME/.local/dotfiles/linux"
SUDO_PROMPT="$(printf "\033[1;36m") • [sudo]$(printf "\033[0m") password for %p: "
dotfilesDirectory="$DOTFILES"
srcdir="$dotfilesDirectory/src"
linuxosdir="$srcdir/os/linux"
backupsdir="$HOME/.local/backups/dotfiles/linux"
export DOTFILES DOTTEMP dotfilesDirectory srcdir linuxosdir backupsdir SUDO_PROMPT
##################################################################################################
# Define colors
PURPLE='\033[0;35m'
BLUE='\033[0;34m'
RED='\033[0;31m'
GREEN='\033[32m'
YELLOW='\033[0;33m'
NC='\033[0m'
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if [[ ! "$OSTYPE" =~ ^linux ]]; then
printf "\n\t\t${RED} This script is for Linux ${NC}\n\n"
exit 1
fi
# Grab the OS detection script if it doesn't exist script
if [ -f "$srcdir/os/osdetect.sh" ] && [ -f "$srcdir/os/utils.sh" ]; then
source "$srcdir/os/utils.sh"
source "$srcdir/os/osdetect.sh"
else
curl -Lsq "https://$GITREPO/raw/main/src/os/utils.sh" -o /tmp/utils.sh
curl -Lsq "https://$GITREPO/raw/main/src/os/osdetect.sh" -o /tmp/osdetect.sh
if [ -f "/tmp/osdetect.sh" ] && [ -f "/tmp/utils.sh" ]; then
source /tmp/utils.sh
source /tmp/osdetect.sh
rm -Rf /tmp/utils.sh /tmp/osdetect.sh
else
clear
printf "\n\n\n\n${BLUE} Could not source the files needed${NC}\n\n\n\n"
exit 1
fi
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if [[ "$(python3 -V 2>/dev/null)" =~ "Python 3" ]]; then
PYTHONVER="python3"
PIP="pip3"
export PATH="${PATH}:$(python3 -c 'import site; print(site.USER_BASE)')/bin"
elif [[ "$(python2 -V 2>/dev/null)" =~ "Python 2" ]]; then
PYTHONVER="python"
PIP="pip"
export PATH="${PATH}:$(python -c 'import site; print(site.USER_BASE)')/bin"
fi
##################################################################################################
if [ -f "$HOME/.config/dotfiles/env" ]; then
source "$HOME/.config/dotfiles/env"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Automatic linux install
###############################################################################################
clear #
printf "\n\n\n\n\n${BLUE} *** Initializing the installer please wait *** ${NC}\n" #
###############################################################################################
if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
printf "\n${RED} • Getting root privileges •${NC}\n" &&
ask_for_sudo
if [ "$?" -eq 0 ]; then
printf "${GREEN} • Received root privileges •${NC}\n\n"
else
printf "${GREEN} • Can not get access to sudo •${NC}\n\n"
exit 1
fi
else
printf "${GREEN} • Can not get access to sudo •${NC}\n\n"
exit 1
fi
# Remove previous installs
if [ ! -d "$DOTFILES/.git" ]; then
rm -Rf "$DOTFILES"
fi
if [ -d $HOME/.config/bash/profile ]; then rm -Rf $HOME/.config/bash/profile/zz-*; fi
if [ ! -d "$HOME/.config/dotfiles/backups/configs" ]; then mkdir -p "$HOME/.config/dotfiles/backups/configs"; fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Detect the system package manager and the matching install/update options,
# then refuse to continue if the system has pending updates. Skipped entirely
# in dotfiles-only UPDATE mode or when sudo is unavailable.
if [ -z "$UPDATE" ]; then
  if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
    #Define the package manager and install option
    if [ -f /usr/bin/apt ]; then
      LSBPAC=lsb-release
      # NOTE: the embedded quotes around --force-conf* close/reopen the outer
      # string; the resulting value is still a valid apt-get invocation.
      pkgmgr="DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --ignore-missing --allow-unauthenticated --assume-yes"
      instoption="-yy -qq install"
      instupdateoption="update && sudo $pkgmgr dist-upgrade -yy -qq"
      instchkupdatecmd="$(sudo apt-get update >/dev/null && apt-get --just-print upgrade | grep "Inst " | wc -l)"
      ##
    elif [ -f /usr/bin/yum ]; then
      LSBPAC=redhat-lsb
      pkgmgr="yum"
      instoption="install -y -q"
      instupdateoption="install -y -q --skip-broken"
      instchkupdatecmd="$(sudo yum check-update -q | grep -v Security | wc -l)"
      ##
    elif [ -f /usr/bin/dnf ]; then
      LSBPAC=redhat-lsb
      pkgmgr="dnf"
      instoption="install -y -q --skip-broken"
      instupdateoption="update -y -q"
      instchkupdatecmd="$(sudo dnf check-update -q | grep -v Security | wc -l)"
      ##
    elif [ -f /usr/bin/pacman ]; then
      LSBPAC=lsb-release
      pkgmgr="pacman"
      instoption="-Syy --needed --noconfirm"
      # BUGFIX: pacman's sync-upgrade flag is single-dash "-Syyu";
      # "--Syyu" is not a valid pacman option and would fail at update time.
      instupdateoption="-Syyu --noconfirm"
      instchkupdatecmd="$(checkupdates 2>/dev/null | wc -l)"
    fi
    # BUGFIX: only abort when a package manager was actually detected; an
    # unset (empty) count previously satisfied `!= 0` and exited spuriously.
    # Also fixed message typos ("runng" -> "running", "update" -> "updates").
    if [ -n "$instchkupdatecmd" ] && [[ "$instchkupdatecmd" != 0 ]]; then
      printf "\n${RED} *** Please update your system before running this installer ***${NC}\n"
      printf "\n${RED} *** You have $instchkupdatecmd updates available ***${NC}\n\n\n\n"
      exit 1
    fi
  fi
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Welcome message
# Give the user wait_time seconds to Ctrl+C before the install proceeds.
wait_time=10 # seconds
temp_cnt=${wait_time}
printf "\n\n\n${GREEN} *** ${RED}•${GREEN} Welcome to my dotfiles Installer for linux ${RED}•${GREEN} ***${NC}\n"
printf "${YELLOW} *** • Your Distro is $distroname and is based on $DISTRO • ***${NC}\n\n\n"
while [[ ${temp_cnt} -gt 0 ]]; do
  # \r rewrites the same terminal line each second for a live countdown
  printf "\r${GREEN} *** ${RED}•${GREEN} You have %2d second(s) remaining to hit Ctrl+C to cancel ${RED}•${GREEN} ***" ${temp_cnt}
  sleep 1
  ((temp_cnt--))
done
printf "${NC}\n\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
##### for when I'm forgetful
# Warn (but do not exit) when the directory variables that the bootstrap
# scripts are expected to set are missing.
if [ -z $dotfilesDirectory ]; then printf "\n${RED} *** dotfiles directory not specified ***${NC}\n"; fi
if [ -z $srcdir ]; then printf "\n${RED} *** dotfiles src directory not specified ***${NC}\n"; fi
if [ -z $linuxosdir ]; then printf "\n${RED} *** dotfiles linuxos directory not specified ***${NC}\n"; fi
#####
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Resolve each required/optional tool to its full path (empty when absent).
# These drive the missing-package check below and are persisted to the env
# file at the end of the run.
GIT=$(command -v git 2>/dev/null)
CURL=$(command -v curl 2>/dev/null)
WGET=$(command -v wget 2>/dev/null)
VIM=$(command -v vim 2>/dev/null)
TMUX=$(command -v tmux 2>/dev/null)
ZSH=$(command -v zsh 2>/dev/null)
FISH=$(command -v fish 2>/dev/null)
SUDO=$(command -v sudo 2>/dev/null)
LSBR=$(command -v lsb_release 2>/dev/null)
POLYBAR=$(command -v polybar 2>/dev/null)
JGMENU=$(command -v jgmenu 2>/dev/null)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# no sudo can't continue
# When root access is unavailable and we are not already in dotfiles-only
# UPDATE mode, print the UPDATE one-liner the user should run instead, then exit.
if ! (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
  # BUGFIX: was "$SUDU" — an always-empty typo that made the check a no-op;
  # $SUDO holds the `command -v sudo` lookup performed above.
  if [[ -z "$SUDO" ]] && [[ -z "$UPDATE" ]]; then
    # BUGFIX: the $(curl ...) must be escaped so the suggested command is
    # PRINTED literally; previously the unescaped substitution actually
    # downloaded and expanded the remote script inside this printf.
    printf "\n${GREEN} *** ${RED}•${GREEN} UPDATE=yes bash -c \"\$(curl -LsS https://$GITREPO/raw/main/src/os/setup.sh)\" ${RED}•${GREEN} ***${NC}\n"
    printf "\n${GREEN} *** ${RED}•${GREEN} to install just the dotfiles ${RED}•${GREEN} ***${NC}\n"
    printf "\n${RED} *** ${RED}•${GREEN} No sudo or root privileges ${RED}•${GREEN} ***${NC}\n\n"
    exit
  fi
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if [ -z "$UPDATE" ]; then
if ! (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null && [ -z "$POLYBAR" ]; then
printf "
${RED}\n • Please run one of the following commands as root:${NC}
${GREEN}if running Arch you can just do${RED} su -c $srcdir/os/linux/pkgs/lists/arch.sh${NC}
${GREEN}if running Centos you can just do${RED} su -c $srcdir/os/linux/pkgs/lists/rhel.sh${NC}
${GREEN}if running Debian you can just do${RED} su -c $srcdir/os/linux/pkgs/lists/debian-sys.sh${NC}
${GREEN}if running Fedora you can just do${RED} su -c $srcdir/os/linux/pkgs/lists/fedora-sys.sh${NC}
${GREEN}if running Ubuntu you can just do${RED} su -c $srcdir/os/linux/pkgs/lists/ubuntu-sys.sh${NC}
${GREEN}if running Raspbian you can just do${RED} su -c $srcdir/os/linux/pkgs/lists/raspbian-sys.sh${NC}
\n${RED}then come back to this installer ${NC}\n\n"
exit
fi
fi
# Lets check for git, curl, wget
# Accumulate the names of missing required tools into MISSING, then either
# install them with the detected package manager (sudo available) or tell
# the user to install them manually and exit.
unset MISSING
if [[ ! "$GIT" ]]; then MISSING="$MISSING git"; fi
if [[ ! "$CURL" ]]; then MISSING="$MISSING curl"; fi
if [[ ! "$WGET" ]]; then MISSING="$MISSING wget"; fi
if [[ ! "$VIM" ]]; then MISSING="$MISSING vim"; fi
if [[ ! "$TMUX" ]]; then MISSING="$MISSING tmux"; fi
if [[ ! "$ZSH" ]]; then MISSING="$MISSING zsh"; fi
if [[ ! "$FISH" ]]; then MISSING="$MISSING fish"; fi
if [[ ! "$SUDO" ]]; then MISSING="$MISSING sudo"; fi
# LSBPAC is the distro-specific package providing lsb_release (set above)
if [[ ! "$LSBR" ]]; then MISSING="$MISSING $LSBPAC"; fi
if [ -z "$LSBR" ] || [ -z "$GIT" ] || [ -z "$CURL" ] || [ -z "$WGET" ] || [ -z "$VIM" ] || [ -z "$TMUX" ] || [ -z "$ZSH" ] || [ -z "$FISH" ] || [ -z "$SUDO" ]; then
  printf "${RED} *** • The following are needed: • ***${NC}\n"
  printf "${RED} *** • ${MISSING} • ***${NC}\n"
  if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
    # NOTE(review): $pkgmgr/$instoption are only set when UPDATE was empty
    # and sudo was available during detection — verify this path is reachable
    # with them unset.
    execute "sudo $pkgmgr $instoption ${MISSING}" " *** • Attempting to install the missing package[s] • ***"
  else
    printf "${RED} *** • I can't get root access You will have to manually install the missing programs • ***${NC}\n"
    printf "${RED} *** • ${MISSING} • ***${NC}\n\n\n"
    exit
  fi
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Set version from git
# NOTE(review): bare `head` keeps up to 10 lines; presumably only the first
# non-comment line of version.txt is wanted (head -n 1) — confirm against
# the repo's version.txt format before changing.
CURDOTFVERSION="$(curl -LSsq "https://$GITREPO/raw/main/version.txt" | grep -v "#" | head)"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Print info
printf "\n${GREEN} *** • git, curl, wget, vim, tmux, zsh, fish, sudo are present • ***${NC}\n\n"
printf "${GREEN} *** • Installing version $CURDOTFVERSION • ***${NC}\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Setup the dotfiles Directory
# Update the repo in place when it is already a git checkout, otherwise
# clone it fresh; in both paths re-source utils.sh from the new checkout.
if [ -d "$dotfilesDirectory/.git" ]; then
  printf "\n${PURPLE} • Updating the git repo - $dotfilesDirectory${NC}\n"
  cd "$srcdir/os" && source "utils.sh"
  # hard-reset discards local modifications before pulling
  execute \
    "git -C $dotfilesDirectory reset --hard -q >/dev/null && \
    git -C $dotfilesDirectory pull --recurse-submodules -q" \
    "Updating dotfiles"
  # version.txt: last line is the current version number
  NEWVERSION="$(cat "$DOTFILES/version.txt" | tail -n 1)"
  REVER="$(cd $dotfilesDirectory && git rev-parse --short HEAD)"
  printf "${GREEN} [✔] Updated to $NEWVERSION - revision: $REVER${NC}\n"
  printf "${PURPLE} • Updating the git repo completed${NC}\n\n"
else
  printf "\n${PURPLE} • Cloning the git repo - $dotfilesDirectory${NC}\n"
  rm -Rf $dotfilesDirectory
  git clone --recursive -q $GITURL $dotfilesDirectory >/dev/null 2>&1
  printf "${GREEN} [✔] cloned $GITURL → $dotfilesDirectory \n"
  NEWVERSION="$(cat "$DOTFILES/version.txt" | tail -n 1)"
  REVER="$(cd $dotfilesDirectory && git rev-parse --short HEAD)"
  printf "${GREEN} [✔] downloaded version $NEWVERSION - revision: $REVER${NC}\n"
  cd "$srcdir/os" && source "utils.sh"
  printf "${PURPLE} • Cloning the git repo completed${NC}\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Make Directories and fix permissions
mkdir -p ~/.gnupg ~/.ssh 2>/dev/null
# -xtype l matches broken symlinks (target missing) — prune them
find "$HOME" -xtype l -delete 2>/dev/null
# gnupg/ssh require strict perms: 600 files, 700 dirs
find ~/.gnupg ~/.ssh -type f -exec chmod 600 {} \; 2>/dev/null
find ~/.gnupg ~/.ssh -type d -exec chmod 700 {} \; 2>/dev/null
find "$dotfilesDirectory/" -iname "*.sh" -exec chmod 755 {} \; 2>/dev/null
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Check for then get root permissions
if [ -z "$UPDATE" ] || [ "$1" = "--force" ]; then
  # NOTE(review): unlike the non-interactive probes elsewhere this uses
  # `sudo true`, which may prompt for a password — presumably intentional here
  if (sudo true && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
    printf "\n${RED} • Getting root privileges${NC}\n"
    ask_for_sudo
    printf "${GREEN} • Received root privileges${NC}\n\n"
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Install Packages
    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Distro package setup (original comment said "MacOS setup" — this is
    # the Linux path; install_packages.sh lives under $linuxosdir)
    printf "\n${PURPLE} • Setting up for ${DISTRO} $(get_os_version) ${NC}\n"
    source "$linuxosdir/install_packages.sh"
    printf "\n${PURPLE} • Done Setting up for the ${DISTRO}${NC}\n\n"
  fi
fi
###################################################################
# grab the modules
printf "\n${PURPLE} *** • Downloading additional configuration files • ***${NC}\n"
if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
  # Install (or update) the systemmgr helper suite from its upstream
  # installer script, then register its own installer module.
  systemmgr_inst() {
    sudo bash -c "$(curl -LSs https://github.com/systemmgr/installer/raw/main/install.sh)" >/dev/null 2>&1 &&
      systemmgr install installer >/dev/null 2>&1
  }
  # Same action either way; only the status message differs
  if [ -z "$(command -v systemmgr 2>/dev/null)" ]; then
    execute "systemmgr_inst" "installing system scripts"
  else
    execute "systemmgr_inst" "Updating system scripts"
  fi
fi
# Fetch each per-application config module from github.com/dfmgr/<name>:
# pull when it is already a git checkout, otherwise wipe and clone fresh.
for config in bash geany git htop neofetch fish tmux terminology Thunar transmission variety vifm vim zsh; do
  if [ -d "$dotfilesDirectory/src/config/$config/.git" ]; then
    execute \
      "git -C $dotfilesDirectory/src/config/$config reset --hard -q && \
      git -C $dotfilesDirectory/src/config/$config pull -q" \
      "Updating $config module"
  else
    rm -Rf "$dotfilesDirectory/src/config/$config"
    execute \
      "git clone -q https://github.com/dfmgr/$config $dotfilesDirectory/src/config/$config" \
      "Installing $config module"
  fi
done
printf "${PURPLE} *** • Downloading additional configuration files completed • ***${NC}\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Install additional system files if root
if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
  print_in_purple "\n • Installing system files\n"
  sudo bash -c "$linuxosdir/install_system_files.sh"
  print_in_purple " • Installing system files completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create user directories
# NOTE(review): the `bash -c $linuxosdir/...` invocations below rely on the
# path containing no whitespace (unquoted word-splitting) — confirm or quote.
print_in_purple "\n • Creating directories\n"
bash -c $linuxosdir/create_directories.sh
print_in_purple " • Creating directories completed\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create user .local files
print_in_purple "\n • Create local config files\n"
bash -c $linuxosdir/create_local_config_files.sh
print_in_purple " • Create local config files completed\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create user dotfile symlinks
print_in_purple "\n • Backing up and creating user files\n"
bash -c $linuxosdir/create_symbolic_links.sh
print_in_purple " • Create user files completed\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create user themes/fonts/icons or install to system if root
print_in_purple "\n • Installing Customizations\n"
bash -c $linuxosdir/install_customizations.sh
print_in_purple " • Installing Customizations completed\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Per-tool setup: each stanza re-resolves the tool (packages may have just
# been installed above), runs its install script when present, or prints a
# red not-installed notice and moves on.
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create and Setup git
GIT=$(which git 2>/dev/null)
if [ -z "$GIT" ]; then print_in_red "\n • The git package is not installed\n\n"; else
  print_in_purple "\n • Installing GIT\n"
  bash -c $linuxosdir/install_git.sh
  print_in_purple " • Installing GIT completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create and Setup vim
VIM=$(which vim 2>/dev/null)
if [ -z "$VIM" ]; then print_in_red "\n • The vim package is not installed\n\n"; else
  print_in_purple "\n • Installing vim with plugins\n"
  bash -c $linuxosdir/install_vim.sh
  print_in_purple " • Installing vim with plugins completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create and Setup tmux
TMUX=$(which tmux 2>/dev/null)
if [ -z "$TMUX" ]; then print_in_red "\n • The tmux package is not installed\n\n"; else
  print_in_purple "\n • Installing tmux plugins\n"
  bash -c $linuxosdir/install_tmux.sh
  print_in_purple " • Installing tmux plugins completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create and Setup zsh
ZSH=$(which zsh 2>/dev/null)
if [ -z "$ZSH" ]; then print_in_red "\n • The zsh package is not installed\n\n"; else
  print_in_purple "\n • Installing zsh with plugins\n"
  bash -c $linuxosdir/install_ohmyzsh.sh
  print_in_purple " • Installing zsh with plugins completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create and Setup fish
FISH=$(which fish 2>/dev/null)
if [ -z "$FISH" ]; then print_in_red "\n • The fish package is not installed\n\n"; else
  print_in_purple "\n • Installing fish shell and plugins\n"
  bash -c $linuxosdir/install_ohmyfish.sh
  print_in_purple " • Installing fish shell and plugins completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create and Setup Visual Studio code
CODE=$(command -v code 2>/dev/null)
if [ -z "$CODE" ]; then print_in_red "\n • The Visual Studio code package is not installed\n\n"; else
  print_in_purple "\n • Installing Visual Studio code and plugins\n"
  $linuxosdir/install_vscode.sh
  print_in_purple " • Installing Visual Studio code shell and plugins completed\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#No point in running if no desktop
# Compile polybar and jgmenu from source (skipped on headless systems and
# in UPDATE mode; skipped individually when already installed).
if [ -n "$DESKTOP_SESSION" ]; then
  # Compile and Install polybar
  POLYBAR=$(which polybar 2>/dev/null)
  if [ -z $UPDATE ]; then
    print_in_purple "\n • polybar install\n\n"
    if [ -n "$POLYBAR" ]; then print_in_green " • polybar already installed\n"; else
      sudo bash -c $linuxosdir/make_polybar.sh
    fi
    print_in_purple " • polybar install complete\n\n"
  fi
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # Compile and Install jgmenu
  JGMENU=$(which jgmenu 2>/dev/null)
  if [ -z $UPDATE ]; then
    print_in_purple " • jgmenu install\n\n"
    if [ ! -z "$JGMENU" ]; then print_in_green " • jgmenu already installed\n"; else
      sudo bash -c $linuxosdir/make_jgmenu.sh
    fi
    print_in_purple " • jgmenu install complete\n\n"
  fi
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if [ -z "$(command -v shodan 2>/dev/null)" ] || [ -z "$(command -v ytmdl 2>/dev/null)" ] || [ -z "$(command -v toot 2>/dev/null)" ] ||
[ -z "$(command -v castero 2>/dev/null)" ] || [ -z "$(command -v rainbowstream 2>/dev/null)" ]; then
print_in_purple "\n • Installing terminal tools\n"
for PIPTOOLS in git+https://github.com/sixohsix/python-irclib shodan ytmdl toot castero rainbowstream; do
if "(sudo -vn && sudo -ln)" 2>&1 | grep -v 'may not' >/dev/null; then
execute \
"sudo sh -c $PIP install $PIPTOOLS >/dev/null 2>&1" \
"Installing pip package: $PIPTOOLS"
else
execute \
"sh -c $PIP install --user $PIPTOOLS >/dev/null 2>&1" \
"Installing pip package: $PIPTOOLS"
fi
done
print_in_purple " • Installing terminal tools completed\n\n"
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Update configs
# Run each config module's one-time install.sh (skipped once .installed exists).
print_in_purple "\n • Update configs\n"
# Iterate the glob directly instead of parsing `ls` output — same matches,
# but safe for paths containing whitespace; an unmatched glob yields the
# literal pattern, which simply fails the -f test below.
for confsetup in "$srcdir"/config/*; do
  if [ -f "$confsetup/install.sh" ] && [ ! -f "$confsetup/.installed" ]; then
    execute "$confsetup/install.sh" "Finalizing $confsetup setup"
  fi
done
print_in_purple " • Update configs completed\n\n"
# Install additional
print_in_purple "\n • Installing additional tools\n"
if [ -f "$(command -v dfmgr 2>/dev/null)" ]; then
  execute "dfmgr install misc"
fi
print_in_purple " • Installing additional tools completed\n\n"
# Install the dfmgr profile matching the current desktop session, when one
# is running (commented entries are not yet supported).
if [ -n "$DESKTOP_SESSION" ]; then
  case "$DESKTOP_SESSION" in
  awesome) execute "dfmgr install awesome" "Setting up for awesome" ;;
  bspwm) execute "dfmgr install bspwm" "Setting up for bspwm" ;;
  i3 | i3wm) execute "dfmgr install i3" "Setting up for i3" ;;
  qtile) execute "dfmgr install qtile" "Setting up for qtile" ;;
  xfce) execute "dfmgr install xfce" "Setting up for xfce" ;;
  openbox) execute "dfmgr install openbox" "Setting up for openbox" ;;
  xmonad) execute "dfmgr install xmonad" "Setting up for xmonad" ;;
  #jwm) execute "dfmgr install jwm" "Setting up for jwm";;
  #lxde) execute "dfmgr install lxde" "Setting up for lxde";;
  #lxqt) execute "dfmgr install lxqt" "Setting up for lxqt";;
  esac
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Go home
cd "$HOME" || false
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Fix permissions again
# (repeat of the earlier pass, since the setup scripts above may have
# created new links and key files)
find "$HOME" -xtype l -delete
find ~/.gnupg ~/.ssh -type f -exec chmod 600 {} \; 2>/dev/null
find ~/.gnupg ~/.ssh -type d -exec chmod 700 {} \; 2>/dev/null
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create env file
# Persist the install state so future runs (which source this file at the
# top) behave as updates. The "KEY="$VAR"" quoting concatenates to KEY=value;
# written values are unquoted in the resulting file.
if [ ! -d ~/.config/dotfiles ]; then mkdir -p ~/.config/dotfiles; fi
if [ ! -f ~/.config/dotfiles/env ]; then
  echo "" >~/.config/dotfiles/env
  echo "UPDATE="yes"" >>~/.config/dotfiles/env
  echo "dotfilesDirectory="$dotfilesDirectory"" >>~/.config/dotfiles/env
  echo "srcdir="$dotfilesDirectory/src"" >>~/.config/dotfiles/env
  echo "linuxosdir="$srcdir/os/linux"" >>~/.config/dotfiles/env
  echo "INSTALLEDVER="$NEWVERSION"" >>~/.config/dotfiles/env
  echo "DISTRO="$DISTRO"" >>~/.config/dotfiles/env
  echo "CODENAME="$CODENAME"" >>~/.config/dotfiles/env
  echo "GIT="$GIT"" >>~/.config/dotfiles/env
  echo "CURL="$CURL"" >>~/.config/dotfiles/env
  echo "WGET="$WGET"" >>~/.config/dotfiles/env
  echo "VIM="$VIM"" >>~/.config/dotfiles/env
  echo "TMUX="$TMUX"" >>~/.config/dotfiles/env
  echo "ZSH="$ZSH"" >>~/.config/dotfiles/env
  echo "FISH="$FISH"" >>~/.config/dotfiles/env
  echo "POLYBAR="$POLYBAR"" >>~/.config/dotfiles/env
  echo "JGMENU="$JGMENU"" >>~/.config/dotfiles/env
fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#Reconfigure lxdm
#if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' > /dev/null; then
# if [[ "$distroname" =~ "Kali" ]] || [[ "$distroname" =~ "Parrot" ]] || [[ "$distroname" =~ "Debian" ]] || [[ "$distroname" =~ "Raspbian" ]] || [[ "$distroname" =~ "Ubuntu" ]] ; then
#sudo dpkg-reconfigure lxdm
# fi
#fi
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# run clean up
print_in_purple "\n • Running cleanup\n"
# remove unwanted system files
# Remove leftover *-shmlog xsession entries (requires sudo); the -e guard
# skips the literal pattern when the glob matches nothing.
if (sudo -vn && sudo -ln) 2>&1 | grep -v 'may not' >/dev/null; then
  for sudocleanfile in /usr/share/xsessions/*-shmlog.desktop; do
    if [ -e "$sudocleanfile" ]; then
      execute \
        "sudo rm -Rf $sudocleanfile" \
        "Clean up cleaning up $sudocleanfile"
    fi
  done
fi
# remove unwanted user temp files
# Clear the yay (AUR helper) build cache, entry by entry
for homecleanfile in "$HOME/.cache/yay"/*; do
  if [ -e "$homecleanfile" ]; then
    execute \
      "rm -Rf $homecleanfile 2>/dev/null" \
      "cleaning up $homecleanfile"
  fi
done
print_in_purple " • Running cleanup complete\n\n"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Print installed version
# version.txt: last line is the installed version; copy it next to the os
# scripts so later update checks can compare against it.
NEWVERSION="$(cat "$DOTFILES/version.txt" | tail -n 1)"
cp -Rf "$DOTFILES/version.txt" "$srcdir/os/version.txt"
# End Install
#RESULT=$?
printf "\n${GREEN} *** 😃 installation of dotfiles completed 😃 *** ${NC}\n"
printf "${GREEN} *** 😃 You now have version number: "$NEWVERSION" 😃 *** ${NC}\n\n"
printf "${RED} *** For the configurations to take effect *** ${NC} \n "
printf "${RED} *** you should logoff or reboot your system *** ${NC} \n\n\n\n "
##################################################################################################
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.