text stringlengths 1 1.05M |
|---|
package operations
import (
"fmt"
"time"
"gitlab.com/gitlab-org/gitaly/v14/proto/go/gitalypb"
"google.golang.org/protobuf/types/known/timestamppb"
)
// cherryPickOrRevertRequest abstracts the request fields shared by the
// cherry-pick and revert RPCs so that both can be validated by a single
// helper (see validateCherryPickOrRevertRequest).
type cherryPickOrRevertRequest interface {
	GetUser() *gitalypb.User
	GetCommit() *gitalypb.GitCommit
	GetBranchName() []byte
	GetMessage() []byte
}
// validateCherryPickOrRevertRequest verifies that all mandatory fields of a
// cherry-pick or revert request are set, returning an error naming the first
// missing field, or nil when the request is complete.
func validateCherryPickOrRevertRequest(req cherryPickOrRevertRequest) error {
	switch {
	case req.GetUser() == nil:
		return fmt.Errorf("empty User")
	case req.GetCommit() == nil:
		return fmt.Errorf("empty Commit")
	case len(req.GetBranchName()) == 0:
		return fmt.Errorf("empty BranchName")
	case len(req.GetMessage()) == 0:
		return fmt.Errorf("empty Message")
	default:
		return nil
	}
}
// userTimestampProto abstracts requests that carry a user (with an optional
// timezone) and an optional explicit timestamp, so dateFromProto can derive
// the effective commit date for any of them.
type userTimestampProto interface {
	GetUser() *gitalypb.User
	GetTimestamp() *timestamppb.Timestamp
}
// dateFromProto computes the commit date for a request: the request's
// explicit timestamp when present (otherwise the current time), localized to
// the user's timezone when a user is set. Returns an error only when the
// user's timezone cannot be loaded.
func dateFromProto(p userTimestampProto) (time.Time, error) {
	result := time.Now()
	if ts := p.GetTimestamp(); ts != nil {
		result = ts.AsTime()
	}

	user := p.GetUser()
	if user == nil {
		return result, nil
	}

	location, err := time.LoadLocation(user.GetTimezone())
	if err != nil {
		return time.Time{}, err
	}
	return result.In(location), nil
}
|
# -*- coding: utf-8 -*-
def test_whitespace():
    from rasa_nlu.tokenizers.whitespace_tokenizer import WhitespaceTokenizer

    tokenizer = WhitespaceTokenizer()
    # Splitting is purely on whitespace, so punctuation stays attached to words.
    assert tokenizer.tokenize(u"Hi. My name is rasa") == [u'Hi.', u'My', u'name', u'is', u'rasa']
    # Non-ASCII words must survive tokenization unchanged.
    assert tokenizer.tokenize(u"hello ńöñàśçií") == [u'hello', u'ńöñàśçií']
def test_spacy(spacy_nlp_en):
    from rasa_nlu.tokenizers.spacy_tokenizer import SpacyTokenizer

    def check(sentence, expected_tokens):
        tokenizer = SpacyTokenizer(spacy_nlp_en)
        assert tokenizer.tokenize(sentence) == expected_tokens

    # Unlike whitespace splitting, spaCy emits punctuation as its own token.
    check(u"Hi. My name is rasa", [u'Hi', u'.', u'My', u'name', u'is', u'rasa'])
    check(u"hello ńöñàśçií", [u'hello', u'ńöñàśçií'])
def test_mitie():
    from rasa_nlu.tokenizers.mitie_tokenizer import MitieTokenizer

    tokenizer = MitieTokenizer()
    # MITIE drops the sentence-final period rather than emitting it as a token.
    assert tokenizer.tokenize(u"Hi. My name is rasa") == [u'Hi', u'My', u'name', u'is', u'rasa']
    # Polytonic Greek must tokenize cleanly.
    assert tokenizer.tokenize(u"ὦ ἄνδρες ᾿Αθηναῖοι") == [u'ὦ', u'ἄνδρες', u'᾿Αθηναῖοι']
    # tokenize_with_offsets additionally returns each token's character offset.
    assert tokenizer.tokenize_with_offsets(u"Forecast for lunch") == ([u'Forecast', u'for', u'lunch'], [0, 9, 13])
    expected_tokens = [u'hey', u'ńöñàśçií', u'how', u'\'re', 'you', '?']
    assert tokenizer.tokenize_with_offsets(u"hey ńöñàśçií how're you?") == (expected_tokens,
                                                                           [0, 4, 13, 16, 20, 23])
|
import React, { useState } from 'react';
import logo from './erlenmeyer-flask.jpeg';
import ExamsList from './components/ExamsList';
import axios from "axios";
import Questionnaire from './components/Questionnaire'
import './App.css';
import 'bootstrap/dist/css/bootstrap.min.css';
import Modal from 'react-bootstrap/Modal';
import Button from 'react-bootstrap/Button';
function App() {
const [questionnaire, setQuestionnaire] = useState({ exam: {}, questions: [] })
const [showModal, setShowModal] = useState(false);
const [completeStatus, setCompleteStatus] = useState("");
const [score, setScore] = useState(0);
const [exam, setExam] = useState([]);
const examChanged = (examination) => {
axios.get(window._env_.API_URL.concat('/questions/').concat(examination.name)).then(response => {
setQuestionnaire(response.data);
setExam(examination)
}).catch(err => console.log(err))
}
const handleReset = () => {
setQuestionnaire({ exam: {}, questions: [] })
setExam({})
setShowModal(false);
};
const handleClose = () => setShowModal(false);
const testComlpleted = (status) => {
setCompleteStatus(status)
setShowModal(true);
}
const changeScore = (e) => {
if (e.target.checked) {
setScore(score + 1);
} else {
setScore(score - 1);
}
}
return (
<div className="App">
<header className="App-header">
<img src={logo} className="App-logo" alt="logo" />
<p>
Practice Labs
</p>
</header>
<div className="body">
<ExamsList examChanged={examChanged} completed={testComlpleted} exam={exam} />
{questionnaire.exam.name ?
(<Questionnaire questions={questionnaire} changeScore={changeScore} />) : (<div>no exam</div>)
}
</div>
<Modal
show={showModal}
onHide={handleClose}
backdrop="static"
keyboard={false}
>
<Modal.Header closeButton>
<Modal.Title>{completeStatus}</Modal.Title>
</Modal.Header>
<Modal.Body>
You got a score of {Math.floor((score / questionnaire.questions.length) * 100)}%
</Modal.Body>
<Modal.Footer>
<Button variant="secondary" onClick={handleReset}>
Retake the Test
</Button>
<Button variant="primary" onClick={handleClose}>Revise</Button>
</Modal.Footer>
</Modal>
</div>
);
}
export default App;
|
#!/bin/bash
# Build the jsonpath PHP extension with code-coverage instrumentation.
# Abort on the first failing step so `make` never runs against a
# half-configured source tree.
set -e
phpize
./configure --enable-jsonpath --enable-code-coverage
make
|
import aioredis
R_OUTPUT = 'output'
class RedisHTMLGenerator:
    """Pushes output strings onto a Redis list and reports its length.

    Connections are created lazily via :meth:`connect` and must be released
    with :meth:`shutdown`.
    """

    def __init__(self, redis_url):
        self.redis_url = redis_url
        self.redis = None      # connection pool used for list operations
        self.session = None    # auxiliary single connection

    async def connect(self):
        """Open both Redis connections for this generator."""
        self.redis = await aioredis.create_redis_pool(self.redis_url)
        self.session = await aioredis.create_redis(self.redis_url)

    async def push_to_redis(self, output):
        """Append one UTF-8 encoded output string to the R_OUTPUT list."""
        await self.redis.rpush(R_OUTPUT, output.encode())

    async def calculate_total_size(self):
        """Return the current length of the R_OUTPUT list."""
        return await self.redis.llen(R_OUTPUT)

    async def shutdown(self):
        """Close all connections opened by connect().

        BUG FIX: the `session` connection opened in connect() was previously
        never closed, leaking a connection on every shutdown.
        """
        self.redis.close()
        await self.redis.wait_closed()
        if self.session is not None:
            self.session.close()
            await self.session.wait_closed()
import asyncio

html_template = """
<h1>arq demo</h1>
"""

# Example usage:
async def main():
    """Demo: push two items to the list, then report its size."""
    redis_html_generator = RedisHTMLGenerator('redis://localhost')
    await redis_html_generator.connect()
    await redis_html_generator.push_to_redis('data1')
    await redis_html_generator.push_to_redis('data2')
    size = await redis_html_generator.calculate_total_size()
    print(f"Total size of Redis list: {size}")
    await redis_html_generator.shutdown()

# BUG FIX: a bare `await main()` at module level is a SyntaxError — `await`
# is only valid inside an async function. Drive the coroutine properly:
asyncio.run(main())
<filename>gopium/struct.go
package gopium
// Field defines single structure field
// data transfer object abstraction.
// The gopium struct tag drives the tool's transform pipeline
// (pad filtering, comment annotation and forced tag grouping).
type Field struct {
	Name     string   `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Type     string   `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Size     int64    `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Align    int64    `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Tag      string   `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Exported bool     `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Embedded bool     `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Doc      []string `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Comment  []string `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
} // struct size: 114 bytes; struct align: 8 bytes; struct aligned size: 120 bytes; - 🌺 gopium @1pkg
// Struct defines single structure
// data transfer object abstraction.
// It aggregates the struct's own documentation with its ordered Fields.
type Struct struct {
	Name    string   `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Doc     []string `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Comment []string `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
	Fields  []Field  `gopium:"filter_pads,struct_annotate_comment,add_tag_group_force"`
} // struct size: 88 bytes; struct align: 8 bytes; struct aligned size: 88 bytes; - 🌺 gopium @1pkg
|
<filename>client/users-permissions-app/src/app/services/user-store.service.ts
import { Injectable } from '@angular/core';
import { SimpleGlobal } from 'ng2-simple-global';
import {BehaviorSubject, Observable} from 'rxjs';
@Injectable({
  providedIn: 'root'
})
export class UserStoreService {
  // Stream of the current user list; starts out empty.
  private users: BehaviorSubject<any>;

  constructor(private sg: SimpleGlobal) {
    // Nobody is signed in until set() is called.
    this.sg['user'] = null;
    this.users = new BehaviorSubject<any>([]);
  }

  /** Publish a new user list to every subscriber. */
  setUsers(updateUsers): void {
    this.users.next(updateUsers);
  }

  /** Read-only stream of the user list. */
  getUsers(): Observable<any> {
    return this.users.asObservable();
  }

  /** The signed-in user held in the global store (null when signed out). */
  get() {
    return this.sg['user'];
  }

  /** Replace the signed-in user in the global store. */
  set(user) {
    this.sg['user'] = user;
  }
}
|
import tensorflow as tf
import numpy as np

# Input data: y = 2x, a perfectly linear relationship.
X = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
y = np.array([2.0, 4.0, 6.0, 8.0, 10.0])

# Create a model: one dense unit on a scalar input, i.e. plain linear
# regression y = w*x + b.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(1, input_shape=(1,)))

# Compile the model with vanilla SGD and mean-squared-error loss.
model.compile(optimizer='sgd', loss='mean_squared_error')

# Fit the model; 200 epochs is ample for this toy dataset.
model.fit(X, y, epochs=200)

# Make predictions on an unseen input (expected ~2 * 7 = 14).
print(model.predict([7.0])) # Prints [[14.000074]]
#!/bin/sh
# make a jwt token signed with provided private key
CLAIMS_FILE=${1:-claims/example_claim.json}
DURATION=${2:-600}
PRIVATE_KEY=${3:-privateKey.pem}
HEADER='{"kid":"/privateKey.pem","typ":"JWT","alg":"RS256"}'

# base64url-encode stdin per RFC 7515: strip newlines that base64 inserts
# every 76 chars, map '+/' to '-_' and drop '=' padding.
# BUG FIX: the header encoding previously stripped newlines only BEFORE
# base64, so a header longer than 76 chars would have produced a broken token.
b64url() {
  base64 | tr -d '\r\n' | tr -- '+/' '-_' | tr -d '='
}

# header — printf avoids the trailing newline echo would feed into base64
jwt_header=$(printf '%s' "$HEADER" | b64url)

# payload: claims file augmented with iat/auth_time/exp and a random jti
NOW=$(date +%s)
DATA=$(jq --argjson iat "$NOW" \
  --argjson exp "$((NOW+DURATION))" \
  --arg uuid "$(cat /proc/sys/kernel/random/uuid)" \
  '. +{iat:$iat,auth_time:$iat,exp:$exp,jti:$uuid}' \
  "$CLAIMS_FILE")
# flatten jq's pretty-printed JSON (whitespace only) before encoding
payload=$(printf '%s' "$DATA" | tr -d '\r\n' | b64url)

# sign header.payload with RS256 (SHA-256 + RSA private key)
signature=$(printf '%s' "${jwt_header}.${payload}" | openssl dgst -sha256 -binary -sign "$PRIVATE_KEY" | b64url)

# assemble token (no trailing newline, matching the original behavior)
printf '%s.%s.%s' "$jwt_header" "$payload" "$signature"
|
<gh_stars>0
/**
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.quarkiverse.operatorsdk.samples.joke;
import javax.inject.Inject;
import io.javaoperatorsdk.operator.Operator;
import io.quarkus.runtime.Quarkus;
import io.quarkus.runtime.QuarkusApplication;
import io.quarkus.runtime.annotations.QuarkusMain;
@QuarkusMain
public class Main implements QuarkusApplication {

    /** Operator instance built and injected by the CDI container. */
    @Inject
    Operator operator;

    /**
     * JVM entry point: hands control to Quarkus, which bootstraps the
     * container and then invokes {@link #run(String...)}.
     */
    public static void main(String... args) {
        Quarkus.run(Main.class, args);
    }

    /**
     * Starts the operator's reconcilers and blocks until shutdown is
     * requested, then reports a clean exit.
     */
    @Override
    public int run(String... args) throws Exception {
        operator.start();
        Quarkus.waitForExit();
        return 0;
    }
}
|
#!/bin/bash
# Provision a browser-based shell: install shellinabox and zsh, set a root
# password, install oh-my-zsh, make zsh the default shell, then serve the
# terminal over HTTP.
apt update
apt install -y shellinabox zsh
# NOTE(review): hard-coded root password and piping a remote script straight
# into sh are only acceptable in a throwaway demo container — confirm that is
# the intended deployment.
echo "root:toor" | chpasswd
export CHSH=yes
echo Y | sh -c "$(wget -O- https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
# Switch every login shell from bash to zsh in /etc/passwd.
sed -i 's/bash/zsh/g' /etc/passwd
# Serve the shell over plain HTTP (no SSL) with the black-on-white theme.
shellinaboxd --disable-ssl --css /black_on_white.css
|
<filename>syweb/webclient/app/app-filter.js
/*
Copyright 2014 OpenMarket Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
'use strict';
angular.module('matrixWebClient')
// Format a millisecond duration as a coarse human-readable unit: "42s",
// "5m", "3h" or "2d ". Falsy input yields undefined; negative input "0s".
.filter('duration', function() {
    return function(time) {
        if (!time) return;
        var t = parseInt(time / 1000);
        var s = t % 60;
        var m = parseInt(t / 60) % 60;
        var h = parseInt(t / (60 * 60)) % 24;
        var d = parseInt(t / (60 * 60 * 24));
        if (t < 60) {
            if (t < 0) {
                return "0s";
            }
            return s + "s";
        }
        if (t < 60 * 60) {
            return m + "m"; // + s + "s";
        }
        if (t < 24 * 60 * 60) {
            return h + "h"; // + m + "m";
        }
        return d + "d "; // + h + "h";
    };
})
// Order room members: most recently active first, then by presence level.
// NOTE(review): $sce is injected here but never used — confirm whether it
// can be dropped from this filter's dependencies.
.filter('orderMembersList', function($sce) {
    return function(members) {
        var filtered = [];
        angular.forEach(members, function(member, key) {
            member["id"] = key;
            // do not add members who have left.
            var ignoreList = ["leave", "kick", "ban"];
            if (ignoreList.indexOf(member.event.content.membership) != -1) {
                return;
            }
            filtered.push(member);
        });
        filtered.sort(function (a, b) {
            // Sort members on their last_active absolute time
            a = a.user;
            b = b.user;
            var aLastActiveTS = 0, bLastActiveTS = 0;
            if (a && a.event && a.event.content && a.event.content.last_active_ago !== undefined) {
                aLastActiveTS = a.last_updated - a.event.content.last_active_ago;
            }
            if (b && b.event && b.event.content && b.event.content.last_active_ago !== undefined) {
                bLastActiveTS = b.last_updated - b.event.content.last_active_ago;
            }
            if (aLastActiveTS || bLastActiveTS) {
                return bLastActiveTS - aLastActiveTS;
            }
            else {
                // If they do not have last_active_ago, sort them according to their presence state
                // Online users go first amongs members who do not have last_active_ago
                var presenceLevels = {
                    offline: 1,
                    unavailable: 2,
                    online: 4,
                    free_for_chat: 3
                };
                var aPresence = (a && a.event && a.event.content.presence in presenceLevels) ? presenceLevels[a.event.content.presence] : 0;
                var bPresence = (b && b.event && b.event.content.presence in presenceLevels) ? presenceLevels[b.event.content.presence] : 0;
                return bPresence - aPresence;
            }
        });
        return filtered;
    };
})
// Mark already-trusted HTML as safe for ng-bind-html. Only use on content
// that has been sanitized elsewhere.
.filter('unsafe', ['$sce', function($sce) {
    return function(text) {
        return $sce.trustAsHtml(text);
    };
}])
// Exactly the same as ngSanitize's linky but instead of pushing sanitized
// text in the addText function, we just push the raw text.
.filter('unsanitizedLinky', ['$sanitize', function($sanitize) {
    var LINKY_URL_REGEXP =
        /((ftp|https?):\/\/|(mailto:)?[A-Za-z0-9._%+-]+@)\S*[^\s.;,(){}<>"]/,
        MAILTO_REGEXP = /^mailto:/;
    return function(text, target) {
        if (!text) return text;
        var match;
        var raw = text;
        var html = [];
        var url;
        var i;
        // Consume the input left to right, alternating raw-text and link spans.
        while ((match = raw.match(LINKY_URL_REGEXP))) {
            // We can not end in these as they are sometimes found at the end of the sentence
            url = match[0];
            // if we did not match ftp/http/mailto then assume mailto
            if (match[2] == match[3]) url = 'mailto:' + url;
            i = match.index;
            addText(raw.substr(0, i));
            addLink(url, match[0].replace(MAILTO_REGEXP, ''));
            raw = raw.substring(i + match[0].length);
        }
        addText(raw);
        // The whole rebuilt string is sanitized once at the end.
        return $sanitize(html.join(''));
        function addText(text) {
            if (!text) {
                return;
            }
            html.push(text);
        }
        function addLink(url, text) {
            html.push('<a ');
            if (angular.isDefined(target)) {
                html.push('target="');
                html.push(target);
                html.push('" ');
            }
            html.push('href="');
            html.push(url);
            html.push('">');
            addText(text);
            html.push('</a>');
        }
    };
}]);
|
<reponame>ChristopherChudzicki/mathbox<filename>config/webpack.config.base.js
const path = require("path");
const webpack = require("webpack");
const TerserPlugin = require("terser-webpack-plugin");
// Shared base webpack configuration; environment-specific configs extend it.
module.exports = {
  resolve: {
    // Resolve imports without extensions as TypeScript first, then JS.
    extensions: [".ts", ".js"],
  },
  plugins: [
    // Shim Node globals (Buffer, process) for browser bundles.
    new webpack.ProvidePlugin({
      Buffer: ["buffer", "Buffer"],
      process: "process/browser",
    }),
  ],
  module: {
    rules: [
      {
        test: /\.ts$/,
        loader: "ts-loader",
        exclude: /node_modules/,
        options: {
          configFile: path.resolve(__dirname, "../tsconfig.json"),
        },
      },
    ],
  },
  // Activate source maps for the bundles in order to preserve the original
  // source when the user debugs the application
  devtool: "source-map",
  optimization: {
    minimize: true,
    minimizer: [
      // Only minify the .min.js bundles; other outputs stay readable.
      new TerserPlugin({
        test: /\.min\.js$/,
      }),
    ],
  },
};
|
<gh_stars>0
// Manual test harness for D3.makeTree: prints each generated tree next to
// its expected textual form ("corretto" is Italian for "correct").
object D3Main extends App {
  // Depth 2: root 1 with leaf children 2 and 3.
  val t1: Tree = D3.makeTree(2)
  println(t1+" [corretto: [[-,2,-],1,[-,3,-]]]")
  // Depth 1: a single leaf node.
  val t2 = D3.makeTree(1)
  println(t2+" [corretto: [-,1,-]]")
  // Depth 3: complete tree with nodes numbered 1..7.
  val t3 = D3.makeTree(3)
  println(t3+" [corretto: [[[-,4,-],2,[-,5,-]],1,[[-,6,-],3,[-,7,-]]]]")
  // Depth 0: the empty tree.
  val t4 = D3.makeTree(0)
  println(t4+" [corretto: -]")
}
|
<gh_stars>0
import argparse
import traceback
import os
import csv
class Annotations:
    """Parses a gff3 annotation file and writes, per feature, its genomic and
    transcriptomic coordinate intervals to a tab-separated output file."""

    # Defaults, overwritten per instance in __init__.
    infile = ""
    outfile = ""

    def __init__(self, inf, outf):
        self.infile = inf
        self.outfile = outf

    def is_data_line(self, list_of_fields):
        """Return True for a gff3 data row: at least 9 columns and the first
        column is not a '#' comment marker."""
        return len(list_of_fields) >= 9 and not list_of_fields[0].startswith("#")

    def extract_transcript(self, col9):
        """Return the ENST transcript id from the gff3 attributes column
        (column 9), or None when the feature carries no transcript id
        (e.g. gene rows)."""
        for word in col9.split(';'):
            if word.startswith("ID=ENST"):
                return word[3:]       # strip the "ID=" prefix
            if word.startswith("Parent=ENST"):
                return word[7:]       # strip the "Parent=" prefix
        return None

    def parse(self):
        """Stream the gff3 input and emit one tsv row per feature with its
        genomic and derived transcriptomic start/end coordinates."""
        print("Parsing...")
        with open(self.infile, "r") as infile, open(self.outfile, "w") as outfile:
            tsvin = csv.reader(infile, delimiter='\t')
            tsvout = csv.writer(outfile, delimiter='\t')
            outs = ('tr_id', 'feature', 'strand',
                    'genomic_start', 'genomic_end',
                    'transcriptomic_start', 'transcriptomic_end')
            tsvout.writerow(outs)
            # keep genomic start/end and transcriptomic start/end
            this_feature = {'gs': 0, 'ge': 0, 'ts': 0, 'te': 0}
            prev_exon = {'gs': 0, 'ge': 0, 'ts': 0, 'te': 0}
            for oneline in tsvin:
                if self.is_data_line(oneline):
                    comments = oneline[8]
                    tr_id = self.extract_transcript(comments)
                    feature = oneline[2]
                    gs = int(oneline[3])
                    ge = int(oneline[4])
                    strand = "pos"
                    if oneline[6] == "-":
                        strand = "neg"
                    length = ge - gs + 1
                    if feature == "transcript":
                        # A transcript restarts transcriptomic coordinates at 1.
                        ts = 1
                        te = ts + length - 1
                        this_feature = {'gs': gs, 'ge': ge, 'ts': ts, 'te': te}
                        prev_exon = {'gs': 0, 'ge': 0, 'ts': 0, 'te': 0}
                    elif feature == "exon":
                        # Exons are laid end-to-end in transcript coordinates.
                        ts = prev_exon['te'] + 1
                        te = ts + length - 1
                        this_feature = {'gs': gs, 'ge': ge, 'ts': ts, 'te': te}
                        prev_exon = {'gs': gs, 'ge': ge, 'ts': ts, 'te': te}
                    else:
                        # Sub-exon features (CDS, UTR, ...) are placed relative
                        # to the enclosing exon's coordinates.
                        ts = prev_exon['ts'] + (gs - prev_exon['gs'])
                        te = ts + length - 1
                        this_feature = {'gs': gs, 'ge': ge, 'ts': ts, 'te': te}
                    if tr_id:  # genes do not have a transcript id, so ignore them
                        outs = (tr_id, feature, strand,
                                this_feature['gs'], this_feature['ge'],
                                this_feature['ts'], this_feature['te'])
                        tsvout.writerow(outs)

    def arg_parser():
        """Parse command-line arguments into the module-level `args`.

        NOTE(review): deliberately has no `self`/`cls` — it is invoked as
        `Annotations.arg_parser()` and behaves like a static method.
        """
        parser = argparse.ArgumentParser(description="Extract intervals from gff3 file.")
        parser.add_argument('infile', help='annotation file (gff3)', type=str)
        parser.add_argument('outfile', help='output file (tsv)', type=str)
        parser.add_argument('--debug', help='See tracebacks', action='store_true')
        global args
        args = parser.parse_args()

    def demo(self):
        # Placeholder for future example usage.
        pass
if __name__ == '__main__':
    try:
        Annotations.arg_parser()
        ann = Annotations(args.infile, args.outfile)
        ann.parse()
    except Exception:
        print("\nERROR!\n")
        # BUG FIX: if argument parsing itself raised, the global `args` was
        # never bound and referencing it here raised a masking NameError.
        if 'args' in globals() and args.debug:
            print(traceback.format_exc())
        else:
            print('Run with --debug for traceback.')
|
#!/bin/bash
# NOTE(review): shebang changed from /bin/sh — this script relies on
# bash-only features (`set -o pipefail`, the `function` keyword, `[[ ]]`
# tests and arrays), which fail under a strictly POSIX /bin/sh.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path, either absolute or relative to BUILT_PRODUCTS_DIR.
install_framework()
{
  # Resolve the source: prefer the path under BUILT_PRODUCTS_DIR, then its
  # basename there, then the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  # Locate the framework's executable, following a symlink if necessary.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to the .dSYM bundle; $2 (optional): warn when no matching arch.
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"
    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      # NOTE(review): the echoed command says "${basename}.framework.dSYM" but
      # the executed rsync uses "${basename}.dSYM" — confirm which is intended.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}
# Used as a return value for each invocation of `strip_invalid_archs` function.
# 0 = binary was stripped (or needed no stripping); 1 = no matching arch found.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
# $1: binary path; $2 (optional): whether to warn when no arch matches.
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}
# Copies the bcsymbolmap files of a vendored framework
# $1: path to a single .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # CONSISTENCY FIX: the inner quotes in the echoed command were unescaped,
    # so the logged command line did not match the rsync actually executed
    # (every sibling helper escapes them).
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op when code signing is disabled
# or not required by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Background the signing so multiple frameworks sign concurrently; the
      # caller waits for completion at the end of the script.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Install the vendored FlyBuy frameworks. The framework set is identical for
# both configurations, so the previously duplicated Debug/Release branches
# are merged into a single condition.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${PODS_XCFRAMEWORKS_BUILD_DIR}/FlyBuy/FlyBuy.framework"
  install_framework "${PODS_XCFRAMEWORKS_BUILD_DIR}/FlyBuyPickup/FlyBuyPickup.framework"
  install_framework "${PODS_XCFRAMEWORKS_BUILD_DIR}/FlyBuyNotify/FlyBuyNotify.framework"
  install_framework "${PODS_XCFRAMEWORKS_BUILD_DIR}/FlyBuyPresence/FlyBuyPresence.framework"
fi
# When signing runs in the background, wait for all signing jobs to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/usr/bin/env bash
# NOTE(review): shebang switched from sh to bash — the script uses `[[ ]]`
# and `==` in tests, which are not POSIX sh.

# bypy configuration directory (empty = bypy default)
CONFIG_DIR=${BYPY_CONFIG_DIR:-""}
# sync direction: up or down
SYNC_TYPE=${BYPY_SYNC_TYPE:-up}
# interval between sync passes
SYNC_INTERVAL=${BYPY_SYNC_INTERVAL:-6h}
# local directories to sync
# whichever direction is used, the remote directory is named after the local
# one: syncing /mnt/media maps to /apps/bypy/media on the cloud side
SYNC_DIRS=${BYPY_SYNC_DIRS:-""}
# whether to delete files that no longer exist on the source side
SYNC_DELETE=${BYPY_SYNC_DELETE-true}
# extra options passed to every bypy invocation
SYNC_OPTS=${BYPY_SYNC_OPTS:-"-vvvv"}

is_trap_sync=
BYPY="bypy"
[[ -n "$CONFIG_DIR" ]] && BYPY="$BYPY --config-dir $CONFIG_DIR"
# Print a horizontal separator line.
echo_line() {
  echo "========================================"
}

# Log a message prefixed with a timestamp.
log() {
  echo "$(date +%Y-%m-%d\ %H:%M:%S) $@"
}

# Log a message and follow it with a separator line.
log_line() {
  # BUG FIX: "$@" is quoted so multi-word messages are forwarded to log()
  # without re-splitting or glob expansion.
  log "$@"
  echo_line
}
# Snapshot the bypy config directory into a timestamped bak/ subfolder and
# prune snapshots older than two days.
backup() {
  local cfg_dir="$CONFIG_DIR"
  [[ -z "$cfg_dir" ]] && cfg_dir=$HOME/.bypy
  bak_dir="$cfg_dir/bak/$(date +%Y%m%d%H%M%S)"
  mkdir -p $bak_dir
  cp $cfg_dir/*.* $bak_dir/
  # drop backups older than two days
  find $(dirname $bak_dir) -maxdepth 1 -mindepth 1 -type d -mtime +2 -exec rm -rf {} \;
}
# Runs before each sync pass: back up the config, then log quota/account info.
pre_sync() {
  backup
  echo_line
  log "Sync $SYNC_TYPE start..."
  $BYPY info
  echo_line
}
# Runs after each sync pass: log quota/account info and the next-run notice.
post_sync() {
  echo
  echo_line
  $BYPY info
  log "Sync $SYNC_TYPE finished, wait $SYNC_INTERVAL for next."
  echo_line
  echo
}
# Run one full sync pass over every directory in SYNC_DIRS, in the direction
# given by SYNC_TYPE.
sync() {
  pre_sync
  for path in $SYNC_DIRS; do
    name=$(basename $path)
    # only directories can be synced
    if [ ! -d "$path" ]; then
      # BUG FIX: the message previously interpolated the undefined variable
      # $d, printing an empty name instead of the offending path.
      log_line "ERROR: $path is not a directory." 2>&1
      continue
    fi
    if [ "$SYNC_TYPE" == 'down' ]; then
      log_line "SYNC DOWN: /app/bypy/$name => $path"
      $BYPY $SYNC_OPTS syncdown $name $path $SYNC_DELETE
    else
      log_line "SYNC UP: $path => /app/bypy/$name"
      $BYPY $SYNC_OPTS syncup $path $name $SYNC_DELETE
    fi
  done
  post_sync
}
# Signal handler: run an immediate sync pass and flag it so the main loop
# does not run a duplicate pass right after.
trap_sync() {
  echo_line
  log "trap sync signal..."
  echo_line
  echo
  is_trap_sync=true
  sync
}
# Print the effective configuration at startup.
echo "SYNC_TYPE: $SYNC_TYPE"
echo "SYNC_DIRS: $SYNC_DIRS"
echo "SYNC_INTERVAL: $SYNC_INTERVAL"
echo "CONFIG_DIR: $CONFIG_DIR"
echo "SYNC_OPTS: $SYNC_OPTS"
echo "SYNC_DELETE: $SYNC_DELETE"
if [ -z "$SYNC_DIRS" ]; then
  log_line "ERROR: \$SYNC_DIRS is empty." 2>&1
  exit 1
fi
# SIGUSR1 triggers an immediate sync
trap trap_sync SIGUSR1
# Main loop: sync, then sleep SYNC_INTERVAL. Sleeping in a background job and
# wait-ing on it keeps the shell responsive to SIGUSR1; if the trap already
# ran a pass this cycle, the regular pass is skipped.
while :; do
  if [ "$is_trap_sync" != "true" ]; then
    sync
  fi
  is_trap_sync=
  sleep $SYNC_INTERVAL &
  pid=$!
  wait $pid
  kill -9 $pid 1>/dev/null 2>&1
done
|
<reponame>ChanghunKang/kibana
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { DynamicStyleProperty } from './dynamic_style_property';
import _ from 'lodash';
import { getComputedFieldName } from '../style_util';
import { getColorRampStops } from '../../color_utils';
import { ColorGradient } from '../../components/color_gradient';
import React from 'react';
import { EuiFlexGroup, EuiFlexItem, EuiSpacer, EuiText, EuiToolTip } from '@elastic/eui';
import { VectorIcon } from '../components/legend/vector_icon';
import { VECTOR_STYLES } from '../vector_style_defaults';
export class DynamicColorProperty extends DynamicStyleProperty {
syncCircleColorWithMb(mbLayerId, mbMap, alpha) {
const color = this._getMbColor();
mbMap.setPaintProperty(mbLayerId, 'circle-color', color);
mbMap.setPaintProperty(mbLayerId, 'circle-opacity', alpha);
}
syncIconColorWithMb(mbLayerId, mbMap) {
const color = this._getMbColor();
mbMap.setPaintProperty(mbLayerId, 'icon-color', color);
}
syncHaloBorderColorWithMb(mbLayerId, mbMap) {
const color = this._getMbColor();
mbMap.setPaintProperty(mbLayerId, 'icon-halo-color', color);
}
syncCircleStrokeWithMb(pointLayerId, mbMap, alpha) {
const color = this._getMbColor();
mbMap.setPaintProperty(pointLayerId, 'circle-stroke-color', color);
mbMap.setPaintProperty(pointLayerId, 'circle-stroke-opacity', alpha);
}
syncFillColorWithMb(mbLayerId, mbMap, alpha) {
const color = this._getMbColor();
mbMap.setPaintProperty(mbLayerId, 'fill-color', color);
mbMap.setPaintProperty(mbLayerId, 'fill-opacity', alpha);
}
syncLineColorWithMb(mbLayerId, mbMap, alpha) {
const color = this._getMbColor();
mbMap.setPaintProperty(mbLayerId, 'line-color', color);
mbMap.setPaintProperty(mbLayerId, 'line-opacity', alpha);
}
syncLabelColorWithMb(mbLayerId, mbMap, alpha) {
const color = this._getMbColor();
mbMap.setPaintProperty(mbLayerId, 'text-color', color);
mbMap.setPaintProperty(mbLayerId, 'text-opacity', alpha);
}
isCustomColorRamp() {
return this._options.useCustomColorRamp;
}
supportsFeatureState() {
return true;
}
isScaled() {
return !this.isCustomColorRamp();
}
isRanged() {
return !this.isCustomColorRamp();
}
hasBreaks() {
return this.isCustomColorRamp();
}
_getMbColor() {
const isDynamicConfigComplete =
_.has(this._options, 'field.name') && _.has(this._options, 'color');
if (!isDynamicConfigComplete) {
return null;
}
if (
this._options.useCustomColorRamp &&
(!this._options.customColorRamp || !this._options.customColorRamp.length)
) {
return null;
}
return this._getMBDataDrivenColor({
targetName: getComputedFieldName(this._styleName, this._options.field.name),
colorStops: this._getMBColorStops(),
isSteps: this._options.useCustomColorRamp,
});
}
_getMBDataDrivenColor({ targetName, colorStops, isSteps }) {
if (isSteps) {
const firstStopValue = colorStops[0];
const lessThenFirstStopValue = firstStopValue - 1;
return [
'step',
['coalesce', ['feature-state', targetName], lessThenFirstStopValue],
'rgba(0,0,0,0)', // MB will assign the base value to any features that is below the first stop value
...colorStops,
];
}
return [
'interpolate',
['linear'],
['coalesce', ['feature-state', targetName], -1],
-1,
'rgba(0,0,0,0)',
...colorStops,
];
}
_getMBColorStops() {
if (this._options.useCustomColorRamp) {
return this._options.customColorRamp.reduce((accumulatedStops, nextStop) => {
return [...accumulatedStops, nextStop.stop, nextStop.color];
}, []);
}
return getColorRampStops(this._options.color);
}
renderRangeLegendHeader() {
if (this._options.color) {
return <ColorGradient colorRampName={this._options.color} />;
} else {
return null;
}
}
/**
 * Renders the small icon shown next to one legend stop.
 * Label-color styles render a "Tx" text sample in the stop color; all other
 * styles render a VectorIcon tinted for whichever property this style drives.
 */
_renderStopIcon(color, isLinesOnly, isPointsOnly, symbolId) {
  if (this.getStyleName() === VECTOR_STYLES.LABEL_COLOR) {
    // A line/fill swatch would be misleading for label color; show sample text.
    const style = { color: color };
    return (
      <EuiText size={'xs'} style={style}>
        Tx
      </EuiText>
    );
  }
  const loadIsLinesOnly = () => {
    return isLinesOnly;
  };
  const loadIsPointsOnly = () => {
    return isPointsOnly;
  };
  const getColorForProperty = (styleProperty, isLinesOnly) => {
    if (isLinesOnly) {
      return color;
    }
    // Only tint the property this style drives; leave the others uncolored.
    return this.getStyleName() === styleProperty ? color : 'none';
  };
  return (
    <VectorIcon
      symbolId={symbolId}
      loadIsPointsOnly={loadIsPointsOnly}
      loadIsLinesOnly={loadIsLinesOnly}
      getColorForProperty={getColorForProperty}
    />
  );
}
/**
 * Renders one legend row (formatted stop value + stop icon) per custom color
 * ramp entry. Returns null when no custom ramp is configured.
 */
_renderColorbreaks({ isLinesOnly, isPointsOnly, symbolId }) {
  if (!this._options.customColorRamp) {
    return null;
  }
  return this._options.customColorRamp.map((config, index) => {
    // Field formatter gives the stop value its display form (units, etc.).
    const value = this.formatField(config.stop);
    return (
      <EuiFlexItem key={index}>
        <EuiFlexGroup direction={'row'} gutterSize={'none'}>
          <EuiFlexItem>
            <EuiText size={'xs'}>{value}</EuiText>
          </EuiFlexItem>
          <EuiFlexItem>
            {this._renderStopIcon(config.color, isLinesOnly, isPointsOnly, symbolId)}
          </EuiFlexItem>
        </EuiFlexGroup>
      </EuiFlexItem>
    );
  });
}
/**
 * Renders the legend for a style with discrete breaks: the per-stop rows
 * followed by the (truncated, tooltipped) field label.
 */
renderBreakedLegend({ fieldLabel, isPointsOnly, isLinesOnly, symbolId }) {
  return (
    <div>
      <EuiSpacer size="s" />
      <EuiFlexGroup direction={'column'} gutterSize={'none'}>
        {this._renderColorbreaks({
          isPointsOnly,
          isLinesOnly,
          symbolId,
        })}
      </EuiFlexGroup>
      <EuiFlexGroup gutterSize="xs" justifyContent="spaceAround">
        <EuiFlexItem grow={false}>
          {/* Tooltip shows the full label; inline text is truncated at 180px. */}
          <EuiToolTip position="top" title={this.getDisplayStyleName()} content={fieldLabel}>
            <EuiText className="eui-textTruncate" size="xs" style={{ maxWidth: '180px' }}>
              <small>
                <strong>{fieldLabel}</strong>
              </small>
            </EuiText>
          </EuiToolTip>
        </EuiFlexItem>
      </EuiFlexGroup>
    </div>
  );
}
}
|
'use strict';
const prompts = require('prompts');
const createPage = require('../utils/createPage');
module.exports = function () {
(async () => {
const page = await prompts({
type: 'text',
name: 'name',
message: 'Enter the name of the page:',
});
const options = {
pageName: page.name,
dryRun: false,
};
createPage(options);
})();
};
|
#!/usr/bin/env bash
#-------------------------------------------------------------------------
#_ _ _ _ _ _ _ _
#| | | | __ _ ___| | __ | |_| |__ ___ __ _____ _ __| | __| | |
#| |_| |/ _` |/ __| |/ / | __| '_ \ / _ \ \ \ /\ / / _ \| '__| |/ _` | |
#| _ | (_| | (__| < | |_| | | | __/ \ V V / (_) | | | | (_| |_|
#|_| |_|\__,_|\___|_|\_\ \__|_| |_|\___| \_/\_/ \___/|_| |_|\__,_(_)
#-------------------------------------------------------------------------
# Final stage of an Arch-style install: bootloader, display manager, services,
# and restoring sudo defaults. NOTE(review): presumably executed inside the
# installed system (chroot) by a parent script -- confirm.
echo -e "\nFINAL SETUP AND CONFIGURATION"
echo "--------------------------------------"
echo "-- GRUB EFI Bootloader Install&Check--"
echo "--------------------------------------"
# Only install GRUB for EFI firmware; BIOS systems skip straight to mkconfig.
# NOTE(review): ${DISK} is not set in this script -- it must come from the
# calling environment; confirm it is exported before this runs.
if [[ -d "/sys/firmware/efi" ]]; then
    grub-install --efi-directory=/boot ${DISK}
fi
grub-mkconfig -o /boot/grub/grub.cfg
# ------------------------------------------------------------------------
echo -e "\nEnabling Login Display Manager"
systemctl enable sddm.service
echo -e "\nSetup SDDM Theme"
cat <<EOF > /etc/sddm.conf
[Theme]
Current=Nordic
EOF
# ------------------------------------------------------------------------
echo -e "\nEnabling essential services"
systemctl enable cups.service
# One-shot clock sync before enabling the ntp daemon.
ntpd -qg
systemctl enable ntpd.service
# Hand networking over from dhcpcd to NetworkManager.
systemctl disable dhcpcd.service
systemctl stop dhcpcd.service
systemctl enable NetworkManager.service
systemctl enable bluetooth
echo "
###############################################################################
# Cleaning
###############################################################################
"
# Remove no password sudo rights
sed -i 's/^%wheel ALL=(ALL) NOPASSWD: ALL/# %wheel ALL=(ALL) NOPASSWD: ALL/' /etc/sudoers
# Add sudo rights
sed -i 's/^# %wheel ALL=(ALL) ALL/%wheel ALL=(ALL) ALL/' /etc/sudoers
# Replace in the same state
# NOTE(review): lowercase $pwd is not the shell's $PWD builtin; it must be set
# by a parent script. If unset, this expands to plain `cd` (goes to $HOME) --
# confirm the intended variable.
cd $pwd
echo "
###############################################################################
# Done - Please Eject Install Media and Reboot
###############################################################################
"
import { RegularExpression } from "../RegExp";
/** The namespace that governs molang context (`c.` / `context.`) variables. */
export namespace Context {
  /**
   * The pattern used to find context variables that are read.
   * Matches `c.<name>` or `context.<name>`, skipping occurrences followed by
   * whitespace and `=` (assignments). NOTE(review): an assignment written
   * without a space before `=` would still match -- confirm that is intended.
   */
  export const getUsedPatt: RegExp = /\b(?:c|context)\.([a-z0-9_]+)\b(?![ \t]+=)/gim;
  /** Sifts through all the provided strings searching for all instances of
   * context variables being used.
   * @param data The string(s)/container to look through
   * @param receiver The receiving array */
  export function getUsing(data: any, receiver: string[]): void {
    RegularExpression.harvest(data, getUsedPatt, receiver);
  }
}
|
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.helios.system;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.spotify.helios.client.HeliosClient;
import com.spotify.helios.common.descriptors.Deployment;
import com.spotify.helios.common.descriptors.Goal;
import com.spotify.helios.common.descriptors.HostStatus;
import com.spotify.helios.common.descriptors.Job;
import com.spotify.helios.common.descriptors.JobId;
import com.spotify.helios.common.descriptors.PortMapping;
import com.spotify.helios.common.descriptors.TaskStatus;
import com.spotify.helios.common.protocol.CreateJobResponse;
import com.spotify.helios.common.protocol.JobDeleteResponse;
import com.spotify.helios.common.protocol.JobDeployResponse;
import com.spotify.helios.common.protocol.JobUndeployResponse;
import org.junit.Test;
import java.util.Map;
import static com.spotify.helios.common.descriptors.Goal.START;
import static com.spotify.helios.common.descriptors.HostStatus.Status.UP;
import static com.spotify.helios.common.descriptors.TaskStatus.State.RUNNING;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * End-to-end system test of the job lifecycle against a live master and agent:
 * create, duplicate-create, query by several patterns, deploy (including
 * bogus job/host failure cases), undeploy/redeploy, verify the RUNNING state,
 * undeploy, and finally delete the job.
 */
public class DeploymentTest extends SystemTestBase {

  // A syntactically valid job id that was never registered with the master.
  private static final JobId BOGUS_JOB = new JobId("bogus", "job", Strings.repeat("0", 40));
  // A host name that was never registered with the master.
  private static final String BOGUS_HOST = "BOGUS_HOST";

  private final int externalPort = temporaryPorts.localPort("external");

  @Test
  public void test() throws Exception {
    final Map<String, PortMapping> ports = ImmutableMap.of(
        "foos", PortMapping.of(17, externalPort));

    startDefaultMaster();
    final HeliosClient client = defaultClient();
    startDefaultAgent(testHost());

    // Create a job
    final Job job = Job.newBuilder()
        .setName(testJobName)
        .setVersion(testJobVersion)
        .setImage(BUSYBOX)
        .setCommand(IDLE_COMMAND)
        .setPorts(ports)
        .build();
    final JobId jobId = job.getId();
    final CreateJobResponse created = client.createJob(job).get();
    assertEquals(CreateJobResponse.Status.OK, created.getStatus());

    // Creating the identical job again must be rejected as a duplicate.
    final CreateJobResponse duplicateJob = client.createJob(job).get();
    assertEquals(CreateJobResponse.Status.JOB_ALREADY_EXISTS, duplicateJob.getStatus());

    // Try querying for the job: non-matching pattern, name, name:version, full id.
    final Map<JobId, Job> noMatchJobs = client.jobs(testJobName + "not_matching").get();
    assertTrue(noMatchJobs.isEmpty());
    final Map<JobId, Job> matchJobs1 = client.jobs(testJobName).get();
    assertEquals(ImmutableMap.of(jobId, job), matchJobs1);
    final Map<JobId, Job> matchJobs2 = client.jobs(testJobName + ":" + testJobVersion).get();
    assertEquals(ImmutableMap.of(jobId, job), matchJobs2);
    final Map<JobId, Job> matchJobs3 = client.jobs(job.getId().toString()).get();
    assertEquals(ImmutableMap.of(jobId, job), matchJobs3);

    // Wait for agent to come up
    awaitHostRegistered(client, testHost(), LONG_WAIT_MINUTES, MINUTES);
    awaitHostStatus(client, testHost(), UP, LONG_WAIT_MINUTES, MINUTES);

    // Deploy the job on the agent
    final Deployment deployment = Deployment.of(jobId, START);
    final JobDeployResponse deployed = client.deploy(deployment, testHost()).get();
    assertEquals(JobDeployResponse.Status.OK, deployed.getStatus());

    final JobDeployResponse deployed2 = client.deploy(deployment, testHost()).get();
    assertEquals(JobDeployResponse.Status.JOB_ALREADY_DEPLOYED, deployed2.getStatus());

    final JobDeployResponse deployed3 = client.deploy(Deployment.of(BOGUS_JOB, START),
        testHost()).get();
    assertEquals(JobDeployResponse.Status.JOB_NOT_FOUND, deployed3.getStatus());

    final JobDeployResponse deployed4 = client.deploy(deployment, BOGUS_HOST).get();
    assertEquals(JobDeployResponse.Status.HOST_NOT_FOUND, deployed4.getStatus());

    // undeploy and redeploy to make sure things still work in the face of the tombstone
    JobUndeployResponse undeployResp = client.undeploy(jobId, testHost()).get();
    assertEquals(JobUndeployResponse.Status.OK, undeployResp.getStatus());

    final JobDeployResponse redeployed = client.deploy(deployment, testHost()).get();
    assertEquals(JobDeployResponse.Status.OK, redeployed.getStatus());

    // Check that the job is in the desired state
    final Deployment fetchedDeployment = client.deployment(testHost(), jobId).get();
    assertEquals(deployment, fetchedDeployment);

    // Wait for the job to run
    TaskStatus taskStatus;
    taskStatus = awaitJobState(client, testHost(), jobId, RUNNING, LONG_WAIT_MINUTES, MINUTES);
    assertEquals(job, taskStatus.getJob());

    // A deployed job must not be deletable.
    assertEquals(JobDeleteResponse.Status.STILL_IN_USE, client.deleteJob(jobId).get().getStatus());

    // Wait for a while and make sure that the container is still running
    Thread.sleep(5000);
    final HostStatus hostStatus = client.hostStatus(testHost()).get();
    taskStatus = hostStatus.getStatuses().get(jobId);
    assertEquals(RUNNING, taskStatus.getState());

    // Undeploy the job
    final JobUndeployResponse undeployed = client.undeploy(jobId, testHost()).get();
    assertEquals(JobUndeployResponse.Status.OK, undeployed.getStatus());

    // Make sure that it is no longer in the desired state
    final Deployment undeployedJob = client.deployment(testHost(), jobId).get();
    assertTrue(undeployedJob == null || undeployedJob.getGoal() == Goal.UNDEPLOY);

    // Wait for the task to disappear
    awaitTaskGone(client, testHost(), jobId, LONG_WAIT_MINUTES, MINUTES);

    // Verify that the job can be deleted
    assertEquals(JobDeleteResponse.Status.OK, client.deleteJob(jobId).get().getStatus());

    // Verify that a nonexistent job returns JOB_NOT_FOUND
    assertEquals(JobDeleteResponse.Status.JOB_NOT_FOUND, client.deleteJob(jobId).get().getStatus());
  }
}
|
<reponame>bsato212/lambda-101
/*
Sample event object for file "2019-08-26_Tactic_Updates.csv" uploaded to bucket "bsato212-test":
{
"bucket":"bsato212-test",
"contentType":"text/csv",
"crc32c":"EBF78Q==",
"etag":"CMnjtr+M1uQCEAE=",
"generation":"1568662849696201",
"id":"bsato212-test/2019-08-26_Tactic_Updates.csv/1568662849696201",
"kind":"storage#object",
"md5Hash":"UwRWQgxOelWIl5kmPzt9xA==",
"mediaLink":"https://www.googleapis.com/download/storage/v1/b/bsato212-test/o/2019-08-26_Tactic_Updates.csv?generation=1568662849696201&alt=media",
"metageneration":"1",
"name":"2019-08-26_Tactic_Updates.csv",
"selfLink":"https://www.googleapis.com/storage/v1/b/bsato212-test/o/2019-08-26_Tactic_Updates.csv",
"size":"4854",
"storageClass":"REGIONAL",
"timeCreated":"2019-09-16T19:40:49.695Z",
"timeStorageClassUpdated":"2019-09-16T19:40:49.695Z",
"updated":"2019-09-16T19:40:49.695Z"
}
*/
/**
* Triggered from a change to a Cloud Storage bucket.
*
* @param {!Object} event Event payload.
* @param {!Object} context Metadata for the event.
*/
exports.gcsEvent = (event, context) => {
console.log(`Processing file: [${event.name}]`);
};
|
<gh_stars>1-10
var jsface = require('jsface');

// CsvHelper: CSV <-> array/object conversion utilities. The body below is
// minified third-party code (single-letter identifiers); comments annotate
// what each piece does without altering any token.
var CsvHelper = jsface.Class({
    $singleton: true,
    CSV: function () {
        // a: integer pattern; b: decimal pattern; c: chars that force quoting
        // (leading/trailing space, comma, quote, newline).
        var a = /^\d+$/,
            b = /^\d*\.\d+$|^\d+\.\d*$/,
            c = /^\s|\s$|,|"|\n/,
            // d: trim helper (native String.trim when available).
            d = function () {
                return String.prototype.trim ? function (a) {
                    return a.trim()
                } : function (a) {
                    return a.replace(/^\s*/, "").replace(/\s*$/, "")
                }
            }(),
            // e/f: number/string type checks via Object.prototype.toString.
            e = function (a) {
                return "[object Number]" === Object.prototype.toString.apply(a)
            },
            f = function (a) {
                return "[object String]" === Object.prototype.toString.apply(a)
            },
            // g: strip one trailing newline.
            g = function (a) {
                return "\n" !== a.charAt(a.length - 1) ? a : a.substring(0, a.length - 1)
            },
            // h: serialize a single cell -- quote/escape strings as needed,
            // render numbers in base 10, null/undefined as empty.
            h = function (d) {
                return f(d) ? (d = d.replace(/"/g, '""'), c.test(d) || a.test(d) || b.test(d) ? d = '"' + d + '"' : "" === d && (d = '""')) : d = e(d) ? d.toString(10) : null === d || void 0 === d ? "" : d.toString(), d
            },
            // i: the public API object returned by this factory.
            i = {
                // arrayToCsv: rows (array of arrays) -> CSV text.
                arrayToCsv: function (a) {
                    var b, c, d, e, f = "";
                    for (d = 0; d < a.length; d += 1) {
                        for (c = a[d], e = 0; e < c.length; e += 1) b = c[e], b = h(b), f += e < c.length - 1 ? b + "," : b;
                        f += "\n"
                    }
                    return f
                },
                // csvToArray: CSV text -> array of arrays. Handles quoted
                // fields, doubled quotes, optional trimming, and converts
                // numeric-looking unquoted cells to numbers.
                csvToArray: function (c, e) {
                    c = g(c), e = e === !0 ? {
                        trim: !0
                    } : e || {};
                    var f, h = "",
                        i = !1,
                        j = !1,
                        k = "",
                        l = [],
                        m = [],
                        n = e.trim === !0 ? !0 : !1,
                        o = function (c) {
                            var e = d(c);
                            return j !== !0 && ("" === c ? c = null : n === !0 && (c = e), (a.test(e) || b.test(e)) && (c = +e)), c
                        };
                    for (f = 0; f < c.length; f += 1) h = c.charAt(f), i !== !1 || "," !== h && "\n" !== h ? '"' !== h ? k += h : i ? '"' === c.charAt(f + 1) ? (k += '"', f += 1) : i = !1 : (i = !0, j = !0) : (k = o(k), l.push(k), "\n" === h && (m.push(l), l = []), k = "", j = !1);
                    return k = o(k), l.push(k), m.push(l), m
                },
                // csvToObject: CSV text -> array of objects keyed by the
                // header row (or by options.columns when provided).
                csvToObject: function (a, b) {
                    b = void 0 !== b ? b : {};
                    var c = b.columns,
                        d = !!b.trim,
                        e = this.csvToArray(a, d);
                    return c || (c = e.shift()), e.map(function (a) {
                        for (var b = {}, d = 0, e = c.length; e > d; d += 1) b[c[d]] = a[d];
                        return b
                    })
                },
                // objectToCsv: array of objects -> CSV text. Uses
                // options.columns when given, otherwise derives columns from
                // the union of keys; options.includeColumns controls whether
                // the header row is emitted (default true).
                objectToCsv: function (a, b) {
                    b = void 0 !== b ? b : {};
                    var c = b.columns,
                        d = b.includeColumns,
                        e = "",
                        f = "",
                        g = function (b) {
                            var d, e, f, g = "",
                                i = a.length,
                                j = c.length;
                            for (e = 0; i > e; e += 1) {
                                for (b = a[e], f = 0; j > f; f += 1) d = c[f], g += h(b[d]), g += j - 1 > f ? "," : "";
                                g += "\n"
                            }
                            return g
                        },
                        i = function () {
                            var b, d, e, f, g, i, j, k = [],
                                l = a.length,
                                m = [];
                            for (g = 0; l > g; g += 1) {
                                e = a[g], j = [];
                                for (f in e) e.hasOwnProperty(f) && (i = k.indexOf(f), -1 === i && (i = k.push(f), i -= 1), j[i] = h(e[f]));
                                0 === g && (b = j.length), m.push(j)
                            }
                            return d = k.length, b !== d && m.forEach(function (a) {
                                a.length = d
                            }), c = k, m.map(function (a) {
                                return a.join(",")
                            }).join("\n") + "\n"
                        };
                    return d = void 0 === d ? !0 : !!d, e = void 0 !== c ? g() : i(), d && (c.forEach(function (a) {
                        f += h(a) + ","
                    }), f = f.substring(0, f.length - 1), e = f + "\n" + e), e
                }
            };
        // Also mirror the API onto CommonJS exports when available.
        return "object" == typeof exports && (exports.arrayToCsv = i.arrayToCsv, exports.csvToArray = i.csvToArray, exports.objectToCsv = i.objectToCsv, exports.csvToObject = i.csvToObject), i
    }()
});
module.exports = CsvHelper;
|
<reponame>FelipeAlafy/EstudonautaApp<filename>app/src/main/java/com/felipealafy/estudonautaapp/ActivityMidia.java
/**
MIT License
Copyright (c) 2020 <NAME>.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
*/
package com.felipealafy.estudonautaapp;
import androidx.appcompat.app.AppCompatActivity;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.VideoView;
/**
 * Plays the bundled {@code R.raw.estudonauta} video in a {@link VideoView}.
 */
public class ActivityMidia extends AppCompatActivity {

    // View that hosts and plays the bundled video resource.
    VideoView video;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_midia);
        video = findViewById(R.id.video);
        // Build the android.resource:// path to the bundled video and convert it to a Uri.
        String cam = "android.resource://com.felipealafy.estudonautaapp/" + R.raw.estudonauta;
        Uri arq = Uri.parse(cam);
        // Attach the video to the view and start playback immediately.
        video.setVideoURI(arq);
        video.start();
    }

    /**
     * Closes this screen. Takes a View parameter, so presumably wired from the
     * layout via android:onClick -- confirm against activity_midia.xml.
     */
    public void back(View view) {
        finish();
    }
}
|
# File: D (Python 2.4)
from direct.fsm import FSM
from direct.showbase.PythonUtil import report
from pirates.instance import DistributedInstanceWorld
from pirates.quest import QuestHolder
from pirates.quest import DistributedQuest
from pirates.quest.MiniQuestItemGui import MiniQuestItemGui
from pirates.piratesbase import PiratesGlobals
from pirates.world import WorldGlobals
class DistributedTreasureMapInstance(DistributedInstanceWorld.DistributedInstanceWorld, QuestHolder.QuestHolder, FSM.FSM):
    """Client-side world instance for the treasure-map mode.

    Combines a distributed instance world with quest holding and an FSM.
    Swaps the local avatar's GUI into treasure-map mode on generate and
    restores it on disable. (This file appears to be decompiled Python 2
    source -- note the ``print`` statement in setObjectives.)
    """
    notify = directNotify.newCategory('DistributedTreasureMapInstance')

    def __init__(self, cr):
        DistributedInstanceWorld.DistributedInstanceWorld.__init__(self, cr)
        FSM.FSM.__init__(self, 'DistributedTreasureMapInstance')
        QuestHolder.QuestHolder.__init__(self)
        # Handles for outstanding relatedObjectMgr requests, kept so they can
        # be aborted in disable().
        self.pendingObjectiveRequest = None
        self.pendingShipRequest = None
        self.objectives = []

    def announceGenerate(self):
        """Register as the active treasure map and switch the GUI into TM mode."""
        DistributedInstanceWorld.DistributedInstanceWorld.announceGenerate(self)
        base.cr.treasureMap = self
        localAvatar.guiMgr.crewHUD.setTM(True)
        localAvatar.guiMgr.showTMUI(self)
        localAvatar.guiMgr.hideTrackedQuestInfo()
        localAvatar.b_setTeleportFlag(PiratesGlobals.TFTreasureMap)
        localAvatar.guiMgr.disableLookoutPanel(True)

    def disable(self):
        """Undo announceGenerate: restore GUI state and abort pending requests."""
        base.cr.treasureMap = None
        FSM.FSM.cleanup(self)
        localAvatar.guiMgr.crewHUD.setTM(False)
        localAvatar.guiMgr.hideTMUI()
        if localAvatar.guiMgr.crewHUD.crew:
            localAvatar.guiMgr.crewHUD.leaveCrew()
            localAvatar.guiMgr.crewHUD.setHUDOn()
        localAvatar.guiMgr.showTrackedQuestInfo()
        localAvatar.b_clearTeleportFlag(PiratesGlobals.TFTreasureMap)
        localAvatar.guiMgr.disableLookoutPanel(False)
        if self.pendingObjectiveRequest:
            base.cr.relatedObjectMgr.abortRequest(self.pendingObjectiveRequest)
            self.pendingObjectiveRequest = None
        if self.pendingShipRequest:
            base.cr.relatedObjectMgr.abortRequest(self.pendingShipRequest)
            self.pendingShipRequest = None
        DistributedInstanceWorld.DistributedInstanceWorld.disable(self)

    def enterWaitClientsReady(self):
        pass

    def setBarrierData(self, data):
        # Acknowledge readiness for the 'allAvatarsReady' barrier.
        DistributedInstanceWorld.DistributedInstanceWorld.setBarrierData(self, data)
        self.doneBarrier(self.uniqueName('allAvatarsReady'))

    # Decompiled form of a @report(...) decorator application.
    setBarrierData = report(types = [
        'frameCount',
        'deltaStamp',
        'args'], dConfigParam = 'blackpearl')(setBarrierData)

    def enterOff(self):
        pass

    def exitOff(self):
        pass

    def enterReward(self):
        pass

    def exitReward(self):
        pass

    def filterReward(self, request, args = []):
        # Only the 'Completed' transition is allowed out of Reward.
        if request in [
            'Completed']:
            return self.defaultFilter(request, args)

    def enterNotCompleted(self):
        pass

    def exitNotCompleted(self):
        pass

    def filterNotCompleted(self, request, args = []):
        # Only the 'Completed' transition is allowed out of NotCompleted.
        if request in [
            'Completed']:
            return self.defaultFilter(request, args)

    def enterCompleted(self):
        pass

    def exitCompleted(self):
        pass

    def getItemList(self):
        return self.getObjectives()

    def getObjectives(self):
        return self.objectives

    def setObjectives(self, objectives):
        """Store objective object ids and request their distributed objects.

        NOTE(review): the request handle is overwritten on every loop
        iteration, so disable() can only abort the last objective's request --
        confirm this is intended.
        """
        self.objectives = []
        for currObjective in objectives:
            self.objectives.append({
                'Type': 'ObjectId',
                'Value': currObjective })
            self.pendingObjectiveRequest = base.cr.relatedObjectMgr.requestObjects([
                currObjective], eachCallback = self.tagAsObjective)

        print 'got new objectives list %s' % objectives
        messenger.send(self.getItemChangeMsg())

    def getItemChangeMsg(self):
        return self.taskName('objectiveChanged')

    def tagAsObjective(self, quest):
        # Mark the resolved quest object as a treasure-map quest.
        quest.type = DistributedQuest.QUEST_TYPE_TM

    def setTMComplete(self, instanceResults, playerResults):
        """Swap the in-progress TM UI for the completion screen."""
        guiMgr = base.localAvatar.guiMgr
        guiMgr.hideTrays()
        guiMgr.hideTMUI()
        guiMgr.showTMCompleteUI(self, playerResults)

    def createNewItem(self, item, parent, itemType = None, columnWidths = [], color = None):
        return MiniQuestItemGui(item, parent)

    def requestTreasureMapLeave(self):
        """Ask the server for permission to leave the treasure-map instance."""
        localAvatar.guiMgr.crewHUD.leaveCrew()
        localAvatar.guiMgr.hideTMCompleteUI()
        localAvatar.guiMgr.showTrays()
        self.sendUpdate('requestLeave', [
            0])

    def requestLeaveApproved(self, parentId, zoneId, shipId):
        # Open interest on the exit zone, then fetch the ship to board.
        localAvatar.setInterest(parentId, zoneId, [
            'tmExit'])
        self.pendingShipRequest = base.cr.relatedObjectMgr.requestObjects([
            shipId], eachCallback = self.goToShip)

    def goToShip(self, pendingObj):
        pendingObj.localAvatarBoardShip()
        self.cr.teleportMgr.initiateTeleport(PiratesGlobals.INSTANCE_TM, WorldGlobals.PiratesWorldSceneFileBase)
|
module.exports = {
run: async (param) => {
console.log('member added');
const { futox, Discord, member } = param;
const d = futox.global.db.guilds[member.guild.id];
if (d.channels.member_logs) {
const embed = new Discord.RichEmbed()
.setTitle('User Joined')
.setDescription(member.user)
.setColor(futox.colors.botGold)
member.guild.channels.get(d.channels.member_logs).send(embed);
}
if (!member.user.bot) {
console.log('here');
if (d.roles.member) {
let role0 = member.guild.roles.get(d.roles.member);
member.addRole(role0.id);
}
if (d.channels.welcome) {
console.log('welcome');
if (d.channels.rules) {
member.guild.channels.get(d.channels.welcome).send(`${member}, Welcome to **${member.guild.name}**! Please make sure you\'ve read the <#${d.channels.rules}>`);
} else {
member.guild.channels.get(d.channels.welcome).send(`Welcome to **${member.guild.name}**, ${member}!`);
}
}
} else {
if (d.roles.bot) {
let role2 = member.guild.roles.get(d.roles.bot);
member.addRole(role2.id);
}
}
},
}; |
import java.util.PriorityQueue;

/**
 * Orders (cost, time) pairs by ascending cost/time ratio and prints them.
 */
public class CostOptimizedSearch {

    /**
     * Prints every (cost, time) pair in ascending order of the ratio
     * cost[i] / time[i]. Arrays are parallel: cost[i] belongs with time[i].
     *
     * @param cost cost values
     * @param time time values (same length as {@code cost})
     */
    public static void search(int[] cost, int[] time) {
        // Compare ratios by cross-multiplication to stay in exact integer
        // math. Widen to long so a[0] * b[1] cannot overflow int, and use
        // Long.compare instead of subtraction (the previous
        // a[0]*b[1] - b[0]*a[1] could overflow and invert the ordering).
        PriorityQueue<int[]> queue = new PriorityQueue<>(
            (a, b) -> Long.compare((long) a[0] * b[1], (long) b[0] * a[1])
        );
        for (int i = 0; i < cost.length; i++) {
            queue.add(new int[]{cost[i], time[i]});
        }
        while (!queue.isEmpty()) {
            int[] current = queue.poll();
            int c = current[0], t = current[1];
            System.out.println("Optimal cost: " + c + " with time: " + t);
        }
    }

    public static void main(String[] args) {
        int[] cost = {1, 4, 10, 15};
        int[] time = {5, 4, 2, 1};
        search(cost, time);
    }
}
#!/bin/bash
# SPDX-License-Identifier: GPL-2.0
#
# Script will generate one flow per thread (-t N)
#  - Same destination IP
#  - Fake source IPs for each flow (fixed based on thread number)
#
#  Useful for scale testing on receiver, to see whether silo'ing flows
#  works and scales. For optimal scalability (on receiver) each
#  separate-flow should not access shared variables/data. This script
#  helps magnify any of these scaling issues by overloading the receiver.
#
basedir=`dirname $0`
source ${basedir}/functions.sh
root_check_run_with_sudo "$@"

# Parameter parsing via include
# NOTE(review): $DEV, $F_THREAD, $L_THREAD, $PKT_SIZE and $DELAY used below
# are presumably set by parameters.sh -- confirm.
source ${basedir}/parameters.sh
# Set some default params, if they didn't get set
[ -z "$DEST_IP" ]   && DEST_IP="198.18.0.42"
[ -z "$DST_MAC" ]   && DST_MAC="90:e2:ba:ff:ff:ff"
[ -z "$CLONE_SKB" ] && CLONE_SKB="0"
[ -z "$BURST" ]     && BURST=32
[ -z "$COUNT" ]     && COUNT="0" # Zero means indefinitely
if [ -n "$DEST_IP" ]; then
    validate_addr $DEST_IP
    read -r DST_MIN DST_MAX <<< $(parse_addr $DEST_IP)
fi
if [ -n "$DST_PORT" ]; then
    read -r UDP_DST_MIN UDP_DST_MAX <<< $(parse_ports $DST_PORT)
    validate_ports $UDP_DST_MIN $UDP_DST_MAX
fi

# General cleanup everything since last run
[ -z "$APPEND" ] && pg_ctrl "reset"

# Threads are specified with parameter -t value in $THREADS
for ((thread = $F_THREAD; thread <= $L_THREAD; thread++)); do
    dev=${DEV}@${thread}

    # Add remove all other devices and add_device $dev to thread
    [ -z "$APPEND" ] && pg_thread $thread "rem_device_all"
    pg_thread $thread "add_device" $dev

    # Base config
    pg_set $dev "flag QUEUE_MAP_CPU"
    pg_set $dev "count $COUNT"
    pg_set $dev "clone_skb $CLONE_SKB"
    pg_set $dev "pkt_size $PKT_SIZE"
    pg_set $dev "delay $DELAY"
    pg_set $dev "flag NO_TIMESTAMP"

    # Single destination
    pg_set $dev "dst_mac $DST_MAC"
    pg_set $dev "dst_min $DST_MIN"
    pg_set $dev "dst_max $DST_MAX"

    if [ -n "$DST_PORT" ]; then
        # Single destination port or random port range
        pg_set $dev "flag UDPDST_RND"
        pg_set $dev "udp_dst_min $UDP_DST_MIN"
        pg_set $dev "udp_dst_max $UDP_DST_MAX"
    fi

    # Setup source IP-addresses based on thread number
    pg_set $dev "src_min 198.18.$((thread+1)).1"
    pg_set $dev "src_max 198.18.$((thread+1)).1"

    # Setup burst, for easy testing -b 0 disable bursting
    # (internally in pktgen default and minimum burst=1)
    if [[ ${BURST} -ne 0 ]]; then
        pg_set $dev "burst $BURST"
    else
        info "$dev: Not using burst"
    fi
done

# Run if user hits control-c
function print_result() {
    # Print results
    for ((thread = $F_THREAD; thread <= $L_THREAD; thread++)); do
        dev=${DEV}@${thread}
        echo "Device: $dev"
        cat /proc/net/pktgen/$dev | grep -A2 "Result:"
    done
}
# trap keyboard interrupt (Ctrl-C)
trap true SIGINT

if [ -z "$APPEND" ]; then
    echo "Running... ctrl^C to stop" >&2
    # pg_ctrl start blocks until interrupted; SIGINT is trapped above so the
    # results below still print after Ctrl-C.
    pg_ctrl "start"

    print_result
else
    echo "Append mode: config done. Do more or use 'pg_ctrl start' to run"
fi
#!/bin/bash
# Sourcing all the environment variables
source .env

# Positional parameters:
#   $1 action (rebuild|build), $2 target vm, $3 gocd option, $4 verbosity
TEMPSTR1=$1;
TEMPSTR2=$2;
TEMPSTR3=$3;
# Checking to see if verbose option was specified, else it will be left blank
if [ "$4" == "v" ] || [ "$3" == "v" ]; then
	#TEMPSTR4="-vvvv"
	TEMPSTR4="-vvv"
fi

# Show usage when required arguments are missing or the action is invalid.
# BUGFIX: the original inner test used ||, i.e. "arg1 != rebuild OR
# arg1 != build", which is true for EVERY value, so a valid/invalid action was
# never actually distinguished. The action check must combine with &&.
if [ -z "$TEMPSTR1" ] || [ -z "$TEMPSTR2" ] || { [ "$TEMPSTR1" != "rebuild" ] && [ "$TEMPSTR1" != "build" ]; }; then
    echo " "
    echo " This script is to be able to Re-build (by destroying the VM and building all over)"
    echo " Or to just build a VM (as in, re-run the yml's and fix anything if there has been a change"
    echo " "
    echo " The script server.sh accepts 4 parameters"
    echo " 1st: 'rebuild' or 'build' a VM"
    echo " 2nd: Identify the vm that should be configured (ex: postgresql)"
    echo " 3rd: Specify if you want the vm to be built with a go-agent installed, options are:"
    echo "      with_gocd"
    echo "      without_gocd"
    echo "      If this argument is left blank then the GoCD server and agents will not be installed"
    echo " 4th: Verbosity level of the ansible playbook executions, options are:"
    echo "      v => this would be at verbose level -vvv"
    echo "      If this argument is left blank then the verbosity level would be default"
    echo " "
    echo " Examples: "
    echo " 1. Tear down and rebuild one particular vm, for Ex:"
    echo "    ./server.sh rebuild postgresql with_gocd v"
    echo " "
    echo " 2. DO NOT Destroy but just re run the ansible scripts:"
    echo "    ./server.sh build postgresql with_gocd v"
    echo " "
    echo " 3. Tear down and rebuild all the VMs at once:"
    echo "    ./server.sh rebuild all with_gocd"
    echo " "
    echo " 4. DO NOT Destroy but just re run the ansible scripts on all VMs:"
    echo "    ./server.sh build all with_gocd v"
    echo " "
    echo " 5. In all the above examples if you exclude the word "with_gocd" or if you"
    echo "    replace it with "without_gocd" then the GoCD Server and agents will not be installed"
    echo " "
    exit;
fi
# Map the requested target name to the VM name exported from .env.
case "$TEMPSTR2" in
    postgresql)
        VMNAME=$POSTGRESQL_VM_NAME
        ;;
    mysql)
        VMNAME=$MYSQL_VM_NAME
        ;;
    citusdata)
        VMNAME=$CITUSDATA_VM_NAME
        ;;
    all)
        VMNAME=all
        ;;
    *)
        echo $"Usage: $0 {postgresql|mysql|citusdata|all}"
        exit 1
esac

# Destroy/Tear down and boot up the server/s (without or without GoCD as per parameter 3:
if [ "$TEMPSTR1" == "rebuild" ]; then
    echo " "
    echo " ************************************************** "
    echo " About to Rebuild the VM/s "
    echo deploy_server/scripts/destroy_all_vm.sh $VMNAME $TEMPSTR3;
    echo " ************************************************** "
    echo " "
    deploy_server/scripts/destroy_all_vm.sh $VMNAME $TEMPSTR3;
fi

# Boot runs for both 'build' and 'rebuild'.
echo " "
echo " ************************************************** "
echo " About to Boot up the VM/s "
echo deploy_server/scripts/vagrant_up.sh $VMNAME $TEMPSTR3;
echo " ************************************************** "
echo " "
deploy_server/scripts/vagrant_up.sh $VMNAME $TEMPSTR3;

# Executing the yml files to download, install and configure the selected server
# Private keys must not be group/world-readable or ssh refuses them.
chmod 600 filesForVMs/insecure_citus1_pvt_key
sleep 3

#----------------------------------------------------------------------
# If with_gocd option is chosen as a first step install the GoCD server
# And install GoCD agent on the GoCD Server:
#----------------------------------------------------------------------
if [ "$TEMPSTR3" == "with_gocd" ]; then
    echo " "
    echo " ************************************************** "
    echo " About to deploy GoCD server "
    echo ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit "${GOCD_SERVER_VM_NAME},${GOCD_SERVER_VM_NAME}_goagent" $TEMPSTR4
    echo " ************************************************** "
    echo " "
    ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit "${GOCD_SERVER_VM_NAME},${GOCD_SERVER_VM_NAME}_goagent" $TEMPSTR4
fi

#----------------------------------------------------------------------
# If "all" option is chosen:
#----------------------------------------------------------------------
if [ "$TEMPSTR2" == "all" ]; then
    echo " "
    echo " ************************************************** "
    echo " About to deploy the servers "
    echo ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit db_servers $TEMPSTR4
    echo " ************************************************** "
    echo " "
    ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml $TEMPSTR4 --limit "db_servers"
    #----------------------------------------------------------------------
    # Along with "all" if "with_gocd" option is chosen, then install
    # GoCD agent on the database servers
    #----------------------------------------------------------------------
    if [ "$TEMPSTR3" == "with_gocd" ]; then
        echo " "
        echo " ************************************************** "
        echo " About to install Go Agent on servers "
        echo ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml $TEMPSTR4 --limit "VMsToInstallAgents"
        echo " ************************************************** "
        echo " "
        ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml $TEMPSTR4 --limit "VMsToInstallAgents"
    fi
else
    #----------------------------------------------------------------------
    # Instead, if individual database server was chosen:
    #----------------------------------------------------------------------
    echo " "
    echo " ************************************************** "
    echo " About to deploy vm server "
    echo ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit ${TEMPSTR2}_db $TEMPSTR4
    echo " ************************************************** "
    echo " "
    ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit ${TEMPSTR2}_db $TEMPSTR4
    #----------------------------------------------------------------------
    # Along with the individual database server if "with_gocd" option is choosen, then install
    # GoCD agent on the selected database server
    #----------------------------------------------------------------------
    if [ "$TEMPSTR3" == "with_gocd" ]; then
        echo " "
        echo " ************************************************** "
        echo " About to deploy vm server "
        echo ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit ${TEMPSTR2}_goagent $TEMPSTR4
        echo " ************************************************** "
        echo " "
        ansible-playbook -i deploy_server/ansible_hosts deploy_server/deploy_servers_playbook.yml --limit ${TEMPSTR2}_goagent $TEMPSTR4
    fi
fi
|
def extract_views(urlpatterns):
    """Flatten a Django URL pattern tree into a list of view identifiers.

    Resolvers (entries exposing ``url_patterns``) are recursed into; plain
    patterns contribute either ``module.name`` of their callback or, failing
    that, their ``lookup_str``. Entries with none of these are skipped.
    """
    collected = []
    for entry in urlpatterns:
        if hasattr(entry, 'url_patterns'):
            # Nested resolver: recurse and splice its views in order.
            collected.extend(extract_views(entry.url_patterns))
        elif hasattr(entry, 'callback'):
            callback = entry.callback
            collected.append('{0}.{1}'.format(callback.__module__, callback.__name__))
        elif hasattr(entry, 'lookup_str'):
            collected.append(entry.lookup_str)
    return collected
/**
 * Demonstrates an in-place bubble sort on a small fixed array and prints the
 * sorted elements separated by spaces.
 */
public class Sorting {
    public static void main(String[] args) {
        int[] numbers = {5, 1, 4, 2, 3};
        bubbleSort(numbers);
        for (int i : numbers) {
            System.out.print(i + " ");
        }
    }

    /**
     * Sorts the array ascending with bubble sort. Improvements over the
     * original loop: the inner bound shrinks each pass (the tail is already
     * sorted) and the sort exits early once a full pass makes no swap,
     * turning already-sorted input into a single O(n) pass.
     */
    private static void bubbleSort(int[] a) {
        for (int i = 0; i < a.length - 1; i++) {
            boolean swapped = false;
            for (int j = 0; j < a.length - 1 - i; j++) {
                if (a[j] > a[j + 1]) {
                    int temp = a[j];
                    a[j] = a[j + 1];
                    a[j + 1] = temp;
                    swapped = true;
                }
            }
            if (!swapped) {
                break; // no swaps means the array is fully sorted
            }
        }
    }
}
# Print every item of list1 that also occurs in list2 (intersection,
# preserving list1's order; duplicates in list1 are printed repeatedly).
# NOTE(review): list1/list2 are defined elsewhere; `element in list2` is a
# linear scan, so this is O(len(list1) * len(list2)) — consider
# `set(list2)` if the elements are hashable and the lists are large.
for element in list1:
    if element in list2:
        print(element)
-- Fetch users considered inactive: last login more than 3 months ago.
-- NOTE(review): DATEADD(month, -3, CURRENT_DATE) is T-SQL/Snowflake-style;
-- confirm the target dialect supports this form (PostgreSQL would use
-- CURRENT_DATE - INTERVAL '3 months').
SELECT *
FROM users
WHERE last_login < DATEADD(month, -3, CURRENT_DATE);
#!/bin/bash
set -o nounset
set -o pipefail
set -o xtrace

# When running in prow, the working directory is the root of the test-infra
# repository.

# Pre-pull all the test images onto every node before the load test starts.
# BUG FIX: quote all expansions — unquoted $0 / ${...} word-split on paths
# containing spaces, which is especially dangerous under `set -o nounset`.
SCRIPT_ROOT=$(cd "$(dirname "$0")" && pwd)
PREPULL_YAML=${PREPULL_YAML:-loadtest-prepull.yaml}
kubectl create -f "${SCRIPT_ROOT}/${PREPULL_YAML}"

# Wait a while for the test images to be pulled onto the nodes.
# --timeout=-1s (the `=` form, so the leading dash cannot be parsed as a new
# flag) disables kubectl's own deadline; the outer `timeout` command
# enforces the real limit.
timeout "${PREPULL_TIMEOUT:-10m}" kubectl wait --for=condition=ready pod -l prepull-test-images=loadtest --timeout=-1s

# Check the status of the pods.
kubectl get pods -o wide
kubectl describe pods

# Delete the pods anyway since pre-pulling is best-effort.
kubectl delete -f "${SCRIPT_ROOT}/${PREPULL_YAML}"

# Wait a few more minutes for the pods to be cleaned up.
timeout 3m kubectl wait --for=delete pod -l prepull-test-images=loadtest --timeout=-1s

# Hand off to the actual load test, forwarding all script arguments intact.
# BUG FIX: "$@" preserves argument boundaries; bare $@ would re-split them.
"$GOPATH/src/k8s.io/perf-tests/run-e2e.sh" "$@"
import os
def cleanup_directory(dir_path: str) -> bool:
    """Remove the generated yawik autoload config and its directory.

    Deletes ``config/autoload/yawik.config.global.php`` under ``dir_path``
    (if present) and then removes the ``config/autoload`` directory when it
    is empty.

    Args:
        dir_path: project root containing the ``config/autoload`` tree.

    Returns:
        True when both the file and the directory are gone afterwards
        (including when they never existed), False otherwise.
    """
    file_path = os.path.join(dir_path, "config/autoload/yawik.config.global.php")
    if os.path.exists(file_path):
        os.remove(file_path)
    if os.path.exists(file_path):
        # Deletion failed (e.g. permissions); report failure.
        return False
    autoload_dir = os.path.join(dir_path, "config/autoload")
    if os.path.exists(autoload_dir):
        try:
            os.rmdir(autoload_dir)
        except OSError:
            # BUG FIX: the original let os.rmdir raise when the directory
            # still held other files; treat that as failure instead of
            # crashing the caller.
            return False
    return not os.path.exists(autoload_dir)
/// A single menu entry: display name plus price in dollars.
struct MenuItem {
    let name: String
    let price: Double
}
/// Renders one menu row: name on the left, price right-aligned and
/// formatted to two decimal places (e.g. "$9.99").
struct MenuRowView: View {
    let row: MenuItem
    var body: some View {
        HStack {
            Text(row.name)
            Spacer()
            // %.2f keeps cents; String(format:) avoids locale-dependent output.
            Text(String(format: "$%.2f", row.price))
        }
        .padding(10)
    }
}
/// Xcode canvas preview with sample data.
struct MenuRow_Previews: PreviewProvider {
    static var previews: some View {
        MenuRowView(row: MenuItem(name: "Burger", price: 9.99))
    }
}
<reponame>brains-and-beards/react-native-animated-code-input
import React, { useCallback } from "react";
import {
TextInput,
NativeSyntheticEvent,
TextInputFocusEventData,
Keyboard,
InteractionManager,
StyleSheet,
} from "react-native";
// Props shared between the public animated code input and this hidden field.
export interface IInputProps {
  // Focus the field on mount; defaults to true inside InputField.
  autoFocus?: boolean;
  // Current (digits-only) code value; the field is fully controlled.
  value: string;
  onBlur?: (e: NativeSyntheticEvent<TextInputFocusEventData>) => void;
  onChangeText?: (text: string) => void;
  // Invoked after blur once the code reaches codeMaxLength digits.
  onSubmit?: () => void;
  // iOS textContentType values accepted by RN's TextInput; when omitted,
  // InputField falls back to "oneTimeCode" (enables SMS code autofill).
  textContentType?:
    | "none"
    | "URL"
    | "addressCity"
    | "addressCityAndState"
    | "addressState"
    | "countryName"
    | "creditCardNumber"
    | "emailAddress"
    | "familyName"
    | "fullStreetAddress"
    | "givenName"
    | "jobTitle"
    | "location"
    | "middleName"
    | "name"
    | "namePrefix"
    | "nameSuffix"
    | "nickname"
    | "organizationName"
    | "postalCode"
    | "streetAddressLine1"
    | "streetAddressLine2"
    | "sublocality"
    | "telephoneNumber"
    | "username"
    | "password";
}
// Internal props: adds the code length, a test hook, and the ref that lets
// the parent focus/blur the hidden input programmatically.
interface IProps extends IInputProps {
  codeMaxLength: number;
  testID?: string;
  textInputRef: React.RefObject<TextInput>;
}
// Matches every character that is not an ASCII digit (used to strip them).
const NON_NUMBER_REGEX = /[^0-9]/g;
/**
 * Hidden TextInput backing the animated code input: it owns the real
 * keyboard interaction while the visible digits are rendered elsewhere.
 */
const InputField: React.FC<IProps> = (props: IProps) => {
  const {
    autoFocus,
    codeMaxLength,
    value,
    onBlur,
    onChangeText,
    onSubmit,
    testID,
    textContentType,
    textInputRef,
  } = props;

  // Strip non-digits, forward the cleaned value, and close the keyboard
  // once the full code has been entered.
  const onChangeTextCallback = useCallback(
    (text: string) => {
      const numbersFromText = text.replace(NON_NUMBER_REGEX, "");
      const codeChanged = numbersFromText !== value;
      if (onChangeText) {
        onChangeText(numbersFromText);
      }
      if (codeChanged && numbersFromText.length === codeMaxLength) {
        Keyboard.dismiss();
      }
    },
    [codeMaxLength, value, onChangeText]
  );

  // Fire onSubmit when focus leaves a fully-entered code, then forward the
  // blur event to the caller.
  const onBlurCallback = useCallback(
    (e: NativeSyntheticEvent<TextInputFocusEventData>) => {
      InteractionManager.runAfterInteractions(() => {
        if (onSubmit && value.length === codeMaxLength) {
          onSubmit();
        }
      });
      if (onBlur) {
        onBlur(e);
      }
    },
    // BUG FIX: value and codeMaxLength were missing from the dependency
    // list, so a memoized callback could submit against a stale code.
    [onSubmit, onBlur, value, codeMaxLength]
  );

  return (
    <TextInput
      // BUG FIX: `autoFocus || true` was always true, so the prop could
      // never disable autofocus; default to true only when it is omitted.
      autoFocus={autoFocus !== undefined ? autoFocus : true}
      caretHidden={true}
      keyboardType="number-pad"
      onBlur={onBlurCallback}
      onChangeText={onChangeTextCallback}
      maxLength={codeMaxLength}
      ref={textInputRef}
      style={styles.input}
      testID={testID}
      textContentType={textContentType ? textContentType : "oneTimeCode"}
      value={value}
    />
  );
};
// The input is rendered effectively invisible (1px, fully transparent):
// it exists only to capture keystrokes for the animated digit display.
const styles = StyleSheet.create({
  input: {
    fontSize: 1,
    height: 1,
    margin: 0,
    opacity: 0,
    padding: 0,
  },
});
export default InputField;
|
<reponame>zenaptix-lab/atlas
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.migration;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
/**
 * Standalone migration tool: exports the Atlas type definitions (and entity
 * data) as JSON files so they can be imported into another Atlas instance.
 */
public class Exporter {
    private static final Logger LOG = LoggerFactory.getLogger(Exporter.class);
    private static final String ATLAS_TYPE_REGISTRY = "atlasTypeRegistry";
    private static final String APPLICATION_CONTEXT = "migrationContext.xml";
    private static final String MIGRATION_TYPESDEF_FILENAME = "atlas-migration-typesdef.json";
    private static final String MIGRATION_DATA_FILENAME = "atlas-migration-data.json";
    private static final String LOG_MSG_PREFIX = "atlas-migration-export: ";
    private static final int PROGRAM_ERROR_STATUS = -1;
    private static final int PROGRAM_SUCCESS_STATUS = 0;
    private final String typesDefFileName;
    private final String dataFileName;
    private final AtlasTypeRegistry typeRegistry;
    /**
     * Entry point. Accepts -d/--outputdir (defaults to the current working
     * directory) and writes the typesDef and data JSON files there. Exits
     * with 0 on success, -1 on any failure.
     */
    public static void main(String args[]) {
        int result;
        try {
            String logFileName = System.getProperty("atlas.log.dir") + File.separatorChar + System.getProperty("atlas.log.file");
            displayMessage("starting migration export. Log file location " + logFileName);
            Options options = new Options();
            options.addOption("d", "outputdir", true, "Output directory");
            CommandLine cmd = (new BasicParser()).parse(options, args);
            String outputDir = cmd.getOptionValue("d");
            if (StringUtils.isEmpty(outputDir)) {
                outputDir = System.getProperty("user.dir");
            }
            String typesDefFileName = outputDir + File.separatorChar + MIGRATION_TYPESDEF_FILENAME;
            String dataFileName = outputDir + File.separatorChar + MIGRATION_DATA_FILENAME;
            Exporter exporter = new Exporter(typesDefFileName, dataFileName, APPLICATION_CONTEXT);
            exporter.perform();
            result = PROGRAM_SUCCESS_STATUS;
            displayMessage("completed migration export!");
        } catch (Exception e) {
            displayError("Failed", e);
            result = PROGRAM_ERROR_STATUS;
        }
        System.exit(result);
    }
    /**
     * @param typesDefFileName destination for the typesDef JSON (must not exist yet)
     * @param dataFileName     destination for the data JSON (must not exist yet)
     * @param contextXml       Spring context providing the type registry bean
     * @throws Exception when an output file already exists or the context fails to load
     */
    public Exporter(String typesDefFileName, String dataFileName, String contextXml) throws Exception {
        validate(typesDefFileName, dataFileName);
        displayMessage("initializing");
        ApplicationContext applicationContext = new ClassPathXmlApplicationContext(contextXml);
        this.typesDefFileName = typesDefFileName;
        this.dataFileName = dataFileName;
        // FIX: removed a stray duplicate semicolon at the end of this statement.
        this.typeRegistry = applicationContext.getBean(ATLAS_TYPE_REGISTRY, AtlasTypeRegistry.class);
        displayMessage("initialized");
    }
    /** Runs both export phases in order: typesDef first, then data. */
    public void perform() throws Exception {
        exportTypes();
        exportData();
    }
    /** Refuses to overwrite existing output files. */
    private void validate(String typesDefFileName, String dataFileName) throws Exception {
        File typesDefFile = new File(typesDefFileName);
        File dataFile = new File(dataFileName);
        if (typesDefFile.exists()) {
            throw new Exception("output file " + typesDefFileName + " already exists");
        }
        if (dataFile.exists()) {
            throw new Exception("output file " + dataFileName + " already exists");
        }
    }
    /** Serializes every type definition in the registry to the typesDef file. */
    private void exportTypes() throws Exception {
        displayMessage("exporting typesDef to file " + typesDefFileName);
        AtlasTypesDef typesDef = getTypesDef(typeRegistry);
        FileUtils.write(new File(typesDefFileName), AtlasType.toJson(typesDef));
        displayMessage("exported typesDef to file " + typesDefFileName);
    }
    private void exportData() throws Exception {
        displayMessage("exporting data to file " + dataFileName);
        // NOTE(review): this only creates an empty output file — no entity
        // data is serialized. Kept behavior-identical (try-with-resources
        // replaces the original manual close), but the actual export logic
        // is still TODO and needs to be implemented or confirmed intentional.
        try (OutputStream os = new FileOutputStream(dataFileName)) {
            // intentionally empty: data serialization not implemented yet
        }
        displayMessage("exported data to file " + dataFileName);
    }
    /** Snapshots all enum/struct/classification/entity defs from the registry. */
    private AtlasTypesDef getTypesDef(AtlasTypeRegistry registry) {
        return new AtlasTypesDef(new ArrayList<>(registry.getAllEnumDefs()),
                new ArrayList<>(registry.getAllStructDefs()),
                new ArrayList<>(registry.getAllClassificationDefs()),
                new ArrayList<>(registry.getAllEntityDefs()));
    }
    /** Logs and echoes msg to stdout (the tool is run interactively). */
    private static void displayMessage(String msg) {
        LOG.info(LOG_MSG_PREFIX + msg);
        System.out.println(LOG_MSG_PREFIX + msg);
        System.out.flush();
    }
    /** Logs the failure with stack trace and echoes a short summary to stdout. */
    private static void displayError(String msg, Throwable t) {
        LOG.error(LOG_MSG_PREFIX + msg, t);
        System.out.println(LOG_MSG_PREFIX + msg);
        System.out.flush();
        if (t != null) {
            System.out.println("ERROR: " + t.getMessage());
        }
        System.out.flush();
    }
}
|
package com.groupon.nakala.core;
import com.groupon.nakala.normalization.StringNormalizer;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
* Created by nickp on 1/29/14.
*/
public abstract class AbstractTokenizerStream implements TokenizerStream {
    /**
     * Returns the set of distinct tokens in s.
     */
    @Override
    public Set<String> getUniqueTokens(String s) {
        // Delegate to the normalizing variant; a null normalizer list means
        // "no normalization", so behavior is identical to the original
        // duplicated body.
        return getUniqueTokens(s, null);
    }
    /**
     * Returns all tokens of s in order of appearance.
     */
    @Override
    public List<String> getTokens(String s) {
        return getTokens(s, null);
    }
    /**
     * Returns the set of distinct tokens in s after applying the given
     * normalizers in order (the list may be null for none).
     */
    @Override
    public Set<String> getUniqueTokens(String s, List<StringNormalizer> normalizers) {
        setText(applyNormalizers(s, normalizers));
        Set<String> uniques = new HashSet<String>();
        String tok;
        while ((tok = next()) != null) {
            uniques.add(tok);
        }
        return uniques;
    }
    /**
     * Returns all tokens of s, in order, after applying the given
     * normalizers in order (the list may be null for none).
     */
    @Override
    public List<String> getTokens(String s, List<StringNormalizer> normalizers) {
        setText(applyNormalizers(s, normalizers));
        List<String> tokens = new LinkedList<String>();
        String tok;
        while ((tok = next()) != null) {
            tokens.add(tok);
        }
        return tokens;
    }
    /** Applies each normalizer in order; returns s unchanged when the list is null. */
    private static String applyNormalizers(String s, List<StringNormalizer> normalizers) {
        if (normalizers != null) {
            for (StringNormalizer normalizer : normalizers) {
                s = normalizer.normalize(s);
            }
        }
        return s;
    }
}
|
// Build a Map from generated string keys to arr's elements. Keys are
// normally 'key_<element>'; when two elements would collide on the same
// key, the later one receives a random 'key_<float>' key instead.
const generateUniqueKeys = (arr) => {
    const result = new Map();
    const usedKeys = [];
    arr.forEach((item) => {
        let candidate = 'key_' + item;
        if (usedKeys.includes(candidate)) {
            // Collision: draw random keys until an unused one turns up.
            do {
                candidate = 'key_' + Math.random();
            } while (usedKeys.includes(candidate));
        }
        usedKeys.push(candidate);
        result.set(candidate, item);
    });
    return result;
}
console.log(`Unique Keys: ${generateUniqueKeys([3,5,6,9])}`);
// Expected output:
// Unique Keys: Map
// {"key_3" => 3, "key_5" => 5, "key_6" => 6, "key_9" => 9}
<gh_stars>0
import chai from "chai"
import React from 'react'
import equalJSX from 'chai-equal-jsx'
import {createRenderer} from 'react-addons-test-utils'
import {Tetris, Board} from '../src/client/components/test'
chai.should()
chai.use(equalJSX)
// Smoke test for the React toolchain: shallow-rendering <Tetris/> should
// produce a <Board/> element.
// NOTE(review): react-addons-test-utils is long deprecated; newer React
// versions need react-test-renderer/shallow instead — confirm the pinned
// React version before upgrading.
describe('Fake react test', function(){
  it('works', function(){
    const renderer = createRenderer()
    renderer.render(React.createElement(Tetris))
    const output = renderer.getRenderOutput()
    output.should.equalJSX(<Board/>)
  })
})
|
<gh_stars>10-100
// Global augmentation: declares IE/old-Edge's vendor-prefixed
// Element.msMatchesSelector so code can use it as a matches() fallback
// without a cast.
interface Element {
  msMatchesSelector: (selectors: string) => boolean;
}
|
<filename>src/controllers/Config.js
const { get } = require('lodash');
const core = require('cyberway-core-service');
const { Basic } = core.controllers;
const { isLowerThan } = require('../utils/versions');
const { getGlobalSettings } = require('../utils/settings');
// Minimum supported client app versions per platform; older clients are
// instructed to update.
const minimalVersions = {
    app: {
        ios: '1.0.0',
        android: '1.0.0',
    },
};
// Controller returning client configuration (feature flags, domain,
// maintenance flag), overridable by persisted global settings.
class Config extends Basic {
    // Throws { code: 5000 } when the caller's app version is below the
    // platform minimum; otherwise returns defaults merged with
    // getGlobalSettings() (global settings win).
    // NOTE(review): `ftueCommunityBunus` below looks like a typo of
    // `ftueCommunityBonus`, but renaming the key would break clients that
    // already read it — confirm before changing.
    async getConfig({}, {}, { platform, version, clientType, deviceType }) {
        const minVersion = get(minimalVersions, [clientType, platform]);
        if (minVersion && isLowerThan(version, minVersion)) {
            // Plain-object throw: the RPC layer serializes { code, message }.
            throw {
                code: 5000,
                message: 'Need update application version',
            };
        }
        const globalSettings = await getGlobalSettings();
        return {
            features: {
                ftueCommunityBunus: true,
            },
            domain: 'https://commun.com',
            ftueCommunityBonus: 10,
            isMaintenance: false,
            // params above can be overriden by globalSettings
            ...globalSettings,
        };
    }
}
module.exports = Config;
|
<filename>src/store/modules/minder.js<gh_stars>0
// Vuex state for the minder (mind-map) editor module.
const state = {
  count: 2,
  // kityminder instance, set via the setMinder mutation
  minder: {},
  editor: {},
  // transient editor status flags
  working: {
    editing: false,
    saving: false,
    draging: false
  },
  // callbacks registered via the registerEvent mutation
  callbackQueue: [],
  config: {
    // minimum width of the right-hand control panel
    ctrlPanelMin: 250,
    // current width of the right-hand control panel (restored from localStorage)
    ctrlPanelWidth: parseInt(window.localStorage.__dev_minder_ctrlPanelWidth) || 250,
    // divider bar width
    dividerWidth: 3,
    // default UI language
    defaultLang: 'zh-cn',
    // available zoom levels (percent)
    zoom: [
      10,
      20,
      30,
      50,
      80,
      100,
      120,
      150,
      200
    ]
  }
}
// Vuex mutations for the minder module. All handlers take (state, payload).
const mutations = {
  // Toggle the node-dragging flag.
  changeDrag(state, bool) {
    state.working.draging = bool
  },
  setMinder(state, data) {
    state.minder = data
  },
  setEditor(state, data) {
    state.editor = data
  },
  changeSave(state, bool) {
    state.working.saving = bool
  },
  changeCount(state) {
    state.count++
  },
  increment(state) {
    state.count++
  },
  decrement(state) {
    state.count--
  },
  registerEvent(state, callback) {
    state.callbackQueue.push(callback)
  },
  // Update one config entry ({ key, value }) or several at once (pass an
  // object as `key`). Returns true on success, false for unsupported keys.
  // BUG FIX: the original body read `key` and `value` that were never
  // declared — the payload parameter was missing entirely, so every call
  // hit a ReferenceError (or silently used globals).
  setConfig(state, { key, value } = {}) {
    var supported = Object.keys(state.config)
    var configObj = {}
    // Support passing a whole config object instead of a single pair.
    if (typeof key === 'object') {
      configObj = key
    } else {
      configObj[key] = value
    }
    for (var i in configObj) {
      if (configObj.hasOwnProperty(i) && supported.indexOf(i) !== -1) {
        state.config[i] = configObj[i]
      } else {
        console.error('Unsupported config key: ', key, ', please choose in :', supported.join(', '))
        return false
      }
    }
    return true
  }
}
// Returns true when exception e signals that localStorage is full (quota
// exceeded), covering Chrome/Safari (code 22), Firefox (1014 +
// NS_ERROR_DOM_QUOTA_REACHED) and IE8 (number -2147024882).
function isQuotaExceededError(e) {
  var quotaExceeded = false
  if (e) {
    if (e.code) {
      switch (e.code) {
        case 22:
          quotaExceeded = true
          break
        case 1014:
          // Firefox
          if (e.name === 'NS_ERROR_DOM_QUOTA_REACHED') {
            quotaExceeded = true
          }
          break
      }
    } else if (e.number === -2147024882) {
      // Internet Explorer 8
      quotaExceeded = true
    }
  }
  return quotaExceeded
}

// Vuex actions for the minder module. Handlers receive the store context
// as the first argument and the dispatched payload as the second.
const actions = {
  changeCount: ({
    commit
  }) => commit('changeCount'),
  increment: ({
    commit
  }) => commit('increment'),
  decrement: ({
    commit
  }) => commit('decrement'),
  incrementIfOdd({
    commit,
    state
  }) {
    if ((state.count + 1) % 2 === 0) {
      commit('increment')
    }
  },
  // Increments after a 1s delay; resolves once the mutation is committed.
  incrementAsync({
    commit
  }) {
    return new Promise((resolve, reject) => {
      setTimeout(() => {
        commit('increment')
        resolve()
      }, 1000)
    })
  },
  // BUG FIX: the payload was silently dropped; forward it to the mutation.
  setConfig: ({
    commit
  }, payload) => commit('setConfig', payload),
  // BUG FIX: `callback` was an undeclared global (ReferenceError at
  // dispatch time); accept it as the action payload.
  registerEvent: ({
    commit
  }, callback) => commit('registerEvent', callback),
  executeCallback({
    commit,
    state
  }) {
    state.callbackQueue.forEach(function(ele) {
      ele.apply(this, arguments)
    })
  },
  // Kept for backward compatibility with direct callers; delegates to the
  // module-level helper.
  isQuotaExceeded(e) {
    return isQuotaExceededError(e)
  },
  getMemory({
    commit,
    state
  }, key) {
    var value = window.localStorage.getItem(key)
    // JSON.parse(null) yields null, so missing keys come back as null.
    var result = null || JSON.parse(value)
    return result
  },
  setMemory({
    commit,
    state
  }, data) {
    try {
      window.localStorage.setItem(data.key, JSON.stringify(data.value))
      return true
    } catch (e) {
      // BUG FIX: `this.isQuotaExceeded` is not a method on a Vuex store
      // (TypeError at runtime); use the module-level helper instead.
      if (isQuotaExceededError(e)) {
        return false
      }
    }
  },
  // BUG FIX: Vuex passes the context as the first argument — the original
  // signature treated the context object as the key.
  removeMemory({
    commit,
    state
  }, key) {
    var value = window.localStorage.getItem(key)
    window.localStorage.removeItem(key)
    return value
  },
  clearMemory() {
    window.localStorage.clear()
  }
}
// Namespaced Vuex module registration for the minder editor.
// NOTE(review): no `getters` are exported; add them here if any get defined.
export default {
  namespaced: true,
  state,
  mutations,
  actions
}
import { InputType, Int, Float, Field } from '@nestjs/graphql';
import { PageSearchDto } from 'src/shared/entities/pageinfo.entity';
@InputType({
  description: '키워드 기반으로 spot을 검색합니다. ',
})
// Paginated, keyword-based spot search input.
export class SearchSpotDto extends PageSearchDto {
  // Optional filter; when empty, results are unfiltered.
  @Field(() => String, { description: '비워질 경우 필터링 없이 검색됩니다.', nullable: true })
  keyword?: string;
}
@InputType({
  description: '주어진 x,y 기준으로 근처 spot을 검색합니다.',
})
// Paginated proximity search around the (x, y) center coordinate.
export class SearchNearSpotDto extends PageSearchDto {
  // Optional keyword; when empty, only the coordinates filter results.
  @Field(() => String, { description: '비워질 경우 x,y 기준으로만 검색됩니다.', nullable: true })
  keyword?: string;
  // NOTE(review): declared nullable in GraphQL but required in TS — confirm
  // whether x/y should be `x?: number` to match the schema.
  @Field(() => Float, { nullable: true })
  x: number;
  @Field(() => Float, { nullable: true })
  y: number;
  // Search radius in meters (0–20000) around the center point.
  @Field(() => Int, {
    description:
      '단위 meter, 0~20000 사이의 값으로 중심 좌표부터의 반경거리. 특정 지역을 중심으로 검색하려고 할 경우 중심좌표로 쓰일 x,y와 함께 사용.',
    nullable: true,
    defaultValue: 1000,
  })
  radius?: number;
}
|
<reponame>andrewwatson/machine
/*
* Copyright 2014 VMware, Inc. All rights reserved. Licensed under the Apache v2 License.
*/
package govcloudair
// orgvdcnetExample is a canned test fixture: an OrgVdcNetwork XML document
// as returned by the vCloud v1.5 API (routed network with one IP range).
// NOTE(review): the raw string begins with a newline before the XML
// declaration; Go's decoder tolerates it, but strict XML parsers may not.
var orgvdcnetExample = `
<?xml version="1.0" encoding="UTF-8"?>
<OrgVdcNetwork xmlns="http://www.vmware.com/vcloud/v1.5" status="1" name="networkName" id="urn:vcloud:network:cb0f4c9e-1a46-49d4-9fcb-d228000a6bc1" href="http://localhost:4444/api/network/cb0f4c9e-1a46-49d4-9fcb-d228000a6bc1" type="application/vnd.vmware.vcloud.orgVdcNetwork+xml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.vmware.com/vcloud/v1.5 http://10.6.32.3/api/v1.5/schema/master.xsd">
    <Link rel="up" href="http://localhost:4444/api/vdc/214cd6b2-3f7a-4ee5-9b0a-52b4001a4a84" type="application/vnd.vmware.vcloud.vdc+xml"/>
    <Link rel="down" href="http://localhost:4444/api/network/cb0f4c9e-1a46-49d4-9fcb-d228000a6bc1/metadata" type="application/vnd.vmware.vcloud.metadata+xml"/>
    <Link rel="down" href="http://localhost:4444/api/network/cb0f4c9e-1a46-49d4-9fcb-d228000a6bc1/allocatedAddresses/" type="application/vnd.vmware.vcloud.allocatedNetworkAddress+xml"/>
    <Description>This routed network was created with Create VDC.</Description>
    <Configuration>
        <IpScopes>
            <IpScope>
                <IsInherited>false</IsInherited>
                <Gateway>192.168.109.1</Gateway>
                <Netmask>255.255.255.0</Netmask>
                <IsEnabled>true</IsEnabled>
                <IpRanges>
                    <IpRange>
                        <StartAddress>192.168.109.2</StartAddress>
                        <EndAddress>192.168.109.100</EndAddress>
                    </IpRange>
                </IpRanges>
            </IpScope>
        </IpScopes>
        <FenceMode>natRouted</FenceMode>
        <RetainNetInfoAcrossDeployments>false</RetainNetInfoAcrossDeployments>
    </Configuration>
    <IsShared>false</IsShared>
</OrgVdcNetwork>
`
|
#!/bin/bash

/usr/bin/update-rancher-ssl

METADATA_SERVER=${METADATA_SERVER:-localhost}
RANCHER_METADATA_ANSWER=${RANCHER_METADATA_ANSWER:-169.254.169.250}
NEVER_RECURSE_TO=${NEVER_RECURSE_TO:-169.254.169.250}
AGENT_IP=

# Poll the Rancher metadata service until it reports this host's agent IP.
load_agent_ip() {
    # loop until metadata is available
    while [ "$AGENT_IP" == "" ] || [ "$AGENT_IP" == "Not found" ]; do
        AGENT_IP=$(curl -s "$METADATA_SERVER/2016-07-29/self/host/agent_ip")
        sleep 1
    done
}

# "agent_ip" is a sentinel meaning "resolve to this host's agent IP".
if [ "$RANCHER_METADATA_ANSWER" == "agent_ip" ]; then
    load_agent_ip
    RANCHER_METADATA_ANSWER=$AGENT_IP
fi

if [ "$NEVER_RECURSE_TO" == "agent_ip" ]; then
    load_agent_ip
    NEVER_RECURSE_TO=$AGENT_IP
fi

# Replace this shell with the requested command.
# BUG FIX: "$@" (quoted) preserves argument boundaries; the original
# unquoted $@ word-split any argument containing whitespace.
exec "$@" \
    -rancher-metadata-answer="$RANCHER_METADATA_ANSWER" \
    -never-recurse-to="$NEVER_RECURSE_TO"
-- Look up the full profile row for a single user by exact email address.
-- NOTE(review): if emails are stored case-insensitively, confirm the
-- column's collation (or compare with LOWER()) so lookups don't miss.
SELECT *
FROM users
WHERE email = 'jane.doe@gmail.com';
// Copyright (c) 2022 <NAME>. All Rights Reserved.
// https://github.com/cinar/indicatorts
import { deepStrictEqual } from 'assert';
import { roundDigitsAll } from '../../helper/numArray';
import { chaikinOscillator } from './chaikinOscillator';
// Verifies chaikinOscillator (fast EMA period 2, slow EMA period 5)
// against pre-computed values, rounded to 2 decimal places.
describe('Chaikin Oscillator', () => {
  it('should be able to compute co', () => {
    const highs = [10, 11, 12, 13, 14, 15, 16, 17];
    const lows = [1, 2, 3, 4, 5, 6, 7, 8];
    const closings = [5, 6, 7, 8, 9, 10, 11, 12];
    const volumes = [100, 200, 300, 400, 500, 600, 700, 800];
    const expected = [0, -7.41, -18.52, -31.69, -46.09, -61.27, -76.95, -92.97];
    const actual = chaikinOscillator(2, 5, highs, lows, closings, volumes);
    deepStrictEqual(roundDigitsAll(2, actual.co), expected);
  });
});
|
import os
import argparse
def process_template_files(srcdir, prefix, var, debug):
    """Instantiate every ``*.template`` file found in srcdir.

    For each template, replaces every ``<VAR>`` placeholder with ``var`` and
    writes the result back into ``srcdir`` under the template's name with
    the trailing ``.template`` suffix stripped and ``prefix`` prepended.

    Args:
        srcdir: directory to scan (non-recursive).
        prefix: string prepended to each output filename.
        var: replacement text for the ``<VAR>`` placeholder.
        debug: when True, print start/finish progress messages.
    """
    if debug:
        print("Debug: Processing started")
    suffix = ".template"
    for filename in os.listdir(srcdir):
        if not filename.endswith(suffix):
            continue
        with open(os.path.join(srcdir, filename), 'r') as file:
            content = file.read()
        modified_content = content.replace('<VAR>', var)  # Replace <VAR> with the user-defined value
        # BUG FIX: strip only the trailing ".template" suffix — str.replace
        # removed every occurrence and mangled names like "a.template.b".
        new_filename = prefix + filename[:-len(suffix)]
        with open(os.path.join(srcdir, new_filename), 'w') as new_file:
            new_file.write(modified_content)
    if debug:
        print("Debug: Processing finished")
# Command-line argument parsing
# NOTE(review): this runs at import time; consider wrapping in an
# `if __name__ == '__main__':` guard so the module stays importable.
parser = argparse.ArgumentParser(description='Process template files with user-defined variables')
parser.add_argument('--srcdir', '-r', help='force source dir to be this')
parser.add_argument('--prefix', '-p', default='', help='prefix for filename')
parser.add_argument('--var', '-v', default='', help='define a template variable')
parser.add_argument('--debug', action='store_true', help='enable debug mode')
args = parser.parse_args()
# Call the function with command-line arguments
process_template_files(args.srcdir, args.prefix, args.var, args.debug)
#!/bin/sh
#
# Copyright 2013-2018 Guardtime, Inc.
#
# This file is part of the Guardtime client SDK.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES, CONDITIONS, OR OTHER LICENSES OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
# "Guardtime" and "KSI" are trademarks or registered trademarks of
# Guardtime, Inc., and no license to trademarks is granted; Guardtime
# reserves and retains all trademark rights.

# Convenience wrapper: rebuild as an RPM, forwarding all extra arguments.
# BUG FIX: "$@" preserves argument boundaries; the original $* re-split
# arguments containing whitespace.
./rebuild.sh --build-rpm "$@"
<filename>src/components/Core/Tabs/index.ts
// Barrel file: re-exports the Tabs component (default), the Tab
// sub-component, and the shared interfaces/constants.
export { default } from './Tabs';
export { default as Tab } from './Tabs.tab';
export * from './Tabs.interface';
export * from './Tabs.constant';
|
<filename>java/ql/test/query-tests/NonSerializableInnerClass/NonSerializableInnerClassTest.java
import java.io.Serializable;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectOutput;
import java.io.ObjectInput;
// Fixture for the NonSerializableInnerClass CodeQL query: classes named
// ProblematicN are expected findings; classes named OkN must not be
// flagged. NOTE(review): do NOT "fix" the Problematic classes — the
// defects are intentional and the query test depends on them.
public class NonSerializableInnerClassTest {
    public static class S implements Serializable{}
    public int field;
    public static class Outer1{
        // Inner (non-static) serializable classes drag in the enclosing
        // instance, which is not serializable — expected findings.
        public class Problematic1 implements Serializable{ }
        public class Problematic2 extends S{ }
        @SuppressWarnings("serial")
        public class Ok1 implements Serializable{ }
        // Custom read/write object methods opt out of default serialization.
        public class Ok2 extends S{
            private void readObject(ObjectInputStream oos){}
            private void writeObject(ObjectOutputStream oos){}
        }
        public class Ok3 extends S{
            private void writeObject(ObjectOutputStream oos){}
        }
        public static class Ok4 extends S{ }
        public class Ok5 { }
        // in static contexts enclosing instances don't exist!
        static{
            Serializable ok6 = new Serializable(){ };
        }
        public static Serializable ok7 = new Serializable(){ };
        public static void staticMethod(){
            Serializable ok8 = new Serializable(){ };
        }
    }
    public static class Outer2 extends S {
        // Enclosing class is itself serializable, so this is fine.
        public class Ok9 implements Serializable{ }
    }
    public class Problematic3 extends S {
        public class Problematic4 implements Serializable{ } // because NonSerializableInnerClassTest is not serializable
    }
    // we currently ignore anonymous classes
    public void instanceMethod(){
        Serializable ok_ish1 = new Serializable(){
            public void test(){
                Serializable ok_ish2 = new Serializable(){
                    public void test(){
                        field = 5;
                    }
                };
            }
        };
    }
    // the class is not used anywhere, but the serialVersionUID field is an indicator for later serialization
    private class Problematic7 implements Serializable{
        public static final long serialVersionUID = 123;
    }
    // the class is not used anywhere
    private class Ok10 implements Serializable{ }
    // instantiations of this class are only assigned to non-serializable variables
    private class Ok11 implements Serializable{ }
    public void test(){
        Object o = new Ok11();
        System.out.println(new Ok11());
    }
}
|
<gh_stars>10-100
import mixin from '../utils/mixin';
/**
* Represents a node in a LinkedList.
* Initializes a new instance of the LinkedListNode class, containing the specified value.
* @param {Object} value The value to contain in the LinkedListNode
*/
export default function LinkedListNode(value) {
  // Payload plus intrusive links; _list/_prev/_next are managed by the
  // owning LinkedList and stay null until the node is attached.
  this._value = value;
  this._list = null;
  this._prev = null;
  this._next = null;
}
// Accessor methods added onto LinkedListNode.prototype. The underlying
// list appears to be circular (head linked to tail), so next()/previous()
// translate the wrap-around back into null at the list boundaries.
mixin(LinkedListNode, {
  /**
   * Gets the value contained in the node.
   * @returns {Object}
   */
  value: function () {
    return this._value;
  },
  /**
   * Gets the LinkedList that the LinkedListNode belongs to.
   * @returns {LinkedList}
   */
  list: function () {
    return this._list;
  },
  /**
   * Gets the next node in the LinkedList, or null past the tail.
   * @returns {LinkedListNode}
   */
  next: function () {
    return this._next === null || this._next === this._list.head ? null : this._next;
  },
  /**
   * Gets the previous node in the LinkedList, or null before the head.
   * @returns {LinkedListNode}
   */
  previous: function () {
    return this._prev === null || this === this._list.head ? null : this._prev;
  },
  toString: function () {
    return '[LinkedList Node]';
  }
});
|
<gh_stars>1-10
package main
import "log"
import "flag"
import "time"
import "strings"
import "net"
import "net/http"
import "io/ioutil"
import "strconv"
import "reflect"
import "encoding/json"
import "github.com/streadway/amqp"
import "github.com/marpaia/graphite-golang"
// rabbitmqConnect dials the broker at uri and opens a channel on the
// resulting connection. On a non-nil error, both returned values must be
// treated as unusable (the connection is not closed here).
// NOTE(review): queueName is currently unused — kept for call-site
// stability; confirm whether the queue was meant to be declared here.
func rabbitmqConnect(uri string, queueName string) (queueConnection *amqp.Connection, queueChannel *amqp.Channel, err error) {
	queueConnection, err = amqp.Dial(uri)
	if err == nil {
		queueChannel, err = queueConnection.Channel();
	}
	return
}
func nonFatalError(msg string, err error, pauseMsec int) bool {
if err == nil {
return false
}
log.Printf("non-fatal error - %s: %s", msg, err)
time.Sleep(time.Millisecond * time.Duration(pauseMsec))
return true
}
func fetchUrl(requestUrl string) (body []byte, statusCode int, err error) {
resp, err := http.Get(requestUrl)
if err != nil {
return
}
defer resp.Body.Close()
statusCode = resp.StatusCode
body, err = ioutil.ReadAll(resp.Body)
return
}
// findObject resolves a dot-separated query ("a.b.c") against a decoded
// JSON value, descending through nested maps. It returns nil when obj is
// not a map or any path segment is missing.
func findObject(query string, obj interface{}) (item interface{}) {
	if reflect.ValueOf(obj).Kind() != reflect.Map {
		return
	}
	i := strings.Index(query, ".")
	objMap := obj.(map[string]interface{})
	if i == -1 {
		item = objMap[query]
	} else {
		item = findObject(query[i+1:], objMap[query[:i]])
	}
	return
}

// findNumber resolves query and returns the value as float64, or 0 when
// the path is missing or the value is not a JSON number.
func findNumber(query string, obj interface{}) (result float64) {
	// BUG FIX: the original bare item.(float64) assertion panicked whenever
	// the field held a string/bool/object; comma-ok degrades to 0 instead.
	if item, ok := findObject(query, obj).(float64); ok {
		result = item
	}
	return
}

// findString resolves query and returns the value as string, or "" when
// the path is missing or the value is not a JSON string.
func findString(query string, obj interface{}) (result string) {
	// BUG FIX: comma-ok assertion, same panic as findNumber.
	if item, ok := findObject(query, obj).(string); ok {
		result = item
	}
	return
}
// fetchQueueMetrics scrapes per-queue statistics from the RabbitMQ
// management API (/api/queues) and converts them into graphite metrics
// named "<prefix>queue.<name>.<stat>". On any fetch failure it logs and
// returns an empty slice (monitoring is best-effort).
func fetchQueueMetrics(mgmtUri string, prefix string) (metrics []graphite.Metric) {
	url := mgmtUri + "/api/queues"
	response, statusCode, err := fetchUrl(url)
	if err != nil || statusCode != 200 {
		// FIX: corrected the "rabbiqmq" typo in the log message.
		log.Printf("error fetch rabbitmq queues: %d - %s", statusCode, err)
		return
	}
	var stats []interface{}
	json.Unmarshal(response, &stats)
	// Graphite stat suffix -> JSON path in the management API payload;
	// the table replaces five copy-pasted append blocks.
	values := []struct {
		suffix string
		query  string
	}{
		{"rate_publish", "message_stats.publish_details.rate"},
		{"rate_get", "message_stats.deliver_get_details.rate"},
		{"rate_noack", "message_stats.deliver_no_ack_details.rate"},
		{"msg_ready", "messages_ready"},
		{"msg_unack", "messages_unacknowledged"},
	}
	for _, stat := range stats {
		name := findString("name", stat)
		if name == "" {
			continue
		}
		now := time.Now().Unix()
		for _, v := range values {
			metrics = append(metrics, graphite.Metric{
				Name:      prefix + "queue." + name + "." + v.suffix,
				Value:     strconv.Itoa(int(findNumber(v.query, stat))),
				Timestamp: now,
			})
		}
	}
	return
}
// fetchExchangeMetrics scrapes per-exchange publish rates from the
// RabbitMQ management API (/api/exchanges) and converts them into graphite
// metrics named "<prefix>exchange.<name>.rate_{in,out}". On any fetch
// failure it logs and returns an empty slice.
func fetchExchangeMetrics(mgmtUri string, prefix string) (metrics []graphite.Metric) {
	url := mgmtUri + "/api/exchanges"
	response, statusCode, err := fetchUrl(url)
	if err != nil || statusCode != 200 {
		// BUG FIX: the message said "queues" although this scrapes the
		// exchanges endpoint (also fixed the "rabbiqmq" typo).
		log.Printf("error fetch rabbitmq exchanges: %d - %s", statusCode, err)
		return
	}
	var stats []interface{}
	json.Unmarshal(response, &stats)
	for _, stat := range stats {
		name := findString("name", stat)
		if name == "" {
			continue
		}
		now := time.Now().Unix()
		metrics = append(metrics,
			graphite.Metric{
				Name:      prefix + "exchange." + name + ".rate_in",
				Value:     strconv.Itoa(int(findNumber("message_stats.publish_in_details.rate", stat))),
				Timestamp: now,
			},
			graphite.Metric{
				Name:      prefix + "exchange." + name + ".rate_out",
				Value:     strconv.Itoa(int(findNumber("message_stats.publish_out_details.rate", stat))),
				Timestamp: now,
			})
	}
	return
}
func monitoring(uri string, queueName string, mgmtUri string, prefix string) {
var (
queueConn *amqp.Connection
queueChan *amqp.Channel
err error
)
queueConn, queueChan, err = rabbitmqConnect(uri, queueName)
if err != nil {
return
}
for {
log.Printf("fetch rabbitmq stats")
var metrics []graphite.Metric
for _, metric := range fetchQueueMetrics(mgmtUri, prefix) {
metrics = append(metrics, metric)
}
for _, metric := range fetchExchangeMetrics(mgmtUri, prefix) {
metrics = append(metrics, metric)
}
for _, metric := range metrics {
body := []byte( metric.Name+"\t"+metric.Value+"\t"+strconv.FormatInt(metric.Timestamp, 10))
msg := amqp.Publishing{ContentType:"text/plain",Body:body}
err = queueChan.Publish("", queueName, false, false, msg)
if err != nil {
log.Printf("publish err: %s", err)
return
}
//log.Printf("metric\t%s\t\t%s", metric.Name, metric.Value)
}
time.Sleep(time.Second * 5)
}
queueChan.Close()
queueConn.Close()
}
// metricListen consumes metric messages ("name\tvalue\ttimestamp") from
// queueName and forwards each one to the graphite server. It returns on
// the first connect/consume/send error so the caller can retry; the
// rabbitmq connection and channel are released via defer.
// NOTE(review): a malformed message with fewer than 3 tab-separated fields
// would panic on data[2] — confirm producers are trusted before hardening.
func metricListen(uri string, queueName string, graphiteHost string, graphitePort int) (err error) {
	queueConn, queueChan, err := rabbitmqConnect(uri, queueName)
	if nonFatalError("can't connect to rabbitmq", err, 5000) {
		return
	}
	defer queueConn.Close()
	defer queueChan.Close()
	// auto-ack consumer: messages are acknowledged as soon as delivered.
	msgs, err := queueChan.Consume(queueName, "", true, false, false, false, nil)
	if err != nil {
		return
	}
	graphiteConn, err := graphite.NewGraphite(graphiteHost, graphitePort)
	if err != nil {
		return
	}
	for msg := range msgs {
		data := strings.Split(string(msg.Body), "\t")
		timestamp, _ := strconv.ParseInt(data[2], 10, 64)
		//log.Printf("metric: %s = %s", data[0], data[1])
		metric := graphite.Metric{Name:data[0],Value:data[1],Timestamp:timestamp}
		err = graphiteConn.SendMetric(metric)
		if err != nil {
			return
		}
	}
	return
}
// main wires the two halves together: one goroutine scrapes RabbitMQ stats
// and publishes them to the intermediate queue, while the main loop drains
// that queue into graphite. Both sides restart 1s after any failure.
func main() {
	log.Printf("Welcome to rabbitmq-graphite-tool")
	// NOTE(review): the local variable `graphite` shadows the imported
	// graphite package inside main — it compiles (the package is not used
	// in main afterwards), but renaming the variable would be clearer.
	var (
		queue     string
		uri       string
		mgmtUri   string
		graphite  string
		prefix    string
		err       error
	)
	flag.StringVar(&queue,
		"rabbitmq-queue", "graphite", "incoming queue name for graphite metrics")
	flag.StringVar(&uri,
		"rabbitmq-uri", "amqp://guest:guest@localhost:5672", "rabbitmq connection uri")
	flag.StringVar(&mgmtUri,
		"rabbitmq-mgmt-uri",
		"http://guest:guest@localhost:15672", "rabbitmq managment plugin address host:port")
	flag.StringVar(&graphite,
		"graphite", "localhost:2003", "graphite server address host:port")
	flag.StringVar(&prefix,
		"prefix", "rabbitmq.node01.", "prefix for rabbitmq monitoring in graphite")
	flag.Parse()
	log.Printf("rabbitmq-queue: %s", queue)
	log.Printf("rabbitmq-uri: %s", uri)
	log.Printf("rabbitmq-mgmt-uri: %s", mgmtUri)
	log.Printf("graphite-addr: %s", graphite)
	log.Printf("prefix: %s", prefix)
	graphiteHost, _graphitePort, err := net.SplitHostPort(graphite)
	if err != nil {
		log.Fatalf("can't parse graphite host:port: %s", graphite)
		return
	}
	graphitePort, _ := strconv.Atoi(_graphitePort)
	// Producer side: scrape stats forever, reconnecting on failure.
	go func () {
		for {
			log.Printf("start monitoring")
			monitoring(uri, queue, mgmtUri, prefix)
			time.Sleep(time.Second)
		}
	}()
	// Consumer side: drain the metric queue into graphite, retrying forever.
	for {
		err = metricListen(uri, queue, graphiteHost, graphitePort)
		if err != nil {
			log.Printf("err: %s", err)
			time.Sleep(time.Second)
		}
	}
}
|
<gh_stars>10-100
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.2-147
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.01.26 at 02:04:22 PM MST
//
package net.opengis.wfs._110;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;
/**
* This object contains factory methods for each
* Java content interface and Java element interface
* generated in the net.opengis.wfs._110 package.
* <p>An ObjectFactory allows you to programatically
* construct new instances of the Java representation
* for XML content. The Java representation of XML
* content can consist of schema derived interfaces
* and classes representing the binding of schema
* type definitions, element declarations and model
* groups. Factory methods for each of these are
* provided in this class.
*
*/
@XmlRegistry
public class ObjectFactory {

    // QName constants for the WFS 1.1.0 element declarations in the
    // http://www.opengis.net/wfs namespace. NOTE: this class is JAXB-generated
    // (see file header) — do not hand-edit; regenerate from the schema instead.
    private final static QName _GetFeature_QNAME = new QName("http://www.opengis.net/wfs", "GetFeature");
    private final static QName _Property_QNAME = new QName("http://www.opengis.net/wfs", "Property");
    private final static QName _DescribeFeatureType_QNAME = new QName("http://www.opengis.net/wfs", "DescribeFeatureType");
    private final static QName _FeatureTypeList_QNAME = new QName("http://www.opengis.net/wfs", "FeatureTypeList");
    private final static QName _LockFeature_QNAME = new QName("http://www.opengis.net/wfs", "LockFeature");
    private final static QName _ServesGMLObjectTypeList_QNAME = new QName("http://www.opengis.net/wfs", "ServesGMLObjectTypeList");
    private final static QName _Update_QNAME = new QName("http://www.opengis.net/wfs", "Update");
    private final static QName _PropertyName_QNAME = new QName("http://www.opengis.net/wfs", "PropertyName");
    private final static QName _TransactionResponse_QNAME = new QName("http://www.opengis.net/wfs", "TransactionResponse");
    private final static QName _GetGmlObject_QNAME = new QName("http://www.opengis.net/wfs", "GetGmlObject");
    private final static QName _GetCapabilities_QNAME = new QName("http://www.opengis.net/wfs", "GetCapabilities");
    private final static QName _Native_QNAME = new QName("http://www.opengis.net/wfs", "Native");
    private final static QName _WFSCapabilities_QNAME = new QName("http://www.opengis.net/wfs", "WFS_Capabilities");
    private final static QName _LockFeatureResponse_QNAME = new QName("http://www.opengis.net/wfs", "LockFeatureResponse");
    private final static QName _LockId_QNAME = new QName("http://www.opengis.net/wfs", "LockId");
    private final static QName _SupportsGMLObjectTypeList_QNAME = new QName("http://www.opengis.net/wfs", "SupportsGMLObjectTypeList");
    private final static QName _Insert_QNAME = new QName("http://www.opengis.net/wfs", "Insert");
    private final static QName _Transaction_QNAME = new QName("http://www.opengis.net/wfs", "Transaction");
    private final static QName _Query_QNAME = new QName("http://www.opengis.net/wfs", "Query");
    private final static QName _GetFeatureWithLock_QNAME = new QName("http://www.opengis.net/wfs", "GetFeatureWithLock");
    private final static QName _Delete_QNAME = new QName("http://www.opengis.net/wfs", "Delete");
    private final static QName _FeatureCollection_QNAME = new QName("http://www.opengis.net/wfs", "FeatureCollection");

    /**
     * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: net.opengis.wfs._110
     *
     */
    public ObjectFactory() {
    }

    /**
     * Create an instance of {@link LockFeatureType }
     *
     */
    public LockFeatureType createLockFeatureType() {
        return new LockFeatureType();
    }

    /**
     * Create an instance of {@link GetGmlObjectType }
     *
     */
    public GetGmlObjectType createGetGmlObjectType() {
        return new GetGmlObjectType();
    }

    /**
     * Create an instance of {@link DescribeFeatureTypeType }
     *
     */
    public DescribeFeatureTypeType createDescribeFeatureTypeType() {
        return new DescribeFeatureTypeType();
    }

    /**
     * Create an instance of {@link OutputFormatListType }
     *
     */
    public OutputFormatListType createOutputFormatListType() {
        return new OutputFormatListType();
    }

    /**
     * Create an instance of {@link PropertyType }
     *
     */
    public PropertyType createPropertyType() {
        return new PropertyType();
    }

    /**
     * Create an instance of {@link MetadataURLType }
     *
     */
    public MetadataURLType createMetadataURLType() {
        return new MetadataURLType();
    }

    /**
     * Create an instance of {@link LockFeatureResponseType }
     *
     */
    public LockFeatureResponseType createLockFeatureResponseType() {
        return new LockFeatureResponseType();
    }

    /**
     * Create an instance of {@link NativeType }
     *
     */
    public NativeType createNativeType() {
        return new NativeType();
    }

    /**
     * Create an instance of {@link TransactionResponseType }
     *
     */
    public TransactionResponseType createTransactionResponseType() {
        return new TransactionResponseType();
    }

    /**
     * Create an instance of {@link QueryType }
     *
     */
    public QueryType createQueryType() {
        return new QueryType();
    }

    /**
     * Create an instance of {@link DeleteElementType }
     *
     */
    public DeleteElementType createDeleteElementType() {
        return new DeleteElementType();
    }

    /**
     * Create an instance of {@link TransactionResultsType }
     *
     */
    public TransactionResultsType createTransactionResultsType() {
        return new TransactionResultsType();
    }

    /**
     * Create an instance of {@link GetFeatureWithLockType }
     *
     */
    public GetFeatureWithLockType createGetFeatureWithLockType() {
        return new GetFeatureWithLockType();
    }

    /**
     * Create an instance of {@link ActionType }
     *
     */
    public ActionType createActionType() {
        return new ActionType();
    }

    /**
     * Create an instance of {@link InsertElementType }
     *
     */
    public InsertElementType createInsertElementType() {
        return new InsertElementType();
    }

    /**
     * Create an instance of {@link GetCapabilitiesType }
     *
     */
    public GetCapabilitiesType createGetCapabilitiesType() {
        return new GetCapabilitiesType();
    }

    /**
     * Create an instance of {@link FeaturesNotLockedType }
     *
     */
    public FeaturesNotLockedType createFeaturesNotLockedType() {
        return new FeaturesNotLockedType();
    }

    /**
     * Create an instance of {@link InsertResultsType }
     *
     */
    public InsertResultsType createInsertResultsType() {
        return new InsertResultsType();
    }

    /**
     * Create an instance of {@link FeatureCollectionType }
     *
     */
    public FeatureCollectionType createFeatureCollectionType() {
        return new FeatureCollectionType();
    }

    /**
     * Create an instance of {@link GetFeatureType }
     *
     */
    public GetFeatureType createGetFeatureType() {
        return new GetFeatureType();
    }

    /**
     * Create an instance of {@link GMLObjectTypeListType }
     *
     */
    public GMLObjectTypeListType createGMLObjectTypeListType() {
        return new GMLObjectTypeListType();
    }

    /**
     * Create an instance of {@link OperationsType }
     *
     */
    public OperationsType createOperationsType() {
        return new OperationsType();
    }

    /**
     * Create an instance of {@link XlinkPropertyName }
     *
     */
    public XlinkPropertyName createXlinkPropertyName() {
        return new XlinkPropertyName();
    }

    /**
     * Create an instance of {@link GMLObjectTypeType }
     *
     */
    public GMLObjectTypeType createGMLObjectTypeType() {
        return new GMLObjectTypeType();
    }

    /**
     * Create an instance of {@link UpdateElementType }
     *
     */
    public UpdateElementType createUpdateElementType() {
        return new UpdateElementType();
    }

    /**
     * Create an instance of {@link TransactionSummaryType }
     *
     */
    public TransactionSummaryType createTransactionSummaryType() {
        return new TransactionSummaryType();
    }

    /**
     * Create an instance of {@link WFSCapabilitiesType }
     *
     */
    public WFSCapabilitiesType createWFSCapabilitiesType() {
        return new WFSCapabilitiesType();
    }

    /**
     * Create an instance of {@link FeatureTypeType.NoSRS }
     *
     */
    public FeatureTypeType.NoSRS createFeatureTypeTypeNoSRS() {
        return new FeatureTypeType.NoSRS();
    }

    /**
     * Create an instance of {@link FeatureTypeType }
     *
     */
    public FeatureTypeType createFeatureTypeType() {
        return new FeatureTypeType();
    }

    /**
     * Create an instance of {@link FeaturesLockedType }
     *
     */
    public FeaturesLockedType createFeaturesLockedType() {
        return new FeaturesLockedType();
    }

    /**
     * Create an instance of {@link TransactionType }
     *
     */
    public TransactionType createTransactionType() {
        return new TransactionType();
    }

    /**
     * Create an instance of {@link FeatureTypeListType }
     *
     */
    public FeatureTypeListType createFeatureTypeListType() {
        return new FeatureTypeListType();
    }

    /**
     * Create an instance of {@link LockType }
     *
     */
    public LockType createLockType() {
        return new LockType();
    }

    /**
     * Create an instance of {@link InsertedFeatureType }
     *
     */
    public InsertedFeatureType createInsertedFeatureType() {
        return new InsertedFeatureType();
    }

    // JAXBElement factory methods: wrap a content object in its schema-declared
    // element so it can be marshalled as a document root.

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetFeatureType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "GetFeature")
    public JAXBElement<GetFeatureType> createGetFeature(GetFeatureType value) {
        return new JAXBElement<GetFeatureType>(_GetFeature_QNAME, GetFeatureType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link PropertyType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Property")
    public JAXBElement<PropertyType> createProperty(PropertyType value) {
        return new JAXBElement<PropertyType>(_Property_QNAME, PropertyType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link DescribeFeatureTypeType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "DescribeFeatureType")
    public JAXBElement<DescribeFeatureTypeType> createDescribeFeatureType(DescribeFeatureTypeType value) {
        return new JAXBElement<DescribeFeatureTypeType>(_DescribeFeatureType_QNAME, DescribeFeatureTypeType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link FeatureTypeListType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "FeatureTypeList")
    public JAXBElement<FeatureTypeListType> createFeatureTypeList(FeatureTypeListType value) {
        return new JAXBElement<FeatureTypeListType>(_FeatureTypeList_QNAME, FeatureTypeListType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link LockFeatureType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "LockFeature")
    public JAXBElement<LockFeatureType> createLockFeature(LockFeatureType value) {
        return new JAXBElement<LockFeatureType>(_LockFeature_QNAME, LockFeatureType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GMLObjectTypeListType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "ServesGMLObjectTypeList")
    public JAXBElement<GMLObjectTypeListType> createServesGMLObjectTypeList(GMLObjectTypeListType value) {
        return new JAXBElement<GMLObjectTypeListType>(_ServesGMLObjectTypeList_QNAME, GMLObjectTypeListType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link UpdateElementType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Update")
    public JAXBElement<UpdateElementType> createUpdate(UpdateElementType value) {
        return new JAXBElement<UpdateElementType>(_Update_QNAME, UpdateElementType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "PropertyName")
    public JAXBElement<String> createPropertyName(String value) {
        return new JAXBElement<String>(_PropertyName_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link TransactionResponseType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "TransactionResponse")
    public JAXBElement<TransactionResponseType> createTransactionResponse(TransactionResponseType value) {
        return new JAXBElement<TransactionResponseType>(_TransactionResponse_QNAME, TransactionResponseType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetGmlObjectType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "GetGmlObject")
    public JAXBElement<GetGmlObjectType> createGetGmlObject(GetGmlObjectType value) {
        return new JAXBElement<GetGmlObjectType>(_GetGmlObject_QNAME, GetGmlObjectType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetCapabilitiesType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "GetCapabilities")
    public JAXBElement<GetCapabilitiesType> createGetCapabilities(GetCapabilitiesType value) {
        return new JAXBElement<GetCapabilitiesType>(_GetCapabilities_QNAME, GetCapabilitiesType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link NativeType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Native")
    public JAXBElement<NativeType> createNative(NativeType value) {
        return new JAXBElement<NativeType>(_Native_QNAME, NativeType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link WFSCapabilitiesType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "WFS_Capabilities")
    public JAXBElement<WFSCapabilitiesType> createWFSCapabilities(WFSCapabilitiesType value) {
        return new JAXBElement<WFSCapabilitiesType>(_WFSCapabilities_QNAME, WFSCapabilitiesType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link LockFeatureResponseType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "LockFeatureResponse")
    public JAXBElement<LockFeatureResponseType> createLockFeatureResponse(LockFeatureResponseType value) {
        return new JAXBElement<LockFeatureResponseType>(_LockFeatureResponse_QNAME, LockFeatureResponseType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "LockId")
    public JAXBElement<String> createLockId(String value) {
        return new JAXBElement<String>(_LockId_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GMLObjectTypeListType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "SupportsGMLObjectTypeList")
    public JAXBElement<GMLObjectTypeListType> createSupportsGMLObjectTypeList(GMLObjectTypeListType value) {
        return new JAXBElement<GMLObjectTypeListType>(_SupportsGMLObjectTypeList_QNAME, GMLObjectTypeListType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link InsertElementType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Insert")
    public JAXBElement<InsertElementType> createInsert(InsertElementType value) {
        return new JAXBElement<InsertElementType>(_Insert_QNAME, InsertElementType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link TransactionType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Transaction")
    public JAXBElement<TransactionType> createTransaction(TransactionType value) {
        return new JAXBElement<TransactionType>(_Transaction_QNAME, TransactionType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link QueryType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Query")
    public JAXBElement<QueryType> createQuery(QueryType value) {
        return new JAXBElement<QueryType>(_Query_QNAME, QueryType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetFeatureWithLockType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "GetFeatureWithLock")
    public JAXBElement<GetFeatureWithLockType> createGetFeatureWithLock(GetFeatureWithLockType value) {
        return new JAXBElement<GetFeatureWithLockType>(_GetFeatureWithLock_QNAME, GetFeatureWithLockType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link DeleteElementType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "Delete")
    public JAXBElement<DeleteElementType> createDelete(DeleteElementType value) {
        return new JAXBElement<DeleteElementType>(_Delete_QNAME, DeleteElementType.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link FeatureCollectionType }{@code >}}
     *
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/wfs", name = "FeatureCollection", substitutionHeadNamespace = "http://www.opengis.net/gml", substitutionHeadName = "_FeatureCollection")
    public JAXBElement<FeatureCollectionType> createFeatureCollection(FeatureCollectionType value) {
        return new JAXBElement<FeatureCollectionType>(_FeatureCollection_QNAME, FeatureCollectionType.class, null, value);
    }

}
|
<filename>nlp/command/color.py
from nlp.dispatch import Dispatchable
import object_detection.constants as constants
from nlp.transform import position
class DescribeObjectColor(Dispatchable):
    """Respond with the primary color of a detected object.

    Pulls the latest detection state off ``state_q``, prefers an object whose
    bounding box is positioned in the center of the frame, and sends a
    natural-language response naming that object's dominant color. Sends a
    failure message when no state is available.
    """

    color_tmpl = 'The {obj_name} is primarily {color}'
    color_fail_tmpl = 'I cannot determine the color'

    def __init__(self, state_q):
        # Queue yielding the most recent object-detection state
        # (either raw vectors or ObjectSeries instances).
        self.state_q = state_q

    def __call__(self, payload):
        state = self.state_q.get()
        if state:
            # Normalize raw vectors into ObjectSeries if necessary.
            if not isinstance(state[0], constants.ObjectSeries):
                vecs = to_object_series_list(state)
            else:
                vecs = state
            # BUG FIX: the original used `is 'center'`, which compares string
            # identity rather than equality and only worked by accident of
            # interning; use `==`. (Also removed a leftover debug print.)
            center_vecs = [v for v in vecs if position(v.obj_bbox) == 'center']
            if center_vecs:
                obj_vec = center_vecs.pop(0)
            else:
                obj_vec = vecs.pop(0)
            max_color = obj_vec.obj_primary_color
            obj_name = obj_vec['category']
            self.send({
                'response': self.color_tmpl.format(obj_name=obj_name, color=max_color)
            })
            return
        self.send({
            'response': self.color_fail_tmpl
        })
class DescribeSceneColor(Dispatchable):
    """Stub for describing the overall color of the scene (not yet implemented)."""

    # TODO: implement
    def get_scene_color(self):
        # Placeholder: intended to compute a dominant color for the whole frame.
        pass
def to_object_series_list(state):
    """Wrap each raw object vector in ``state`` as a ``constants.ObjectSeries``."""
    series_list = []
    for obj_vector in state:
        series_list.append(
            constants.ObjectSeries(obj_vector, index=constants.OBJECT_VECTOR_KEYS)
        )
    return series_list
|
from typing import List
def calculate_memory_size(operations: List[str]) -> int:
    """Sum the bytes of memory required by a sequence of load operations.

    Each operation looks like ``"<receiver>.<loadType>(<args>)"``; the load
    type is the token between the final ``.`` and the opening ``(``.  Known
    types ``loadInt`` and ``loadFloat`` each account for 4 bytes; any other
    load type contributes nothing.
    """
    # Bytes required per recognized load type; unknown types map to 0.
    byte_sizes = {'loadInt': 4, 'loadFloat': 4}
    total = 0
    for operation in operations:
        load_type = operation.split('(')[0].split('.')[-1]
        total += byte_sizes.get(load_type, 0)
    return total
<gh_stars>0
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.rest.api.service;
import io.gravitee.repository.exceptions.TechnicalException;
import io.gravitee.repository.management.api.PageRevisionRepository;
import io.gravitee.repository.management.model.Page;
import io.gravitee.repository.management.model.PageReferenceType;
import io.gravitee.repository.management.model.PageRevision;
import io.gravitee.rest.api.model.PageRevisionEntity;
import io.gravitee.rest.api.model.PageType;
import io.gravitee.rest.api.service.exceptions.TechnicalManagementException;
import io.gravitee.rest.api.service.impl.PageRevisionServiceImpl;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.Date;
import java.util.Optional;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
/**
* @author <NAME> (eric.leleu at graviteesource.com)
* @author GraviteeSource Team
*/
@RunWith(MockitoJUnitRunner.class)
public class PageRevisionServiceTest {

    private static final String PAGE_ID = "ba01aef0-e3da-4499-81ae-f0e3daa4995a";

    @InjectMocks
    private PageRevisionServiceImpl pageRevisionService = new PageRevisionServiceImpl();

    @Mock
    private AuditService auditService;

    @Mock
    private PageRevisionRepository pageRevisionRepository;

    /**
     * The first revision of a page (no previous revision stored) must be numbered 1.
     */
    @Test
    public void shouldCreateRevision() throws TechnicalException {
        Date now = new Date();
        Page page = mock(Page.class);
        when(page.getId()).thenReturn(PAGE_ID);
        when(page.getName()).thenReturn("SomeName");
        when(page.getContent()).thenReturn("SomeContent");
        when(page.getUpdatedAt()).thenReturn(now);
        when(page.getLastContributor()).thenReturn("Author");
        when(page.getType()).thenReturn(PageType.MARKDOWN.name());
        PageRevision newRevision = mock(PageRevision.class);
        when(pageRevisionRepository.findLastByPageId(PAGE_ID)).thenReturn(Optional.empty());
        when(pageRevisionRepository.create(any())).thenReturn(newRevision);
        ArgumentCaptor<PageRevision> newRevisionCaptor = ArgumentCaptor.forClass(PageRevision.class);
        PageRevisionEntity createdRevision = pageRevisionService.create(page);
        assertNotNull(createdRevision);
        verify(pageRevisionRepository).findLastByPageId(PAGE_ID);
        verify(pageRevisionRepository).create(newRevisionCaptor.capture());
        PageRevision createdRev = newRevisionCaptor.getValue();
        assertNotNull(createdRev);
        assertEquals(1, createdRev.getRevision());
    }

    /**
     * When a revision already exists, the new revision number must be the last one plus 1.
     */
    @Test
    public void shouldCreateRevision_NewIncrementedRevision() throws TechnicalException {
        Date now = new Date();
        Page page = mock(Page.class);
        when(page.getId()).thenReturn(PAGE_ID);
        when(page.getName()).thenReturn("SomeName");
        when(page.getContent()).thenReturn("SomeContent");
        when(page.getUpdatedAt()).thenReturn(now);
        when(page.getLastContributor()).thenReturn("Author");
        when(page.getType()).thenReturn(PageType.MARKDOWN.name());
        PageRevision lastRevision = mock(PageRevision.class);
        when(lastRevision.getRevision()).thenReturn(2);
        PageRevision newRevision = mock(PageRevision.class);
        when(pageRevisionRepository.findLastByPageId(PAGE_ID)).thenReturn(Optional.of(lastRevision));
        when(pageRevisionRepository.create(any())).thenReturn(newRevision);
        ArgumentCaptor<PageRevision> newRevisionCaptor = ArgumentCaptor.forClass(PageRevision.class);
        PageRevisionEntity createdRevision = pageRevisionService.create(page);
        assertNotNull(createdRevision);
        verify(pageRevisionRepository).findLastByPageId(PAGE_ID);
        verify(pageRevisionRepository).create(newRevisionCaptor.capture());
        PageRevision createdRev = newRevisionCaptor.getValue();
        assertNotNull(createdRev);
        assertEquals(lastRevision.getRevision() + 1, createdRev.getRevision());
    }

    /**
     * Portal-page creation also starts at revision 1.
     * NOTE(review): this test is currently byte-for-byte identical to
     * {@link #shouldCreateRevision()} — it probably intended to use a
     * portal-specific reference type; confirm against the service API.
     */
    @Test
    public void shouldCreateRevision_PortalPage() throws TechnicalException {
        Date now = new Date();
        Page page = mock(Page.class);
        when(page.getId()).thenReturn(PAGE_ID);
        when(page.getName()).thenReturn("SomeName");
        when(page.getContent()).thenReturn("SomeContent");
        when(page.getUpdatedAt()).thenReturn(now);
        when(page.getLastContributor()).thenReturn("Author");
        when(page.getType()).thenReturn(PageType.MARKDOWN.name());
        PageRevision newRevision = mock(PageRevision.class);
        when(pageRevisionRepository.findLastByPageId(PAGE_ID)).thenReturn(Optional.empty());
        when(pageRevisionRepository.create(any())).thenReturn(newRevision);
        ArgumentCaptor<PageRevision> newRevisionCaptor = ArgumentCaptor.forClass(PageRevision.class);
        PageRevisionEntity createdRevision = pageRevisionService.create(page);
        assertNotNull(createdRevision);
        verify(pageRevisionRepository).findLastByPageId(PAGE_ID);
        verify(pageRevisionRepository).create(newRevisionCaptor.capture());
        PageRevision createdRev = newRevisionCaptor.getValue();
        assertNotNull(createdRev);
        assertEquals(1, createdRev.getRevision());
    }

    /**
     * Pages of a non-revisable type (e.g. folders) must be rejected with a
     * TechnicalManagementException.
     */
    @Test(expected = TechnicalManagementException.class)
    public void shouldNotCreate_Because_InvalidType() throws TechnicalException {
        // (removed an unused local `Date now` that the original declared)
        Page page = mock(Page.class);
        when(page.getId()).thenReturn(PAGE_ID);
        when(page.getType()).thenReturn(PageType.FOLDER.name());
        pageRevisionService.create(page);
    }
}
|
package com.xxmassdeveloper.mpchartexample;
/**
 * @author Yan Pengfei (闫鹏飞)
 * create at 2017-11-15
 * description: queries Postal Savings Bank branch information
 *              (NOTE(review): this description looks stale — the class below
 *              models per-region balance data for a pie chart; verify.)
 */
public class PieBalance {

    // Region label for one pie-chart slice.
    private String region;
    // Balance amount for the region (kept as a string as provided upstream).
    private String balance;
    // Ratio of fixed to live deposits (string form as provided upstream).
    private String fixed_live_ratio;

    public PieBalance() {
    }

    public PieBalance(String region, String balance, String fixed_live_ratio) {
        this.region = region;
        this.balance = balance;
        this.fixed_live_ratio = fixed_live_ratio;
    }

    public String getRegion() {
        return region;
    }

    public void setRegion(String region) {
        this.region = region;
    }

    public String getBalance() {
        return balance;
    }

    public void setBalance(String balance) {
        this.balance = balance;
    }

    public String getFixed_live_ratio() {
        return fixed_live_ratio;
    }

    public void setFixed_live_ratio(String fixed_live_ratio) {
        this.fixed_live_ratio = fixed_live_ratio;
    }
}
|
const express = require('express');
const router = express.Router();
const cheerio = require('cheerio');
const request = require('request');
const phantom = require('phantom');

// GET /info/:id — render Naver's company-info page for the given stock code
// with PhantomJS, scrape it with cheerio, and return a JSON summary
// (name, code, PER, sector, volume, profits/assets table, consensus).
router.get('/info/:id', (req, res, next) => {
    var url = 'http://companyinfo.stock.naver.com/v1/company/c1010001.aspx?cmp_cd=' + req.params.id
    request(url, (err, response, html) => {
        if (err) {
            // BUG FIX: the original did `res.send({}); throw err;` — throwing
            // inside an async callback crashes the process after the response
            // has already been sent. Log and reply instead.
            console.error(err);
            return res.send({});
        }
        (async function() {
            const instance = await phantom.create();
            try {
                const page = await instance.createPage();
                await page.on('onResourceRequested', function(requestData) {
                    console.info('Requesting', requestData.url);
                });
                const status = await page.open(url);
                const content = await page.property('content');
                var $ = cheerio.load(content);
                var profits = [];
                var assets = [];
                var headers = [];
                // NOTE(review): index offsets (8..15, +48, 3..10) assume a fixed
                // table layout on the scraped page — confirm against the page.
                for (var i = 8; i < 16; i++) {
                    profits.push($('table.gHead01 tbody tr td.num span.cBk')[i].children[0].data);
                    assets.push($('table.gHead01 tbody tr td.num span.cBk')[i + 48].children[0].data);
                }
                for (var i = 3; i < 11; i++) {
                    headers.push($('table.gHead01 thead tr th.bg')[i].children[0].data.trim());
                }
                res.send({
                    name: $('span.name')[0].children[0].data,
                    code: $('b.num')[0].children[0].data,
                    per: $('dt.line-left b.num')[1].children[0].data,
                    per_job: $('dt.line-left b.num')['2'].children[0].data,
                    wics: $('table.cmp-table tbody tr.cmp-table-row td.cmp-table-cell dl').children('dt.line-left')['2'].children[0].data.split(' : ')[1],
                    volume: $('td.num')['3'].children[0].data.split('/')[0].trim(),
                    foreigner_percentage: $('td.num')['7'].children[0].data,
                    profits: profits,
                    headers: headers,
                    assets: assets,
                    consensus: $('table.gHead.all-width#cTB15 tbody tr td.noline-bottom.line-right.center.cUp b')[0].children[0].data
                })
            } catch (e) {
                // BUG FIX: the async IIFE had no error handling, so any scraping
                // failure became an unhandled promise rejection; forward to the
                // Express error middleware instead.
                next(e);
            } finally {
                // BUG FIX: always shut PhantomJS down, even when scraping fails
                // (the original leaked the instance on every error).
                await instance.exit();
            }
        })();
    })
});

module.exports = router;
|
#!/bin/bash
# Deploys the "bookstore-vm" demo: a plain Kubernetes Service plus an OSM
# AzureResource that maps the mesh service onto an Azure VM backend.
set -aueo pipefail

# shellcheck disable=SC1091
# .env is expected to define K8S_NAMESPACE (used inside the manifest below).
source .env

echo -e "Deploy bookstore-vm demo service"
# Apply both resources from an inline manifest; shell expands $K8S_NAMESPACE
# inside the (unquoted) heredoc before kubectl sees it.
kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
  name: bookstore-vm
  namespace: $K8S_NAMESPACE
  labels:
    app: bookstore-vm
spec:
  ports:
  - port: 80
    targetPort: 80
    name: app-port
  selector:
    app: bookstore-vm
---
apiVersion: osm.osm.k8s.io/v1
kind: AzureResource
metadata:
  name: bookstore
  namespace: $K8S_NAMESPACE
  labels:
    app: bookstore-vm
spec:
  resourceid: /subscriptions/your-subscription-id/resourceGroups/your-resource-group-name/providers/Microsoft.Compute/virtualMachines/vm-name
EOF
|
#include<iostream>
using namespace std;
// Returns a copy of str with its characters in reverse order.
string reverseString(string str) {
    int n = str.length();
    int left = 0;
    int right = n - 1;
    // Walk inward from both ends, swapping character pairs until the
    // indices meet in the middle.
    while (left < right) {
        swap(str[left], str[right]);
        ++left;
        --right;
    }
    return str;
}
// Driver code
int main()
{
    // Reverse a sample string and print the result (no trailing newline,
    // matching the original output exactly).
    const string sample = "Geeks For Geeks";
    cout << reverseString(sample);
    return 0;
}
// Renders a stacked-dot ("waffle"-style) chart into the pre-built frame for
// the given media size: one row of dots per data row, dots coloured by series,
// plus row labels and a colour key. Uses the d3 v3 API (d3.scale.ordinal /
// rangeBands).
function makeChart(data,stylename,media,plotpadding,legAlign,yAlign,seriesNames){
    console.log(data)
    // NOTE(review): titleYoffset/subtitleYoffset are computed but never used.
    var titleYoffset = d3.select("#"+media+"Title").node().getBBox().height
    var subtitleYoffset=d3.select("#"+media+"Subtitle").node().getBBox().height;
    //Select the plot space in the frame from which to take measurements
    var frame=d3.select("#"+media+"chart")
    var plot=d3.select("#"+media+"plot")
    var yOffset=d3.select("#"+media+"Subtitle").style("font-size");
    // Strip the units (e.g. "px") so yOffset becomes a number.
    yOffset=Number(yOffset.replace(/[^\d.-]/g, ''));
    //Get the width,height and the marginins unique to this chart
    var w=plot.node().getBBox().width;
    var h=plot.node().getBBox().height;
    var margin=plotpadding.filter(function(d){
        return (d.name === media);
    });
    margin=margin[0].margin[0]
    var colours=stylename.linecolours;
    var plotWidth = w-(margin.left+margin.right);
    var plotHeight = h-(margin.top+margin.bottom);
    //console.log(plotWidth,colours,plotHeight,data)
    // console.log(margin)
    //you now have a chart area, inner margin data and colour palette - with titles pre-rendered
    var innerMargin={left:plotWidth*.15,top:20}
    //some globals for chart configuration
    var numRows = 5;      // dots per column within a row's stack
    var divisor = 1;      // units represented by one dot
    // One band per data row (by date), stacked vertically.
    var yScale = d3.scale.ordinal()
        .domain(data.map(function(d){
            return d.date;
        }))
        .rangeBands([0,plotHeight],0)
    var maxValue = d3.max(data.map(function(d){
        return d.total;
    }))
    // Number of dot columns needed for the largest total.
    var maxCols = (maxValue/divisor)/numRows
    var colIndex = d3.range(maxCols)
    var xDotScale = d3.scale.ordinal()
        .domain(colIndex)
        .rangeBands([innerMargin.left,plotWidth*0.9],0.9)
    var yDotScale = d3.scale.ordinal()
        .domain(d3.range(numRows))
        .rangeBands([0,yScale.rangeBand()/2])
    // One <g> per data row, positioned at its band.
    var stacks = plot.selectAll("g").data(data).enter().append("g")
        .attr("transform",function(d){
            return "translate(0,"+(innerMargin.top+yScale(d.date))+")";
        })
    //create circles for the stack
    stacks.each(function(d,i){
        var circlerange = d3.range(d.total/divisor);
        d3.select(this).selectAll("circle").data(circlerange).enter().append("circle")
            .attr("r",yDotScale.rangeBand()/2)
            .attr("id",function(d){
                return "circle"+i+"_"+d;
            })
            // Fill columns left-to-right, numRows dots per column.
            .attr("cx",function(d,i){
                return xDotScale(Math.floor(d/numRows))
            })
            .attr("cy",function(d,i){
                return yDotScale(d % numRows)
            })
            .attr("fill","gray")
    })
    //colour circles according to source
    stacks.each(function(d,i){
        // Dots to colour for each series in this row.
        var ranges = seriesNames.map(function(e,j){
            return d[e]/divisor//number of shapes to be coloured for each group
        })
        console.log(ranges)
        // Cumulative start index of each series within the dot sequence.
        var index = 0;
        // NOTE(review): stackIndex is assigned without var/let — it becomes an
        // implicit global; works here but should be declared locally.
        stackIndex=[0]
        seriesNames.forEach(function(obj,k){
            if (k>0){
                index=index+ranges[k-1];
                stackIndex.push(index)
            }
        })
        // NOTE(review): this for-loop reuses the outer callback's parameter
        // `i` as its counter, clobbering the row index — harmless here since
        // `i` is not used afterwards, but fragile.
        for (i=0;i<seriesNames.length;i++){
            var selecty = d3.select(this).selectAll("circle").filter(function(y,z){
                if (i<seriesNames.length-1){
                    return z>=stackIndex[i]&&z<stackIndex[i+1]
                } else {
                    return z>=stackIndex[i];
                }
            })
            selecty.attr("fill",colours[i])
        }
    })
    // Row labels: date (title style) and prepack count (subtitle style).
    stacks.append("text")
        .text(function(d){
            return d.date
        })
        .attr("class",media+"title")
        .attr("y",innerMargin.top-20)
    stacks.append("text")
        .text(function(d){
            return d.prepack+" prepack"
        })
        .attr("class",media+"subtitle")
        .attr("y",innerMargin.top)
    // Colour key along the bottom: one swatch + label per series.
    var key = plot.append("g")
        .attr("id","key")
        .selectAll("g")
        .data(seriesNames)
        .enter()
        .append("g")
    key.append("circle")
        .attr("r",yDotScale.rangeBand()/2.5)
        .attr("fill",function(d,i){
            return colours[i]
        })
        .attr("cx",function(d,i){
            return margin.left+(i*70)
        })
        .attr("cy",h-margin.bottom)
    key.append("text")
        .text(function(d){
            return d;
        })
        .attr("y",h-margin.bottom+(yDotScale.rangeBand()/2))
        .attr("x",function(d,i){
            return margin.left+(yDotScale.rangeBand()/1.5)+(i*70)
        })
        .attr("class",media+"subtitle")
}
|
#!/usr/bin/env bash
set -eu
# check dependencies
# Abort early (message redirected to stderr) when the docker CLI is not
# available; every helper below shells out to docker.
(
type docker &>/dev/null || ( echo "docker is not available"; exit 1 )
)>&2
# Print a progress/diagnostic message to file descriptor 3 (the channel
# Bats reserves for human-readable test output).
function printMessage {
  # Fix: use "$*" (one joined word) instead of "$@" — expanding "$@" inside
  # a string is a word-splitting hazard (shellcheck SC2145).
  echo "# ${*}" >&3
}
# Assert that running the command "${@:2}" prints exactly "${1}" on stdout.
# On mismatch, report both values via printMessage and fail (exit status 1).
function assert {
  local want="${1}"
  shift
  local got
  got=$("${@}")
  if [[ "${got}" != "${want}" ]]; then
    printMessage "Expected: '${want}', actual: '${got}'"
    false
  fi
}
# Retry a command $1 times until it succeeds. Wait $2 seconds between retries.
# NOTE(review): relies on Bats' `run` helper and the `status`/`output`
# globals it sets — this file is meant to be sourced from Bats test suites.
function retry {
  local attempts
  local delay
  local i
  attempts="${1}"
  shift
  delay="${1}"
  shift
  # Try up to $attempts times; return immediately on the first success.
  for ((i=0; i < attempts; i++)); do
    run "${@}"
    if [[ "${status}" -eq 0 ]]; then
      return 0
    fi
    sleep "${delay}"
  done
  # All attempts failed: report the last status/output, then fail.
  printMessage "Command '${*}' failed $attempts times. Status: ${status}. Output: ${output}"
  false
}
# Resolve the name of the system-under-test image from the buildx bake
# configuration. Requires $IMAGE to name a target in docker-bake.hcl.
function get_sut_image {
  test -n "${IMAGE:?"[sut_image] Please set the variable 'IMAGE' to the name of the image to test in 'docker-bake.hcl'."}"
  ## Retrieve the SUT image name from buildx
  # Option --print for 'docker buildx bake' prints the JSON configuration on the stdout
  # Option --silent for 'make' suppresses the echoing of command so the output is valid JSON
  # The image name is the 1st of the "tags" array, on the first "image" found
  make --silent show | jq -r ".target.${IMAGE}.tags[0]"
}
# Print the directory containing the Dockerfile of the $IMAGE bake target.
function get_dockerfile_directory() {
  test -n "${IMAGE:?"[sut_image] Please set the variable 'IMAGE' to the name of the image to test in 'docker-bake.hcl'."}"
  # Ask the bake config for the target's Dockerfile path...
  DOCKERFILE=$(make --silent show | jq -r ".target.${IMAGE}.dockerfile")
  # ...and strip the trailing "/Dockerfile" to keep only the directory.
  echo "${DOCKERFILE%"/Dockerfile"}"
}
# Force-stop and remove the containers created by a test run; errors are
# swallowed so cleanup is safe to call even when nothing is running.
function clean_test_container {
  docker kill "${AGENT_CONTAINER}" "${NETCAT_HELPER_CONTAINER}" &>/dev/null || :
  docker rm -fv "${AGENT_CONTAINER}" "${NETCAT_HELPER_CONTAINER}" &>/dev/null || :
}
# Succeed when container $1 reports State.Running == true, allowing the
# daemon a short grace period (initial 1s sleep + 3 retries, 1s apart).
function is_agent_container_running {
  local cid="${1}"
  sleep 1
  retry 3 1 assert "true" docker inspect -f '{{.State.Running}}' "${cid}"
}
# Build the netcat-helper image once; skip the build when the image
# already exists locally. All docker output is suppressed.
function buildNetcatImage() {
  docker inspect --type=image netcat-helper:latest &>/dev/null \
    || docker build -t netcat-helper:latest tests/netcat-helper/ &>/dev/null
}
# Kill and remove the container named by $1, ignoring all failures so a
# missing container does not abort the caller.
function cleanup {
  local container="${1}"
  docker kill "${container}" &>/dev/null || :
  docker rm -fv "${container}" &>/dev/null || :
}
|
<filename>app/components/canvas/shapes/Line.js<gh_stars>1-10
import CanvasContainer from 'abstract/CanvasContainer'
import SceneMap from 'components/canvas/sceneMap/SceneMap'
/**
 * Straight line segment rendered onto the shared scene canvas.
 */
export default class Line extends CanvasContainer {
  constructor (props) {
    super(props);
    const { start, end, color, lineWidth } = props;
    this.start = start;              // { x, y } first endpoint
    this.end = end;                  // { x, y } second endpoint
    this.color = color;              // canvas strokeStyle
    this.lineWidth = lineWidth || 1; // stroke width, defaults to 1
  }

  /** Stroke the segment on SceneMap's shared 2D context. */
  draw () {
    super.draw();
    const ctx = SceneMap.getCtx();
    ctx.beginPath();
    ctx.strokeStyle = this.color;
    ctx.lineCap = 'butt';
    ctx.lineWidth = this.lineWidth;
    ctx.moveTo(this.start.x, this.start.y);
    ctx.lineTo(this.end.x, this.end.y);
    ctx.stroke();
    ctx.closePath();
  }
}
|
#!/bin/sh
# [Gedit Tool]
# Name=[info] compare files (meld)
# Shortcut=<Control><Alt>m
# Applicability=all
# Output=nothing
# Input=nothing
# Save-files=document
# Opens meld to compare two documents
# (depends on meld, zenity)
#
# Save: Current document
# Input: Nothing
# Output: Nothing
#
# by GNOME wiki <http://live.gnome.org/Gedit/ExternalToolsPluginCommands#Comparing_Files>, edited by (you?)
TITLE="Compare With..."
TEXT="Compare $GEDIT_CURRENT_DOCUMENT_NAME with:"
BROWSE=" browse..."
FILE=
# Keep prompting until the user picks a comparison target or cancels.
# NOTE(review): $GEDIT_DOCUMENTS_PATH is deliberately left unquoted so each
# open document becomes its own zenity list row — breaks on paths with spaces.
while [ -z "$FILE" ]; do
FILE=`zenity --list --title="$TITLE" --text="$TEXT" --width=640 --height=320 --column=Documents $GEDIT_DOCUMENTS_PATH "$BROWSE"`
if [ "$FILE" = "$BROWSE" ]; then
# "browse..." row selected: fall back to a file-chooser dialog.
FILE=`zenity --file-selection --title="$TITLE" --filename="$GEDIT_CURRENT_DOCUMENT_DIR/"`
elif [ -z "$FILE" ]; then
# Dialog cancelled: exit without launching meld.
exit
fi
done
# Launch meld in the background so gedit is not blocked.
meld "$GEDIT_CURRENT_DOCUMENT_DIR/$GEDIT_CURRENT_DOCUMENT_NAME" "$FILE" &
|
<filename>datavenue-android-sdk/src/main/java/com/orange/datavenue/client/common/HTTPException.java
/**
* Copyright (C) 2015 Orange
*
* This software is distributed under the terms and conditions of the 'Apache-2.0'
* license which can be found in the file 'LICENSE' in this package distribution
* or at 'http://www.apache.org/licenses/LICENSE-2.0'.
*/
package com.orange.datavenue.client.common;
/**
*
* @author <NAME>
*
*/
public class HTTPException extends Exception {
private int codeErrorHTTP = 0;
private DatavenueError datavenueError = null;
public HTTPException(int code, DatavenueError datavenueError) {
this.codeErrorHTTP = code;
this.datavenueError = datavenueError;
}
public int getCodeErrorHTTP() {
return codeErrorHTTP;
}
public void setCodeErrorHTTP(int code) {
this.codeErrorHTTP = code;
}
public DatavenueError getDatavenueError() {
return datavenueError;
}
public void setDatavenueError(DatavenueError datavenueError) {
this.datavenueError = datavenueError;
}
}
|
#ifndef MYON_WINFONT_H_INCLUDED
#define MYON_WINFONT_H_INCLUDED
#include "fmdsp/font.h"
/* Initialize `font` as the Windows font backend for fmdsp.
   NOTE(review): presumably returns true on success — confirm against the
   implementation and the fmdsp_font contract in fmdsp/font.h. */
bool fmdsp_font_win(struct fmdsp_font *font);
#endif // MYON_WINFONT_H_INCLUDED
|
#to install cocoapods: https://guides.cocoapods.org/using/getting-started.html
# Refresh the local CocoaPods spec repos, then install the Podfile's pods.
pod install --repo-update
|
package aula097;
import java.util.Scanner;
public class CondicionalCase {

    /**
     * switch/case demo: reads a day number (1-7) from stdin and maps it to
     * the day's name in Portuguese. (Original sketch notes, translated: a
     * switch dispatches on a variable, each case ends with break, and a
     * default branch handles unmatched values.)
     */
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        int d = sc.nextInt();
        String dia;
        switch (d) {
            case 1:
                dia = "domingo";
                break;
            case 2:
                dia = "segunda";
                break;
            case 3:
                dia = "terca";
                break;
            case 4:
                dia = "quarta";
                break;
            case 5:
                dia = "quinta";
                break;
            case 6:
                dia = "sexta";
                break;
            case 7:
                dia = "sabado";
                break;
            default:
                // Fix: the original switch only handled 1 and 2 and had no
                // default, so `dia` stayed unassigned for every other input.
                dia = "invalido";
                break;
        }
        sc.close();
    }
}
|
<filename>algorithm/1900/1998-gcdSort.go<gh_stars>1-10
//给你一个整数数组 nums ,你可以在 nums 上执行下述操作 任意次 :
//
//
// 如果 gcd(nums[i], nums[j]) > 1 ,交换 nums[i] 和 nums[j] 的位置。其中 gcd(nums[i], nums[
//j]) 是 nums[i] 和 nums[j] 的最大公因数。
//
//
// 如果能使用上述交换方式将 nums 按 非递减顺序 排列,返回 true ;否则,返回 false 。
//
//
//
// 示例 1:
//
// 输入:nums = [7,21,3]
//输出:true
//解释:可以执行下述操作完成对 [7,21,3] 的排序:
//- 交换 7 和 21 因为 gcd(7,21) = 7 。nums = [21,7,3]
//- 交换 21 和 3 因为 gcd(21,3) = 3 。nums = [3,7,21]
//
//
// 示例 2:
//
// 输入:nums = [5,2,6,2]
//输出:false
//解释:无法完成排序,因为 5 不能与其他元素交换。
//
//
// 示例 3:
//
// 输入:nums = [10,5,9,3,15]
//输出:true
//解释:
//可以执行下述操作完成对 [10,5,9,3,15] 的排序:
//- 交换 10 和 15 因为 gcd(10,15) = 5 。nums = [15,5,9,3,10]
//- 交换 15 和 3 因为 gcd(15,3) = 3 。nums = [3,5,9,15,10]
//- 交换 10 和 15 因为 gcd(10,15) = 5 。nums = [3,5,9,10,15]
//
//
//
//
// 提示:
//
//
// 1 <= nums.length <= 3 * 10⁴
// 2 <= nums[i] <= 10⁵
//
// Related Topics 并查集 数组 数学 排序 👍 19 👎 0
package algorithm_1900

import "sort"
// gcdSort reports whether nums can be arranged into non-decreasing order
// using only swaps of pairs whose gcd is greater than 1.
//
// Two values are eventually exchangeable (possibly through intermediaries)
// exactly when they are connected through shared prime factors, so every
// value is unioned with its prime factors and each element is then checked
// against the value that must occupy its slot in sorted order.
//
// Fix: the previous adjacent-swap insertion sort was wrong — e.g. [9,6,4]
// is sortable via the intermediary 6 (gcd(9,6)=3, gcd(6,4)=2) even though
// gcd(9,4)=1, but the old code returned false.
func gcdSort(nums []int) bool {
	maxV := 0
	for _, v := range nums {
		if v > maxV {
			maxV = v
		}
	}

	// Union-find over the value range [0, maxV].
	parent := make([]int, maxV+1)
	for i := range parent {
		parent[i] = i
	}
	var find func(int) int
	find = func(x int) int {
		for parent[x] != x {
			parent[x] = parent[parent[x]] // path halving
			x = parent[x]
		}
		return x
	}
	union := func(a, b int) {
		if ra, rb := find(a), find(b); ra != rb {
			parent[ra] = rb
		}
	}

	// Smallest-prime-factor sieve for fast factorization.
	spf := make([]int, maxV+1)
	for i := 2; i <= maxV; i++ {
		if spf[i] == 0 {
			for j := i; j <= maxV; j += i {
				if spf[j] == 0 {
					spf[j] = i
				}
			}
		}
	}

	// Union each value with all of its prime factors.
	for _, v := range nums {
		for x := v; x > 1; {
			p := spf[x]
			union(v, p)
			for x%p == 0 {
				x /= p
			}
		}
	}

	// Sortable iff each element shares a component with its sorted counterpart.
	sorted := append([]int(nil), nums...)
	sort.Ints(sorted)
	for i, v := range nums {
		if v != sorted[i] && find(v) != find(sorted[i]) {
			return false
		}
	}
	return true
}
// gcd returns the greatest common divisor of a and b using the recursive
// form of Euclid's algorithm.
func gcd(a, b int) int {
	if b == 0 {
		return a
	}
	return gcd(b, a%b)
}
|
<filename>js/ui/form/ui.form.item_option_action.js
/**
 * Captures a pending option change for a single form item and exposes the
 * widget/container the item is currently rendered with.
 */
export default class ItemOptionAction {
    constructor({ item, itemsRunTimeInfo, value }) {
        this.item = item;
        this.itemsRunTimeInfo = itemsRunTimeInfo;
        this.value = value;
    }

    /** Widget instance currently rendered for this item, if any. */
    getInstance() {
        return this.itemsRunTimeInfo.findWidgetInstanceByItem(this.item);
    }

    /** Container element the item is rendered into, if any. */
    getItemContainer() {
        return this.itemsRunTimeInfo.findItemContainerByItem(this.item);
    }

    /** Always reports success in this base implementation. */
    tryExecute() {
        return true;
    }
}
|
package procelem.components
import chisel3.Module
import procelem._
import procelem.enums._
import procelem.interfaces._
// Left-side register-file module.
// NOTE(review): only declares its IO bundle (LeftIO, width 1) and no logic
// yet — presumably a stub to be filled in; also confirm whether chisel3
// requires wrapping the bundle in IO(...) in this codebase.
class RegFileL(config: Config) extends Module {
  val io = new LeftIO(1, config)
}
|
#!/bin/bash -eu
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# Regenerate the autotools build system from the checked-out sources.
./bootstrap
# FIXME FUZZING_LIBS="$LIB_FUZZING_ENGINE" fails with some missing C++ library, I don't know how to fix this
# Static build with the CT-API backend and fuzzing hooks enabled; the
# OSS-Fuzz engine is linked in via FUZZING_LIBS.
./configure --disable-shared --disable-pcsc --enable-ctapi --enable-fuzzing FUZZING_LIBS="$LIB_FUZZING_ENGINE"
make -j4
# Install the fuzz targets into $OUT, where the OSS-Fuzz runner expects them.
cp src/tests/fuzzing/fuzz_asn1_print $OUT
cp src/tests/fuzzing/fuzz_asn1_sig_value $OUT
cp src/tests/fuzzing/fuzz_pkcs15_decode $OUT
cp src/tests/fuzzing/fuzz_pkcs15_reader $OUT
#cp src/tests/fuzzing/fuzz_pkcs15_reader.options $OUT
|
from .users import (
UserViewSet
)
__all__ = ['UserViewSet']
|
#!/bin/bash
# Bootstrap Let's Encrypt certificates for the nginx/certbot compose stack:
# issue a throwaway self-signed certificate so nginx can start, then replace
# it with a real one from Let's Encrypt and reload nginx.
if ! [ -x "$(command -v docker-compose)" ]; then
echo 'Error: docker-compose is not installed.' >&2
exit 1
fi
domains=(codegrow.org www.codegrow.org)
rsa_key_size=4096
data_path="./data/certbot"
email="munikeraragon@gmail.com" # Adding a valid address is strongly recommended
staging=0 # Set to 1 if you're testing your setup to avoid hitting request limits
# NOTE(review): "$domains" (no index) expands to the FIRST array element
# only — the live/<name> directory is keyed by the primary domain. Confirm
# this is intended before changing the domain list.
if [ -d "$data_path" ]; then
read -p "Existing data found for $domains. Continue and replace existing certificate? (y/N) " decision
if [ "$decision" != "Y" ] && [ "$decision" != "y" ]; then
exit
fi
fi
# Fetch certbot's recommended nginx TLS parameter files once.
if [ ! -e "$data_path/conf/options-ssl-nginx.conf" ] || [ ! -e "$data_path/conf/ssl-dhparams.pem" ]; then
echo "### Downloading recommended TLS parameters ..."
mkdir -p "$data_path/conf"
curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot-nginx/certbot_nginx/_internal/tls_configs/options-ssl-nginx.conf > "$data_path/conf/options-ssl-nginx.conf"
curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot/certbot/ssl-dhparams.pem > "$data_path/conf/ssl-dhparams.pem"
echo
fi
# Temporary self-signed cert so nginx can come up before the real one exists.
echo "### Creating dummy certificate for $domains ..."
path="/etc/letsencrypt/live/$domains"
mkdir -p "$data_path/conf/live/$domains"
docker-compose -f production.yml run --rm --entrypoint "\
openssl req -x509 -nodes -newkey rsa:$rsa_key_size -days 1\
-keyout '$path/privkey.pem' \
-out '$path/fullchain.pem' \
-subj '/CN=localhost'" certbot
echo
echo "### Starting nginx ..."
docker-compose -f production.yml up --force-recreate -d nginx
echo
# Remove the dummy cert so certbot can write the real one in its place.
echo "### Deleting dummy certificate for $domains ..."
docker-compose -f production.yml run --rm --entrypoint "\
rm -Rf /etc/letsencrypt/live/$domains && \
rm -Rf /etc/letsencrypt/archive/$domains && \
rm -Rf /etc/letsencrypt/renewal/$domains.conf" certbot
echo
echo "### Requesting Let's Encrypt certificate for $domains ..."
#Join $domains to -d args
domain_args=""
for domain in "${domains[@]}"; do
domain_args="$domain_args -d $domain"
done
# Select appropriate email arg
case "$email" in
"") email_arg="--register-unsafely-without-email" ;;
*) email_arg="--email $email" ;;
esac
# Enable staging mode if needed
if [ $staging != "0" ]; then staging_arg="--staging"; fi
# Webroot challenge served by the running nginx container.
docker-compose -f production.yml run --rm --entrypoint "\
certbot certonly --webroot -w /var/www/certbot \
$staging_arg \
$email_arg \
$domain_args \
--rsa-key-size $rsa_key_size \
--agree-tos \
--force-renewal" certbot
echo
# Pick up the freshly issued certificate without a container restart.
echo "### Reloading nginx ..."
docker-compose -f production.yml exec nginx nginx -s reload
#!/bin/sh
# Run this to generate all the initial makefiles, etc.
# Allow running from outside the source tree: default srcdir to the
# directory containing this script.
test -n "$srcdir" || srcdir=`dirname "$0"`
test -n "$srcdir" || srcdir=.
olddir=`pwd`
cd "$srcdir"
# NOTE(review): $AUTORECONF is unquoted in `test -z` — fine for typical
# paths, but would misbehave if `which` printed a path containing spaces.
AUTORECONF=`which autoreconf`
if test -z $AUTORECONF; then
echo "*** No autoreconf found, please install it ***"
exit 1
else
# Regenerate configure/Makefile.in, forwarding any extra aclocal flags.
ACLOCAL="aclocal $ACLOCAL_FLAGS" autoreconf --force --install || exit $?
fi
cd "$olddir"
# Unless NOCONFIGURE is set, run configure immediately with the given args.
test -n "$NOCONFIGURE" || "$srcdir/configure" "$@"
|
import m from 'mithril';
import { FileUpload } from './FileUpload';
/**
 * Placeholder drop zone for image uploads: shows a camera icon and hint
 * text, plus a hidden file input wired to the FileUpload component.
 */
export function DropArea() {
    const children = () => [
        m('span', { 'uk-icon': 'icon: camera', class: 'uk-margin-small-right' }),
        m('span', { class: 'uk-text-middle' }, ' drag/paste new image or '),
        m('div', { 'uk-form-custom': true },
            m('input', { type: 'file' }),
            m(FileUpload),
        ),
    ];
    return {
        view: () => m('div', { class: 'droparea uk-placeholder uk-text-center' }, children()),
    };
}
#!/bin/bash
# Used to clean dangling svc, should put this script in the same directory where ctl.py exist
# user should call this script with no args, if provided with 1 arg, it will print svc label if
# the svc is dangling.
set -e

NUM_PROC=20

if [ $# != 0 -a $# != 1 ] ; then
    echo "Usage: $0 [svc]"
    exit 1
fi

if [ $# == 0 ] ; then
    # Parent mode: list candidate services, re-invoke this script once per
    # service (in parallel) to collect dangling labels, then delete them.
    ./ctl.py kubectl get svc | grep "^e-" | cut -d " " -f 1 | xargs -n 1 -P $NUM_PROC ./$0 | uniq | \
        xargs -n 1 -P $NUM_PROC ./ctl.py kubectl delete svc -l
elif [ $# == 1 ] ; then
    # Child mode: print the svc's job label iff no pod carries it (dangling).
    svc_name=$1
    label=`./ctl.py kubectl describe svc $svc_name | grep jobId | cut -d : -f 2 | sed "s/ //g"`
    # Fix: test the grep pipeline directly in the `if`. The original ran the
    # pipeline as a plain statement and inspected $? afterwards — but under
    # `set -e` a grep miss aborts the script before $? is ever read, making
    # the rtn check dead code and feeding spurious failures to xargs.
    if ./ctl.py kubectl get pod -l $label 2>&1 | grep "No resources found" > /dev/null ; then
        echo $label
    fi
else
    echo "Usage: $0 [svc]"
    exit 1
fi
|
def manageFileAndModule(editedFileContents: str, newScriptFileOldName: str, oldModuleName: str) -> str:
    """Write edited contents to a file, delete the file, and unload its module.

    Each step is attempted in order; the first failure short-circuits and
    returns a human-readable status string.

    :param editedFileContents: text to write to the file
    :param newScriptFileOldName: path of the file to write and then delete
    :param oldModuleName: module name passed to ModuleDatabase.unloadExplicit
    :return: status message describing the first failure, or a success message
    """
    try:
        # `with` closes the file automatically; the original's explicit
        # close() inside the block was redundant.
        with open(newScriptFileOldName, 'w') as file:
            file.write(editedFileContents)
    except Exception:
        return "File write operation failed."

    try:
        # fileDelete returns a falsy value on failure as well as raising.
        if not fileDelete(newScriptFileOldName):
            return "File deletion failed."
    except Exception:
        return "File deletion failed."

    try:
        ModuleDatabase.unloadExplicit(oldModuleName)
    except Exception:
        return "Module unloading failed."

    return "File and module management successful."
<reponame>labiraus/kata-react<filename>03-add_remove/src/components/ToDoList/ToDoList.js<gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
import ToDoListElement from '../ToDoListElement/ToDoListElement'
import styles from './ToDoList.module.css';
const ToDoList = toDos => (
<div className={styles.ToDoList} data-testid="ToDoList">
{toDos.map((value,index) =>{
return <ToDoListElement value={value} key={index} />
})}
</div>
);
ToDoList.propTypes = {};
ToDoList.defaultProps = {};
export default ToDoList;
|
// Replace react-device-detect with a controllable mock. jest.mock calls are
// hoisted above imports, so requireMock here yields the mocked module.
export const ReactDeviceDetect = jest.requireMock('react-device-detect');

jest.mock('react-device-detect', () => ({
    // Default to desktop; individual tests can flip this flag.
    isMobileOnly: false,
}));
|
#!/usr/bin/env bash
# Set up the Android cross-compile environment and dispatch to a package's
# build.sh. Usage: <script> <name> <version>.

source ./env

# Create the per-ABI install prefix (and its include dir) on first use.
[[ ! -d "${ANDROID_PREFIX}/${BUILD_IDENTIFIER}" ]] && (mkdir -p "${ANDROID_PREFIX}/${BUILD_IDENTIFIER}" || exit 1)
[[ ! -d "${ANDROID_PREFIX}/${BUILD_IDENTIFIER}/include" ]] && (mkdir "${ANDROID_PREFIX}/${BUILD_IDENTIFIER}/include" || exit 1)

# Toolchain first on PATH; export the usual cross-compile roots.
export PATH="${ANDROID_TOOL_PREFIX}/${BUILD_IDENTIFIER}/bin:${PATH}"
export PREFIX="${ANDROID_PREFIX}/${BUILD_IDENTIFIER}"
export TOOL_PREFIX="${ANDROID_TOOL_PREFIX}/${BUILD_IDENTIFIER}"
export HOST="${ANDROID_HOST}"
export TARGET="${ANDROID_TARGET}"
export NDK_ROOT="${BASE}/sdk/${NDK_REL}"
export SDK_ROOT="${BASE}/sdk/${SDK_REL}"
export NDK_PLATFORM="android-${NDK_REV}"
export SDK_PLATFORM="android-${SDK_REV}"
export cross="${ANDROID_TARGET}-"

# Compiler and linker flags against the toolchain sysroot.
export CFLAGS="--sysroot ${TOOL_PREFIX}/sysroot -I${PREFIX}/include -I${TOOL_PREFIX}/include -DANDROID -mandroid ${CFLAGS_EXTRA}"
export CPPFLAGS="${CFLAGS} ${CPPFLAGS_EXTRA}"
export CXXFLAGS="${CFLAGS} ${CXXFLAGS_EXTRA}"
export LDFLAGS="--sysroot ${TOOL_PREFIX}/sysroot -L${PREFIX}/lib -L${TOOL_PREFIX}/lib ${LDFLAGS_EXTRA}"

# GCC/binutils cross tools.
export CC="${ANDROID_TARGET}-gcc"
export CXX="${ANDROID_TARGET}-g++"
export CPP="${ANDROID_TARGET}-cpp"
export AR="${ANDROID_TARGET}-ar"
# Fix: the assembler is "<target>-as"; the original exported "<target>-ls",
# which is not a binutils tool and breaks any build that invokes $AS.
export AS="${ANDROID_TARGET}-as"
export LD="${ANDROID_TARGET}-ld"
export OBJCOPY="${ANDROID_TARGET}-objcopy"
export OBJDUMP="${ANDROID_TARGET}-objdump"
export RANLIB="${ANDROID_TARGET}-ranlib"
export STRIP="${ANDROID_TARGET}-strip"

# Package under build: name/version select mk/<name>/<version>/build.sh.
export NAME="$1"
export VERSION="$2"
export PACKAGE="${NAME}-${VERSION}"
export FILESDIR="${BASE}/mk/${NAME}/${VERSION}"

pushd "${BASE}" > /dev/null
. "${FILESDIR}/build.sh" || exit 1
popd > /dev/null
|
/* LUFA-style pipe stream transfer template: the including file defines the
 * TEMPLATE_* macros to stamp out a concrete read/write stream function.
 *
 * Streams `Length` bytes between `Buffer` and the currently selected pipe.
 * When `BytesProcessed` is non-NULL the transfer is resumable: the count of
 * bytes already handled is consumed on entry and updated whenever the pipe
 * stops accepting data, returning PIPE_RWSTREAM_IncompleteTransfer so the
 * caller can re-invoke later to continue.
 */
uint8_t TEMPLATE_FUNC_NAME (TEMPLATE_BUFFER_TYPE const Buffer,
                            uint16_t Length,
                            uint16_t* const BytesProcessed)
{
	uint8_t* DataStream = ((uint8_t*)Buffer + TEMPLATE_BUFFER_OFFSET(Length));
	uint16_t BytesInTransfer = 0;
	uint8_t ErrorCode;

	Pipe_SetPipeToken(TEMPLATE_TOKEN);

	if ((ErrorCode = Pipe_WaitUntilReady()))
		return ErrorCode;

	/* Resuming a partial transfer: skip the bytes already processed. */
	if (BytesProcessed != NULL)
	{
		Length -= *BytesProcessed;
		TEMPLATE_BUFFER_MOVE(DataStream, *BytesProcessed);
	}

	while (Length)
	{
		/* Pipe not currently read/write-able: flush/clear it, then either
		 * report partial progress (resumable mode) or block until ready. */
		if (!(Pipe_IsReadWriteAllowed()))
		{
			TEMPLATE_CLEAR_PIPE();

			if (BytesProcessed != NULL)
			{
				*BytesProcessed += BytesInTransfer;
				return PIPE_RWSTREAM_IncompleteTransfer;
			}

			if ((ErrorCode = Pipe_WaitUntilReady()))
				return ErrorCode;
		}
		else
		{
			/* Move one byte between the buffer and the pipe. */
			TEMPLATE_TRANSFER_BYTE(DataStream);
			TEMPLATE_BUFFER_MOVE(DataStream, 1);
			Length--;
			BytesInTransfer++;
		}
	}

	return PIPE_RWSTREAM_NoError;
}

/* Clean up the template parameters for the next instantiation. */
#undef TEMPLATE_FUNC_NAME
#undef TEMPLATE_BUFFER_TYPE
#undef TEMPLATE_TOKEN
#undef TEMPLATE_TRANSFER_BYTE
#undef TEMPLATE_CLEAR_PIPE
#undef TEMPLATE_BUFFER_OFFSET
#undef TEMPLATE_BUFFER_MOVE
|
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import path from 'path';
import fs from 'fs';
import { test, expect } from './inspectorTest';
// file:// URL of an empty test page that codegen is pointed at.
const emptyHTML = new URL('file://' + path.join(__dirname, '..', '..', 'assets', 'empty.html')).toString();

// Expected C# BrowserTypeLaunchOptions initializer lines that codegen should
// emit; a branded channel adds a Channel property on its own line.
const launchOptions = (channel: string) => {
  return channel ? `Headless = false,\n      Channel = "${channel}",` : `Headless = false,`;
};
// Upper-cases the first letter of a browser name ("webkit" -> "Webkit").
function capitalize(browserName: string): string {
  const [head, ...tail] = browserName;
  return head.toUpperCase() + tail.join('');
}
// Verifies the C# program header codegen emits: usings, launch call with
// the expected options, and a default NewContextAsync().
test('should print the correct imports and context options', async ({ browserName, channel, runCLI }) => {
  const cli = runCLI(['--target=csharp', emptyHTML]);
  const expectedResult = `using Microsoft.Playwright;
using System;
using System.Threading.Tasks;
class Program
{
    public static async Task Main()
    {
        using var playwright = await Playwright.CreateAsync();
        await using var browser = await playwright.${capitalize(browserName)}.LaunchAsync(new BrowserTypeLaunchOptions
        {
            ${launchOptions(channel)}
        });
        var context = await browser.NewContextAsync();`;
  await cli.waitFor(expectedResult).catch(e => e);
  expect(cli.text()).toContain(expectedResult);
});

// Every CLI customization flag should surface as the matching
// BrowserNewContextOptions property in the generated C#.
test('should print the correct context options for custom settings', async ({ browserName, channel, runCLI }) => {
  const cli = runCLI([
    '--color-scheme=dark',
    '--geolocation=37.819722,-122.478611',
    '--lang=es',
    '--proxy-server=http://myproxy:3128',
    '--timezone=Europe/Rome',
    '--user-agent=hardkodemium',
    '--viewport-size=1280,720',
    '--target=csharp',
    emptyHTML]);
  const expectedResult = `
        using var playwright = await Playwright.CreateAsync();
        await using var browser = await playwright.${capitalize(browserName)}.LaunchAsync(new BrowserTypeLaunchOptions
        {
            ${launchOptions(channel)}
            Proxy = new ProxySettings
            {
                Server = "http://myproxy:3128",
            },
        });
        var context = await browser.NewContextAsync(new BrowserNewContextOptions
        {
            ViewportSize = new ViewportSize
            {
                Width = 1280,
                Height = 720,
            },
            Geolocation = new Geolocation
            {
                Latitude = 37.819722m,
                Longitude = -122.478611m,
            },
            Permissions = new[] { ContextPermission.Geolocation },
            UserAgent = "hardkodemium",
            Locale = "es",
            ColorScheme = ColorScheme.Dark,
            TimezoneId = "Europe/Rome",
        });`;
  await cli.waitFor(expectedResult);
  expect(cli.text()).toContain(expectedResult);
});

// A plain device descriptor should become playwright.Devices[...]
// passed straight to NewContextAsync (chromium-only device).
test('should print the correct context options when using a device', async ({ browserName, channel, runCLI }) => {
  test.skip(browserName !== 'chromium');

  const cli = runCLI(['--device=Pixel 2', '--target=csharp', emptyHTML]);
  const expectedResult = `
        using var playwright = await Playwright.CreateAsync();
        await using var browser = await playwright.${capitalize(browserName)}.LaunchAsync(new BrowserTypeLaunchOptions
        {
            ${launchOptions(channel)}
        });
        var context = await browser.NewContextAsync(playwright.Devices["Pixel 2"]);`;
  await cli.waitFor(expectedResult);
  expect(cli.text()).toContain(expectedResult);
});

// Device descriptor plus explicit overrides: the overrides must be merged
// into a BrowserNewContextOptions built from the device (webkit-only device).
test('should print the correct context options when using a device and additional options', async ({ browserName, channel, runCLI }) => {
  test.skip(browserName !== 'webkit');

  const cli = runCLI([
    '--device=iPhone 11',
    '--color-scheme=dark',
    '--geolocation=37.819722,-122.478611',
    '--lang=es',
    '--proxy-server=http://myproxy:3128',
    '--timezone=Europe/Rome',
    '--user-agent=hardkodemium',
    '--viewport-size=1280,720',
    '--target=csharp',
    emptyHTML]);
  const expectedResult = `
        using var playwright = await Playwright.CreateAsync();
        await using var browser = await playwright.${capitalize(browserName)}.LaunchAsync(new BrowserTypeLaunchOptions
        {
            ${launchOptions(channel)}
            Proxy = new ProxySettings
            {
                Server = "http://myproxy:3128",
            },
        });
        var context = await browser.NewContextAsync(new BrowserNewContextOptions(playwright.Devices["iPhone 11"])
        {
            UserAgent = "hardkodemium",
            ViewportSize = new ViewportSize
            {
                Width = 1280,
                Height = 720,
            },
            Geolocation = new Geolocation
            {
                Latitude = 37.819722m,
                Longitude = -122.478611m,
            },
            Permissions = new[] { ContextPermission.Geolocation },
            Locale = "es",
            ColorScheme = ColorScheme.Dark,
            TimezoneId = "Europe/Rome",
        });`;
  await cli.waitFor(expectedResult);
  expect(cli.text()).toContain(expectedResult);
});

// --load-storage should emit StorageStatePath on context creation and
// --save-storage should emit a StorageStateAsync call at the end.
test('should print load/save storageState', async ({ browserName, channel, runCLI }, testInfo) => {
  const loadFileName = testInfo.outputPath('load.json');
  const saveFileName = testInfo.outputPath('save.json');
  await fs.promises.writeFile(loadFileName, JSON.stringify({ cookies: [], origins: [] }), 'utf8');
  const cli = runCLI([`--load-storage=${loadFileName}`, `--save-storage=${saveFileName}`, '--target=csharp', emptyHTML]);
  const expectedResult1 = `
        using var playwright = await Playwright.CreateAsync();
        await using var browser = await playwright.${capitalize(browserName)}.LaunchAsync(new BrowserTypeLaunchOptions
        {
            ${launchOptions(channel)}
        });
        var context = await browser.NewContextAsync(new BrowserNewContextOptions
        {
            StorageStatePath = "${loadFileName.replace(/\\/g, '\\\\')}",
        });`;
  await cli.waitFor(expectedResult1);

  const expectedResult2 = `
        await context.StorageStateAsync(new BrowserContextStorageStateOptions
        {
            Path = "${saveFileName.replace(/\\/g, '\\\\')}"
        });
`;
  await cli.waitFor(expectedResult2);
});
|
# Given a list of numbers
# (renamed from `list`, which shadowed the built-in type of the same name)
numbers = [1, 2, 3, 4, 5]

# Calculate the sum with the built-in sum() instead of a manual loop
sum_val = sum(numbers)

# Print the result
print(sum_val)  # prints 15
import { useContext } from 'react'
import { useRouteMatch, useHistory } from 'react-router-dom'
import gameService from '../../../services/gameService'
import genreService from '../../../services/genreService'
import devService from '../../../services/devService'
import UserContext from '../../../contexts/UserContext'
import errorHandler from '../../../utils/errorHandler'
import AlertBox from '../../AlertBox'
const DeleteAlert = ({ deleteAlert, setDeleteAlert }) => {
const { userData } = useContext(UserContext)
const { itemID } = useRouteMatch().params
const history = useHistory()
const type = history.location.pathname.split('/')[1]
const handleDeleteYes = async () => {
try {
if (type === 'games') gameService.deleteGame(itemID, userData.user.id)
if (type === 'genres') genreService.deleteGenre(itemID, userData.user.id)
if (type === 'devs') devService.deleteDev(itemID, userData.user.id)
history.push(`/${type}`)
} catch (err) { errorHandler(err) }
}
const handleDeleteNo = () => { setDeleteAlert('hide') }
return (
<AlertBox display={deleteAlert} yesHandler={handleDeleteYes} noHandler={handleDeleteNo} type="delete">
<p>{`Are you sure you want to delete this ${type.slice(0, type.length - 1)}?`}</p>
</AlertBox>
)
}
export default DeleteAlert |
// Returns a new array containing only the elements of `arr` that are
// greater than or equal to `value`; `arr` itself is left untouched.
function filterByValue(arr, value) {
    const kept = [];
    for (const el of arr) {
        if (el >= value) kept.push(el);
    }
    return kept;
}
# GIT
alias g="git"
alias ga="git add"
alias gss="git status -s"
alias gst="git status"
alias gac="git add . && git commit -m"
alias gc="git commit -m"
alias gap="git add -p ."
alias gp="git push"
alias gaf="git push -f"
alias gco="git checkout"
alias gb="git branch"
alias glog="git log --oneline --decorate"
alias gamend="git commit --amend"
alias gamendadd="git commit --amend --no-edit"
alias gfall="git fetch --all"
alias gpall="git pull --all"
alias gunstage="git reset -q HEAD --"
alias gdiscard="git checkout --"
alias guncommit="git reset --mixed HEAD~"
alias gplease="git push --force-with-lease"
alias gnomessage="git commit --allow-empty-message -m ''"
alias gpoh="git push --set-upstream origin HEAD"
alias gprm="git pull --rebase origin master"
alias commit-without-husky="HUSKY_SKIP_HOOKS=true git commit --no-verify"

# Docker & K8S
# (fix: `alias k` was previously defined twice — once here and once in a
# duplicate "# Kubernetes" section at the bottom; consolidated here.)
alias dc="docker compose"
alias k="kubectl"
alias kcontext="kubectl config current-context"

# ZSH
alias reload!=". $HOME/.zshrc"
alias timezsh="time zsh -i -c echo"

# OS
alias make-executable="chmod +x"
alias -g ...='../..'
alias -g ....='../../..'
alias -g .....='../../../..'
alias -g ......='../../../../..'
alias my-ip="ipconfig getifaddr en0"
alias super-danger-rm="rm -rf"
alias l="ls -lah"
alias ll="ls -lh"
alias ..="cd .."
alias make-dir='mkdir -p'
alias finder="open"
alias show-hidden='defaults write com.apple.finder AppleShowAllFiles YES; killall Finder /System/Library/CoreServices/Finder.app'
alias hide-hidden='defaults write com.apple.finder AppleShowAllFiles NO; killall Finder /System/Library/CoreServices/Finder.app'

# Brew
alias brews='brew list -1'
alias bubo='brew update && brew outdated'
alias bubc='brew upgrade && brew cleanup'
alias bubu='bubo && bubc'

# Node
# alias npm="yarn"
alias yarndep="yarn add --exact"
alias yarndev="yarn add -D --exact"

# Dev
alias dev="cd $HOME/Developer"
alias read-my-keystrokes="sed -n l"
alias open-pr="gh pr view --web"
alias pretty-json="python -m json.tool"
|
<filename>src/main/java/org/olat/core/commons/services/doceditor/ui/AdminDocEditorController.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.commons.services.doceditor.ui;
import java.util.List;
import org.olat.core.commons.services.doceditor.DocEditorAdminSegment;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.segmentedview.SegmentViewComponent;
import org.olat.core.gui.components.segmentedview.SegmentViewEvent;
import org.olat.core.gui.components.segmentedview.SegmentViewFactory;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.controller.BasicController;
import org.olat.core.gui.control.generic.dtabs.Activateable2;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.StateEntry;
import org.olat.core.util.resource.OresHelper;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
* Initial date: 8 Apr 2019<br>
* @author uhensler, <EMAIL>, http://www.frentix.com
*
*/
public class AdminDocEditorController extends BasicController implements Activateable2 {

	// Business-path resource type that (re)activates the "documents in use" segment.
	private static final String DOCUMENTS_IN_USE_RES_TYPE = "OpenDocuments";

	private VelocityContainer mainVC;
	private final Link documentsInUseLink;
	private final SegmentViewComponent segmentView;

	private Controller editorCtrl;
	private DocumentsInUseListController documentsInUseCtrl;
	// Generates unique component names ("ed-1", "ed-2", ...) for segment links.
	private int counter = 0;

	@Autowired
	private List<DocEditorAdminSegment> adminSegments;

	public AdminDocEditorController(UserRequest ureq, WindowControl wControl) {
		super(ureq, wControl);
		mainVC = createVelocityContainer("admin");
		segmentView = SegmentViewFactory.createSegmentView("segments", mainVC, this);
		// One link per injected admin segment, sorted alphabetically by display
		// name; the segment rides along as the link's user object for dispatch.
		adminSegments.sort((s1, s2) -> s1.getLinkName(getLocale()).compareToIgnoreCase(s2.getLinkName(getLocale())));
		for (DocEditorAdminSegment adminSegment : adminSegments) {
			String name = "ed-" + (++counter);
			Link link = LinkFactory.createLink(name, name, getTranslator(), mainVC, this, Link.NONTRANSLATED);
			link.setCustomDisplayText(adminSegment.getLinkName(getLocale()));
			link.setUserObject(adminSegment);
			segmentView.addSegment(link, false);
		}
		// Fixed trailing segment listing currently open documents.
		documentsInUseLink = LinkFactory.createLink("admin.documents.in.use", mainVC, this);
		segmentView.addSegment(documentsInUseLink, false);
		// Select and open the first segment by default.
		Component firstLink = segmentView.getSegments().get(0);
		segmentView.select(firstLink);
		doOpenAdminSegment(ureq, adminSegments.get(0));
		putInitialPanel(mainVC);
	}

	@Override
	public void activate(UserRequest ureq, List<ContextEntry> entries, StateEntry state) {
		if(entries == null || entries.isEmpty()) return;

		// Route the business path's resource type either to the
		// documents-in-use list or to the matching admin segment.
		String type = entries.get(0).getOLATResourceable().getResourceableTypeName();
		if (DOCUMENTS_IN_USE_RES_TYPE.equalsIgnoreCase(type)) {
			doOpenDocumentsInUse(ureq);
			segmentView.select(documentsInUseLink);
		} else {
			DocEditorAdminSegment adminSegment = getAdminSegment(type);
			if (adminSegment != null) {
				doOpenAdminSegment(ureq, adminSegment);
			}
		}
	}

	// Finds (and selects in the segment view) the segment whose business path
	// type matches; returns null when no segment matches.
	private DocEditorAdminSegment getAdminSegment(String type) {
		for (Component component : segmentView.getSegments()) {
			if (component instanceof Link) {
				Link link = (Link)component;
				Object userObject = link.getUserObject();
				if (userObject instanceof DocEditorAdminSegment) {
					DocEditorAdminSegment adminSegment = (DocEditorAdminSegment)userObject;
					if (adminSegment.getBusinessPathType().equalsIgnoreCase(type)) {
						segmentView.select(component);
						return adminSegment;
					}
				}
			}
		}
		return null;
	}

	@Override
	protected void event(UserRequest ureq, Component source, Event event) {
		// Segment clicks: open either the documents-in-use list or the admin
		// segment attached to the clicked link as its user object.
		if(source == segmentView) {
			if(event instanceof SegmentViewEvent) {
				SegmentViewEvent sve = (SegmentViewEvent)event;
				String segmentCName = sve.getComponentName();
				Component clickedLink = mainVC.getComponent(segmentCName);
				if (clickedLink == documentsInUseLink) {
					doOpenDocumentsInUse(ureq);
				} else if (clickedLink instanceof Link) {
					Link link = (Link)clickedLink;
					Object userObject = link.getUserObject();
					if (userObject instanceof DocEditorAdminSegment) {
						DocEditorAdminSegment adminSegment = (DocEditorAdminSegment)userObject;
						doOpenAdminSegment(ureq, adminSegment);
					}
				}
			}
		}
	}

	// Replaces the current segment controller with a freshly created one for
	// the given admin segment and records the switch in the window history.
	private void doOpenAdminSegment(UserRequest ureq, DocEditorAdminSegment adminSegment) {
		removeAsListenerAndDispose(editorCtrl);
		editorCtrl = null;

		WindowControl swControl = addToHistory(ureq, OresHelper.createOLATResourceableType(adminSegment.getBusinessPathType()), null);
		editorCtrl = adminSegment.createController(ureq, swControl);
		listenTo(editorCtrl);
		mainVC.put("segmentCmp", editorCtrl.getInitialComponent());
	}

	// Lazily creates the documents-in-use controller on first open; later
	// activations reuse it and just reload its model.
	private void doOpenDocumentsInUse(UserRequest ureq) {
		if(documentsInUseCtrl == null) {
			WindowControl swControl = addToHistory(ureq, OresHelper.createOLATResourceableType(DOCUMENTS_IN_USE_RES_TYPE), null);
			documentsInUseCtrl = new DocumentsInUseListController(ureq, swControl);
			listenTo(documentsInUseCtrl);
		} else {
			documentsInUseCtrl.loadModel();
			addToHistory(ureq, documentsInUseCtrl);
		}
		mainVC.put("segmentCmp", documentsInUseCtrl.getInitialComponent());
	}
}
|
<reponame>danielleolgin/to-fix-backend
const Sequelize = require('sequelize');
module.exports = function(db) {
var Quadkey = db.define(
'quadkey',
{
id: {
type: Sequelize.INTEGER,
autoIncrement: true,
primaryKey: true
},
quadkey: {
type: Sequelize.STRING,
allowNull: false,
unique: 'quadkey_set'
},
set_id: {
type: Sequelize.STRING,
allowNull: true,
unique: 'quadkey_set'
},
priority: {
type: Sequelize.FLOAT,
allowNull: false
}
},
{
indexes: [
{
fields: ['quadkey']
},
{
fields: ['priority']
},
{
fields: ['set_id']
}
]
}
);
return Quadkey;
};
|
// SPDX-License-Identifier: BSD-3-Clause
//
// Copyright(c) 2021 Google LLC.
//
// Author: <NAME> <<EMAIL>>
#ifndef GOOGLE_RTC_AUDIO_PROCESSING_H
#define GOOGLE_RTC_AUDIO_PROCESSING_H
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
// This define ensures that the linked library matches the header file:
// callers of GoogleRtcAudioProcessingCreate link against the versioned
// symbol, so an incompatible library fails at link time rather than at
// runtime.
#define GoogleRtcAudioProcessingCreate GoogleRtcAudioProcessingCreate_v1
// Opaque handle to an audio processing instance; contents are private to
// the library.
typedef struct GoogleRtcAudioProcessingState GoogleRtcAudioProcessingState;
// Creates an instance of GoogleRtcAudioProcessing with the tuning embedded in
// the library. Returns NULL semantics are not specified here — check the
// library documentation.
GoogleRtcAudioProcessingState *GoogleRtcAudioProcessingCreate(void);
// Frees all allocated resources in `state`.
void GoogleRtcAudioProcessingFree(GoogleRtcAudioProcessingState *state);
// Returns the frame size used for processing, in milliseconds.
int GoogleRtcAudioProcessingGetFramesizeInMs(
    GoogleRtcAudioProcessingState *state);
// Processes the microphone stream.
// Accepts deinterleaved float audio with the range [-1, 1]. Each element of
// |src| points to an array of samples for the channel. At output, the channels
// will be in |dest|.
// Returns 0 if success and non zero if failure.
int GoogleRtcAudioProcessingProcessCapture_float32(
    GoogleRtcAudioProcessingState *const state, const float *const *src,
    float *const *dest);
// Accepts and produces a frame of interleaved 16 bit integer audio. `src`
// and `dest` may use the same memory, if desired.
// Returns 0 if success and non zero if failure.
int GoogleRtcAudioProcessingProcessCapture_int16(
    GoogleRtcAudioProcessingState *const state, const int16_t *const src,
    int16_t *const dest);
// Analyzes the playback stream.
// Accepts deinterleaved float audio with the range [-1, 1]. Each element
// of |src| points to an array of samples for the channel.
// Returns 0 if success and non zero if failure.
int GoogleRtcAudioProcessingAnalyzeRender_float32(
    GoogleRtcAudioProcessingState *const state, const float *const *src);
// Accepts interleaved int16 audio.
// Returns 0 if success and non zero if failure.
int GoogleRtcAudioProcessingAnalyzeRender_int16(
    GoogleRtcAudioProcessingState *const state, const int16_t *const src);
#ifdef __cplusplus
}
#endif
#endif  // GOOGLE_RTC_AUDIO_PROCESSING_H
|
########################################################################
# pod-security-policy.sh
# https://pmcgrath.net/using-pod-security-policies-with-kubeadm
# https://kubernetes.io/docs/concepts/policy/pod-security-policy/#policy-order
########################################################################
# Once the PodSecurityPolicy admission controller is enabled, no pods will
# be created until we establish a PodSecurityPolicy that allows them.
# based on https://github.com/kubernetes/website/raw/master/content/en/examples/policy/privileged-psp.yaml
# NOTE(review): assumes $WORK_DIR is set and writable — confirm against the
# surrounding script.
cat <<EOF | tee $WORK_DIR/privileged-psp.yaml
apiVersion: policy/v1beta1
kind: PodSecurityPolicy
metadata:
  # All other things equal, a PSP is selected by name-order. Rank this one
  # lastish so any more-intentional PSPs get applied to services when they exist.
  name: z-privileged
  annotations:
    seccomp.security.alpha.kubernetes.io/allowedProfileNames: '*'
    seccomp.security.alpha.kubernetes.io/defaultProfileName: 'docker/default'
    apparmor.security.beta.kubernetes.io/defaultProfileName: 'runtime/default'
spec:
  privileged: true
  allowPrivilegeEscalation: true
  allowedCapabilities:
  - '*'
  volumes:
  - '*'
  hostNetwork: true
  hostPorts:
  - min: 0
    max: 65535
  hostIPC: true
  hostPID: true
  runAsUser:
    rule: 'RunAsAny'
  seLinux:
    rule: 'RunAsAny'
  supplementalGroups:
    rule: 'RunAsAny'
  fsGroup:
    rule: 'RunAsAny'
EOF
# Register the fully-permissive policy; access to it is granted separately
# via RBAC below.
kubectl apply -f $WORK_DIR/privileged-psp.yaml
# a la https://pmcgrath.net/using-pod-security-policies-with-kubeadm
# Grant the privileged policy to node kubelets and kube-system service
# accounts only, so system components keep working while ordinary workloads
# fall through to a stricter policy.
cat <<EOF | tee $WORK_DIR/privileged-psp-role-binding.yaml
# Cluster role which grants access to the default pod security policy
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: privileged-psp
rules:
- apiGroups:
  - policy
  resourceNames:
  - z-privileged
  resources:
  - podsecuritypolicies
  verbs:
  - use
---
# Cluster role binding for default pod security policy granting all authenticated users access
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: privileged-psp
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: privileged-psp
subjects:
- apiGroup: rbac.authorization.k8s.io
  kind: Group
  name: system:nodes
  namespace: kube-system
- apiGroup: rbac.authorization.k8s.io
  kind: Group
  name: system:serviceaccounts:kube-system
EOF
kubectl apply -f $WORK_DIR/privileged-psp-role-binding.yaml
# default non-system policy
# NOTE(review): the original manifest repeated hostNetwork/runAsUser/seLinux/
# supplementalGroups/fsGroup at the end of spec with permissive 'RunAsAny'
# values. Duplicate mapping keys are invalid YAML, and parsers that tolerate
# them take the last value — silently replacing the restrictive settings with
# permissive ones. The duplicates are removed so the restrictive values stand.
# It also used 'RunAsNonRoot' as a rule for seLinux and supplementalGroups,
# which is not a valid rule for either field.
cat <<EOF | tee $WORK_DIR/default-psp.yaml
apiVersion: policy/v1beta1
kind: PodSecurityPolicy
metadata:
  annotations:
    apparmor.security.beta.kubernetes.io/allowedProfileNames: 'runtime/default'
    apparmor.security.beta.kubernetes.io/defaultProfileName: 'runtime/default'
    seccomp.security.alpha.kubernetes.io/allowedProfileNames: 'docker/default'
    seccomp.security.alpha.kubernetes.io/defaultProfileName: 'docker/default'
  name: z-default
spec:
  allowedCapabilities: [] # default set of capabilities are implicitly allowed
  allowPrivilegeEscalation: false
  fsGroup:
    rule: 'MustRunAs'
    ranges:
      # Forbid adding the root group.
      - min: 1
        max: 65535
  hostIPC: false
  hostNetwork: false
  hostPID: false
  privileged: false
  readOnlyRootFilesystem: false
  runAsUser:
    rule: 'MustRunAsNonRoot'
  seLinux:
    # Valid seLinux rules are 'MustRunAs' and 'RunAsAny'.
    rule: 'RunAsAny'
  supplementalGroups:
    # Valid rules are 'MustRunAs', 'MayRunAs', 'RunAsAny'. 'MustRunAs' with a
    # non-root range keeps the original intent of forbidding the root group.
    rule: 'MustRunAs'
    ranges:
      # Forbid adding the root group.
      - min: 1
        max: 65535
  volumes:
    - 'configMap'
    - 'downwardAPI'
    - 'emptyDir'
    - 'persistentVolumeClaim'
    - 'projected'
    - 'secret'
EOF
kubectl apply -f $WORK_DIR/default-psp.yaml
# Grant the restrictive default policy to every authenticated user; pods not
# covered by a more specific binding get this policy.
cat <<EOF | tee $WORK_DIR/default-psp-role-binding.yaml
# Cluster role which grants access to the default pod security policy
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: default-psp
rules:
- apiGroups:
  - policy
  resourceNames:
  - z-default
  resources:
  - podsecuritypolicies
  verbs:
  - use
---
# Cluster role binding for default pod security policy granting all authenticated users access
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: default-psp
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: default-psp
subjects:
- apiGroup: rbac.authorization.k8s.io
  kind: Group
  name: system:authenticated
EOF
kubectl apply -f $WORK_DIR/default-psp-role-binding.yaml
|
def calculate_cost_with_tax(price):
    """Return ``price`` plus a flat 8.25% sales tax.

    Args:
        price: The pre-tax price.

    Returns:
        The total cost including tax.
    """
    TAX_RATE = 0.0825  # flat 8.25% sales tax
    tax = price * TAX_RATE
    return price + tax
# FIX: the original called calculate_cost_with_tax(price) with `price`
# undefined, raising NameError at runtime. Define an example price first.
price = 100.0
cost = calculate_cost_with_tax(price)
print(cost)  # 108.25
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Recursively compares two object graphs and records each mismatch as a
 * {@link Difference}. Lists, maps, sets and arrays are walked element-wise;
 * other objects are delegated to {@link #compareFields}.
 */
public class DeepComparator {

    /**
     * Compares the two objects and asserts that exactly {@code nbDifferences}
     * differences were found.
     *
     * @param o1 first object (may be null)
     * @param o2 second object (may be null)
     * @param nbDifferences expected number of differences
     * @return the collected differences
     * @throws AssertionError when the difference count does not match
     */
    public List<Difference> execTestCompareOneType(Object o1, Object o2, int nbDifferences) {
        List<Difference> differences = new ArrayList<>();
        compareObjects(o1, o2, differences);
        if (differences.size() != nbDifferences) {
            throw new AssertionError("Expected " + nbDifferences + " differences, but found " + differences.size());
        }
        return differences;
    }

    // Dispatches on the runtime type; records a single difference when the
    // nullity or the classes differ.
    private void compareObjects(Object o1, Object o2, List<Difference> differences) {
        if (o1 == null || o2 == null) {
            if (o1 != o2) {
                differences.add(new Difference(o1, o2));
            }
            return;
        }
        if (!o1.getClass().equals(o2.getClass())) {
            differences.add(new Difference(o1, o2));
            return;
        }
        if (o1 instanceof List) {
            compareLists((List<?>) o1, (List<?>) o2, differences);
        } else if (o1 instanceof Map) {
            compareMaps((Map<?, ?>) o1, (Map<?, ?>) o2, differences);
        } else if (o1 instanceof Set) {
            compareSets((Set<?>) o1, (Set<?>) o2, differences);
        } else if (o1.getClass().isArray()) {
            compareArrays(o1, o2, differences);
        } else {
            compareFields(o1, o2, differences);
        }
    }

    // Lists are compared positionally; a size mismatch is one difference.
    private void compareLists(List<?> list1, List<?> list2, List<Difference> differences) {
        if (list1.size() != list2.size()) {
            differences.add(new Difference(list1, list2));
            return;
        }
        for (int i = 0; i < list1.size(); i++) {
            compareObjects(list1.get(i), list2.get(i), differences);
        }
    }

    // Maps are compared by key.
    private void compareMaps(Map<?, ?> map1, Map<?, ?> map2, List<Difference> differences) {
        if (map1.size() != map2.size()) {
            differences.add(new Difference(map1, map2));
            return;
        }
        for (Map.Entry<?, ?> entry : map1.entrySet()) {
            Object key = entry.getKey();
            // FIX: map2.get(key) == null is ambiguous — it can mean "key maps
            // to null" or "key is absent". The original missed the case where
            // both maps had equal sizes, different keys, and null values
            // (e.g. {a=null} vs {b=null}), reporting no difference.
            if (!map2.containsKey(key)) {
                differences.add(new Difference(map1, map2));
                return;
            }
            compareObjects(entry.getValue(), map2.get(key), differences);
        }
    }

    // Sets are compared by membership (equals-based linear scan, O(n^2)).
    private void compareSets(Set<?> set1, Set<?> set2, List<Difference> differences) {
        if (set1.size() != set2.size()) {
            differences.add(new Difference(set1, set2));
            return;
        }
        for (Object obj1 : set1) {
            boolean found = false;
            for (Object obj2 : set2) {
                if (obj1.equals(obj2)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                differences.add(new Difference(set1, set2));
                return;
            }
        }
    }

    // Arrays are compared positionally via reflection.
    private void compareArrays(Object array1, Object array2, List<Difference> differences) {
        if (Array.getLength(array1) != Array.getLength(array2)) {
            differences.add(new Difference(array1, array2));
            return;
        }
        for (int i = 0; i < Array.getLength(array1); i++) {
            compareObjects(Array.get(array1, i), Array.get(array2, i), differences);
        }
    }

    // Intentionally left as an extension point for field-level comparison of
    // non-collection, non-array objects.
    private void compareFields(Object obj1, Object obj2, List<Difference> differences) {
        // Implement comparison of fields for non-collection, non-array objects
        // Add differences to the 'differences' list as needed
    }
}
/**
 * Immutable pair of values recorded when two compared objects diverge.
 */
class Difference {
    private final Object value1;
    private final Object value2;

    /**
     * @param value1 the value taken from the first compared object (may be null)
     * @param value2 the value taken from the second compared object (may be null)
     */
    public Difference(Object value1, Object value2) {
        this.value1 = value1;
        this.value2 = value2;
    }

    /** Returns the value from the first compared object. */
    public Object getValue1() {
        return value1;
    }

    /** Returns the value from the second compared object. */
    public Object getValue2() {
        return value2;
    }
}
// Define the 4x4 grid of letters players pick from.
const grid = [
  ['A', 'B', 'C', 'D'],
  ['E', 'F', 'G', 'H'],
  ['I', 'J', 'K', 'L'],
  ['M', 'N', 'O', 'P']
];
// Define the list of acceptable words (compared verbatim, uppercase).
const acceptableWords = ['DOG', 'CAT', 'FISH', 'BIRD', 'LION', 'BEAR'];
// Initialize shared game state used by the functions below.
let selectedLetters = '';  // letters picked so far for the current word
let score = 0;             // number of accepted words
let timer;                 // interval id from startTimer(), cleared by endGame()
let timeLimit = 60; // in seconds, counted down once per second
// Function to start the game: render the board, then begin the countdown.
function startGame() {
  displayGrid();
  startTimer();
}
// Function to display the grid of letters (UI hook, intentionally a stub).
function displayGrid() {
  // Code to display the grid on the game interface
}
// Function to start the timer: tick once per second until endGame() clears it.
function startTimer() {
  timer = setInterval(updateTimer, 1000);
}
// Function to update the timer: decrement the remaining time and end the
// game when it runs out.
function updateTimer() {
  timeLimit--;
  // FIX: use <= 0 instead of === 0 so the game still ends if the counter
  // ever skips past zero (e.g. startGame() called twice registers two
  // intervals, each decrementing timeLimit once per second).
  if (timeLimit <= 0) {
    endGame();
  }
  // Code to update the timer display on the game interface
}
// Function to handle letter selection: append the chosen cell's letter to
// the word in progress. No adjacency or duplicate checking is performed.
function selectLetter(row, col) {
  const letter = grid[row][col];
  selectedLetters = selectedLetters + letter;
  // Code to visually indicate the selected letters on the game interface
}
// Function to check whether the currently selected letters spell an
// acceptable word; award a point if so, then reset the selection either way.
function checkWord() {
  const wordIsValid = acceptableWords.includes(selectedLetters);
  if (wordIsValid) {
    score += 1;
    // Code to update the score display on the game interface
  }
  selectedLetters = '';
}
// Function to end the game: stop the countdown interval started by
// startTimer() and hand off to the UI for the final score.
function endGame() {
  clearInterval(timer);
  // Code to display the final score and end game message on the game interface
}
class Person:
    """Simple value holder for a person's basic attributes.

    Attributes are stored verbatim; no validation is performed.
    """

    def __init__(self, name, age, gender):
        # name: the person's name
        # age: the person's age
        # gender: the person's gender
        self.name, self.age, self.gender = name, age, gender
#!/bin/bash
# This function runs the feature extraction binary for the generated
# stereo pairs (see make_train_stereo_pairs.sh). This function uses a
# training image list file similar to make_train_stereo_pairs.sh:
#
# <wavefront_obj_file_full_path> <texture_png_file_full_path> <obj_name>
#
# <obj_name> will be used to name the output files
# Usage: prompt:~>source extract_train_primitives.sh <train_obj_file> [<output_dir>]
tempwork_dir="TEMPWORK_KIT"
slam_config_skeleton="data/KIT_slam_config_skeleton_for_trainset.xml";
slam_bin="./slam";
echo " ";
echo "======================= EXTRACT_TRAIN_PRIMITIVES ======================="
printf "=> Using Slam config skeleton: $slam_config_skeleton \n";
# Bail out early when the config skeleton is missing.
if [ ! -e ${slam_config_skeleton} ]; then
  # FIX: "skeletong" typo in the original user-facing message.
  echo "Slam xml skeleton $slam_config_skeleton does not exist (check for .svn file)!!";
  echo "Try: source checklocal.bash in the ObjectDetection directory"
  return 1;
fi;
if [ $# -eq 0 ]; then
  echo "Not enough input arguments!";
  echo "Usage: source extract_train_primitives.sh <train_obj_file> [<tempwork_dir>]";
  echo "  Example: source scripts/extract_train_primitives.sh config/kit_5k_tex_first_12.txt"
  return 1;
fi;
if [ $# -eq 1 ]; then
  tempwork_dir=${tempwork_dir%/}; # removes last slash if exists
  echo "=> Using default temporary work directory " $tempwork_dir;
fi;
if [ $# -eq 2 ]; then
  tempwork_dir=${2%/}; # removes last slash if exists
  echo $tempwork_dir;
fi;
# Process each non-comment line of the training list: render a per-object
# slam config from the skeleton, run the binary, and summarize its log.
while read -r lline
do
  if [[ ! "$lline" == "#"* ]]; then # Skip comment lines
    line_ar=( $lline ); # intentional word-splitting into the 3 columns
    basename=${line_ar[2]};
    echo "----------- Processing ${basename} ----------------"
    mkdir -p "${tempwork_dir}/Slam_output_${basename}";
    sed -e "s/@BASENAME@/$basename/g" -e "s/@BASEDIR@/$tempwork_dir/g" $slam_config_skeleton > "${tempwork_dir}/Slam_config_${basename}_train.xml";
    $slam_bin --images "${tempwork_dir}/Slam_config_${basename}_train.xml" > "${tempwork_dir}/Slam_output_${basename}.log"
    echo "2D processing part:"
    grep "line segments created" "${tempwork_dir}/Slam_output_${basename}.log"
    echo "Stereo reconstruction processing part:"
    grep "many 3D segments have been created" "${tempwork_dir}/Slam_output_${basename}.log"
    grep "as a vector of 3D-" "${tempwork_dir}/Slam_output_${basename}.log"
  fi
# FIX: quote "$1" so list files with spaces in their path still work.
done < "$1"
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.