text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Print the track Spotify is currently playing, queried over D-Bus (MPRIS).
#
# Usage: script [FORMAT]
#   FORMAT may contain the placeholders %artist%, %title% and %album%;
#   it defaults to "%artist% - %title%".  Prints an empty line and exits
#   when Spotify is not running.
main() {
    # Bail out silently when no spotify process exists.
    if ! pgrep -x spotify >/dev/null; then
        echo ""
        exit
    fi
    cmd="org.freedesktop.DBus.Properties.Get"
    domain="org.mpris.MediaPlayer2"
    path="/org/mpris/MediaPlayer2"
    # Fix: actually use the cmd/path variables (they were defined but the
    # literals were repeated), and quote the expansions.
    meta=$(dbus-send --print-reply --dest="${domain}.spotify" \
        "$path" "$cmd" string:"${domain}.Player" string:Metadata)
    # Extract each field from the dbus-send reply; escape '&' and '/' so the
    # values are safe inside the sed replacement text below.
    artist=$(echo "$meta" | sed -nr '/xesam:artist"/,+2s/^ +string "(.*)"$/\1/p' | tail -1 | sed 's/\&/\\&/g' | sed 's#\/#\\/#g')
    album=$(echo "$meta" | sed -nr '/xesam:album"/,+2s/^ +variant +string "(.*)"$/\1/p' | tail -1 | sed 's/\&/\\&/g' | sed 's#\/#\\/#g')
    title=$(echo "$meta" | sed -nr '/xesam:title"/,+2s/^ +variant +string "(.*)"$/\1/p' | tail -1 | sed 's/\&/\\&/g' | sed 's#\/#\\/#g')
    # Fix: drop the stray "i" flag that had leaked outside the quoted sed
    # script (it made the placeholder match case-insensitive by accident) and
    # the two trailing no-op substitutions (s/\&/\&/g and s#\/#\/#g).
    echo "${*:-%artist% - %title%}" | sed "s/%artist%/$artist/g;s/%title%/$title/g;s/%album%/$album/g"
}
main "$@"
|
<gh_stars>1-10
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.query.cql;
import java.util.Iterator;
import java.util.List;
import info.archinnov.achilles.internal.persistence.operations.NativeQueryMapper;
import info.archinnov.achilles.internal.persistence.operations.TypedMapIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.Row;
import info.archinnov.achilles.internal.context.DaoContext;
import info.archinnov.achilles.internal.statement.wrapper.NativeQueryLog;
import info.archinnov.achilles.internal.statement.wrapper.NativeStatementWrapper;
import info.archinnov.achilles.type.Options;
import info.archinnov.achilles.type.TypedMap;
/**
 * Class to wrap CQL3 native query
 *
 * <pre class="code"><code class="java">
 *
 * String nativeQuery = "SELECT name,age_in_years FROM UserEntity WHERE id IN(?,?)";
 * List<TypedMap> actual = manager.nativeQuery(nativeQuery,10L,11L).get();
 *
 * </code></pre>
 *
 * @see <a href="https://github.com/doanduyhai/Achilles/wiki/Queries#native-query" target="_blank">Native query</a>
 */
public class NativeQuery {
    private static final Logger log = LoggerFactory.getLogger(NativeQuery.class);

    // Collaborators are assigned once in the constructor and never change.
    private final DaoContext daoContext;
    private final NativeQueryMapper mapper = NativeQueryMapper.Singleton.INSTANCE.get();

    protected Object[] boundValues;
    protected NativeStatementWrapper nativeStatementWrapper;
    protected Options options;

    public NativeQuery(DaoContext daoContext, RegularStatement regularStatement, Options options, Object... boundValues) {
        this.daoContext = daoContext;
        this.nativeStatementWrapper = new NativeStatementWrapper(NativeQueryLog.class, regularStatement, boundValues, options.getCasResultListener());
        this.options = options;
        this.boundValues = boundValues;
    }

    /**
     * Return found rows. The list represents the number of returned rows The
     * map contains the (column name, column value) of each row. The map is
     * backed by a LinkedHashMap and thus preserves the columns order as they
     * were declared in the native query
     *
     * @return List<TypedMap>
     */
    public List<TypedMap> get() {
        log.debug("Get results for native query {}", nativeStatementWrapper.getStatement());
        List<Row> rows = daoContext.execute(nativeStatementWrapper).all();
        return mapper.mapRows(rows);
    }

    /**
     * Return the first found row. The map contains the (column name, column
     * value) of each row. The map is backed by a LinkedHashMap and thus
     * preserves the columns order as they were declared in the native query
     *
     * @return TypedMap, or {@code null} when the query matched no row
     */
    public TypedMap first() {
        log.debug("Get first result for native query {}", nativeStatementWrapper.getStatement());
        // Fix: fetch only the first row instead of materialising the whole
        // result set with all() and discarding everything but index 0.
        Row row = daoContext.execute(nativeStatementWrapper).one();
        if (row == null) {
            return null;
        }
        List<TypedMap> result = mapper.mapRows(java.util.Collections.singletonList(row));
        return result.isEmpty() ? null : result.get(0);
    }

    /**
     * Execute statement without returning result. Useful for
     * INSERT/UPDATE/DELETE and DDL statements
     */
    public void execute() {
        log.debug("Execute native query {}", nativeStatementWrapper.getStatement());
        daoContext.execute(nativeStatementWrapper);
    }

    /**
     * Return an iterator of {@link info.archinnov.achilles.type.TypedMap} instance. Each instance represents a CQL row
     * @return Iterator<TypedMap>
     */
    public Iterator<TypedMap> iterator() {
        log.debug("Execute native query {} and return iterator", nativeStatementWrapper.getStatement());
        return new TypedMapIterator(daoContext.execute(nativeStatementWrapper).iterator());
    }
}
|
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * Minimal command-line tool: downloads the web page given as the first
 * argument and prints its body (lines concatenated without separators,
 * matching the original behaviour) to stdout.
 */
public class WebPageReader {
    public static void main(String[] args) {
        if (args.length < 1) {
            System.out.println("Please enter a URL");
            System.exit(1);
        }
        String url = args[0];
        StringBuilder response = new StringBuilder();
        try {
            URL obj = new URL(url);
            HttpURLConnection con = (HttpURLConnection) obj.openConnection();
            // Fix: try-with-resources — the original leaked the reader when
            // readLine() threw an IOException before in.close() was reached.
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(con.getInputStream()))) {
                String inputLine;
                while ((inputLine = in.readLine()) != null) {
                    response.append(inputLine);
                }
            }
        } catch (IOException e) {
            System.err.println("Error while downloading the web page: " + e);
            System.exit(1);
        }
        System.out.println(response);
    }
}
# Copyright 2014 The crabapple Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import datetime
from flask import render_template, redirect, request
from crabapple.objects import Commit, Deployment, DeploymentStatus
class ControllerDeployment(object):
    """Flask controller for listing, inspecting and triggering deployments."""

    def __init__(self, server):
        self.server = server

    def view_deployments(self):
        # Overview page: all deployments plus a spec lookup table by id.
        store = self.server.store
        specs_by_id = {spec.id: spec for spec in store.get_all_specs()}
        return render_template('index.html',
                               deployments=store.get_all_deployments(),
                               specs=specs_by_id)

    def view_deployment(self, deployment_id):
        # Detail page for one deployment; unknown ids bounce back to the root.
        deployment_object = self.server.store.get_deployment(deployment_id)
        if deployment_object is None:
            return redirect('/')
        log_path = '{}/{}.log'.format(self.server.config.logdir,
                                      deployment_object.id)
        try:
            with open(log_path) as log_file:
                content = log_file.read()
        except IOError:
            # Missing/unreadable log is not fatal; show an empty log instead.
            content = ''
        return render_template('deployment_view.html',
                               deployment=deployment_object,
                               content=content)

    def view_deploy(self):
        # GET renders the manual-deploy form; POST schedules a deployment.
        if request.method == 'GET':
            return render_template('deploy.html',
                                   specs=self.server.store.get_all_specs())
        elif request.method == 'POST':
            commit_object = Commit()
            commit_object.hash = request.form['commit']
            deployment = Deployment(status=DeploymentStatus.SCHEDULED,
                                    triggered_time=datetime.datetime.now())
            deployment.spec_id = int(request.form['spec'])
            deployment.branch = '* Manual *'
            deployment.triggered_commit = commit_object
            deployment.pusher_name = 'admin'
            deployment.pusher_email = '-'
            self.server.trigger_deployment(deployment)
            return redirect('/deployments')

    def register(self, app):
        # Attach this controller's routes to the Flask application.
        app.add_url_rule('/deployments', 'view_deployments',
                         self.view_deployments)
        app.add_url_rule('/deploy', 'view_deploy', self.view_deploy,
                         methods=['GET', 'POST'])
        app.add_url_rule('/deployment/<int:deployment_id>', 'view_deployment',
                         self.view_deployment)
package com.acmvit.acm_app.ui.splash;
import android.app.Application;
import android.os.Handler;
import android.os.Looper;
import androidx.lifecycle.AndroidViewModel;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import com.acmvit.acm_app.AcmApp;
import com.acmvit.acm_app.pref.SessionManager;
import com.acmvit.acm_app.repository.UserRepository;
/**
 * ViewModel backing the splash screen: gates navigation behind a short
 * timeout and kicks off a user-profile refresh for logged-in sessions.
 */
public class SplashViewModel extends AndroidViewModel {
    private static final String TAG = "SplashViewModel";
    /** How long (ms) the splash screen stays visible before navigating on. */
    private static final int SPLASH_TIMEOUT = 1000;

    private final SessionManager sessionManager;
    private final UserRepository userRepository;
    private final Application application;
    /** Becomes true once the splash timeout has elapsed. */
    private final MutableLiveData<Boolean> canNavigate =
            new MutableLiveData<>(false);

    public SplashViewModel(Application application) {
        super(application);
        this.application = application;
        this.sessionManager = AcmApp.getSessionManager();
        this.userRepository = UserRepository.getInstance();
    }

    /** Schedules {@link #canNavigate} to flip to true after the timeout. */
    public void startNavigationIntent() {
        new Handler(Looper.getMainLooper())
                .postDelayed(() -> canNavigate.setValue(true), SPLASH_TIMEOUT);
    }

    /** Refreshes the cached user details when an auth session exists. */
    public void fetchUserDetails() {
        boolean loggedIn = sessionManager.getAuthState();
        if (loggedIn) {
            userRepository.fetchUserUsingWM(application);
        }
    }

    /** Exposes the navigation gate as read-only LiveData. */
    public LiveData<Boolean> getCanNavigate() {
        return canNavigate;
    }
}
|
/// A node of the doubly linked list used by the LRU cache; wraps one payload.
class DoublyLinkedListNode<Key, Value> {
    var payload: CachePayload<Key, Value>
    // Neighbouring nodes; nil at either end of the list.
    var prev: DoublyLinkedListNode?
    var next: DoublyLinkedListNode?

    init(payload: CachePayload<Key, Value>) {
        self.payload = payload
    }
}
/// Doubly linked list: most recently used node at the head, least recently
/// used at the tail.
class DoublyLinkedList<Key, Value> {
    private var head: DoublyLinkedListNode<Key, Value>?
    private var tail: DoublyLinkedListNode<Key, Value>?
    /// Number of nodes currently in the list.
    var count: Int = 0

    /// Inserts a new node holding `payload` at the head and returns it.
    func addHead(_ payload: CachePayload<Key, Value>) -> DoublyLinkedListNode<Key, Value> {
        let newNode = DoublyLinkedListNode(payload: payload)
        if let currentHead = head {
            newNode.next = currentHead
            currentHead.prev = newNode
        } else {
            // Empty list: the new node is also the tail.
            tail = newNode
        }
        head = newNode
        count += 1
        return newNode
    }

    /// Moves an existing node to the head (marks it most recently used).
    func moveToHead(_ node: DoublyLinkedListNode<Key, Value>) {
        if node !== head {
            // Unlink the node from its current position…
            let prevNode = node.prev
            let nextNode = node.next
            prevNode?.next = nextNode
            nextNode?.prev = prevNode
            if node === tail {
                tail = prevNode
            }
            // …and relink it in front of the current head.
            node.prev = nil
            node.next = head
            head?.prev = node
            head = node
        }
    }

    /// Removes and returns the tail node, or nil when the list is empty.
    func removeTail() -> DoublyLinkedListNode<Key, Value>? {
        if let currentTail = tail {
            let prevNode = currentTail.prev
            prevNode?.next = nil
            tail = prevNode
            // Fix: when the last node is removed, head must be cleared too.
            // The original left `head` pointing at the removed node, so the
            // next addHead linked the new node to a stale head and left
            // `tail` nil, corrupting the list (visible with LRUCache of
            // capacity 1).
            if tail == nil {
                head = nil
            }
            count -= 1
            return currentTail
        }
        return nil
    }
}
/// Key/value pair stored in a list node; the key is kept so eviction can
/// also remove the dictionary entry.
struct CachePayload<Key, Value> {
    let key: Key
    var value: Value
}
/// A fixed-capacity least-recently-used cache.
class LRUCache<Key: Hashable, Value> {
    // Dictionary gives O(1) key lookup; the list tracks usage order.
    private var nodesDict: [Key: DoublyLinkedListNode<Key, Value>] = [:]
    private let list = DoublyLinkedList<Key, Value>()
    private let capacity: Int

    init(capacity: Int) {
        self.capacity = capacity
    }

    /// Inserts or updates the value for `key`, evicting the least recently
    /// used entry when the cache is at capacity.
    func setValue(_ value: Value, for key: Key) {
        let payload = CachePayload(key: key, value: value)
        if let node = nodesDict[key] {
            // Existing key: overwrite in place and mark most recently used.
            node.payload = payload
            list.moveToHead(node)
        } else {
            if list.count >= capacity {
                // Evict the least recently used entry before inserting.
                if let tailKey = list.removeTail()?.payload.key {
                    nodesDict[tailKey] = nil
                }
            }
            let node = list.addHead(payload)
            nodesDict[key] = node
        }
    }

    /// Returns the value for `key` (marking it most recently used), or nil.
    func getValue(for key: Key) -> Value? {
        if let node = nodesDict[key] {
            list.moveToHead(node)
            return node.payload.value
        }
        return nil
    }
}
# freshshell dotfile manifest.  Each `fresh` line builds/links a source file
# into the destination given by --file/--bin; a `fresh-options` line sets
# default options for the lines that follow, until the bare `fresh-options`
# that resets them.

# Fresh
fresh freshshell/fresh bin/fresh --bin
# Fish
fresh config/fish/config.fish --file=~/.config/fish/config.fish
fresh config/fish/fish_plugins --file=~/.config/fish/fish_plugins
fresh config/fish/completions/poetry.fish --file=~/.config/fish/completions/poetry.fish
# JetBrains
fresh config/ideavimrc.vim --file=~/.ideavimrc
# Xorg
fresh-options --file
fresh config/xorg/\*
fresh-options
# Custom scripts
fresh-options --bin
fresh bin/\*
fresh-options
# Git
fresh-options --file
fresh config/git/\*
fresh-options
# Fontconfig
fresh config/fontconfig/conf.d/00-blacklist_nimbus.conf --file=~/.config/fontconfig/conf.d/00-blacklist_nimbus.conf
# i3
fresh config/i3status.conf --file=~/.i3status.conf
# All i3 fragments below are concatenated into one config file via --marker.
fresh-options --file=~/.config/i3/config --marker
fresh config/i3/appearance.conf
fresh config/i3/modifiers.conf
fresh config/i3/modes/\*.conf
fresh config/i3/keybindings/\*.conf
# theme
fresh config/i3/theme/one-dark.conf
# startup must be last
fresh config/i3/startup.conf
fresh-options
# .config
fresh config/ranger.conf --file=~/.config/ranger/rc.conf
fresh config/redshift.ini --file=~/.config/redshift/redshift.conf
# fresh config/flameshot.ini --file=~/.config/flameshot/flameshot.ini
fresh config/starship.toml --file=~/.config/starship.toml
fresh config/fdignore --file
fresh config/quokka.json --file=~/.quokka/config.json
# Dunst
fresh-options --file=~/.config/dunst/dunstrc
fresh config/dunst/default.conf
fresh config/dunst/theme/one-dark.conf
fresh-options
# Rofi
fresh config/rofi.rasi --file=~/.config/rofi/config.rasi
# GTK 2 & 3
fresh config/gtk/gtkrc-2.0 --file=~/.gtkrc-2.0
fresh config/gtk/settings.ini --file=~/.config/gtk-3.0/settings.ini
# SSH
fresh config/ssh/ssh.conf --file=~/.ssh/config
fresh config/ssh/pam_environment --file=~/.pam_environment
# GPG
fresh config/gpg/gpg.conf --file=~/.gnupg/gpg.conf
fresh config/gpg/sshcontrol --file=~/.gnupg/sshcontrol
fresh config/gpg/gpg-agent.conf --file=~/.gnupg/gpg-agent.conf
# Neovim
fresh-options --file=~/.config/nvim/init.vim --marker='"'
fresh config/nvim/plugs.vim # NOTE: Must be first
fresh config/nvim/theme.vim
fresh config/nvim/nvim-config/\*.vim
fresh config/nvim/plug-config/\*.vim
fresh-options
fresh config/nvim/coc.nvim/settings.json --file=~/.config/nvim/coc-settings.json
# Alacritty
fresh-options --file=~/.config/alacritty/alacritty.yml --marker=#
fresh config/alacritty/\*
# Pull the colour scheme from a third-party repository.
fresh eendroroy/alacritty-theme themes/one_dark.yaml
fresh-options
fresh-options --file=~/.gitignore_global --marker
# Global ignores
fresh github/gitignore Global/GPG.gitignore
fresh github/gitignore Global/Vim.gitignore
fresh github/gitignore Global/VisualStudioCode.gitignore
fresh github/gitignore Global/JetBrains.gitignore
# Language specific
fresh github/gitignore Go.gitignore
fresh github/gitignore Node.gitignore
fresh github/gitignore Rust.gitignore
fresh github/gitignore Kotlin.gitignore
fresh github/gitignore Java.gitignore
fresh-options
|
#!/bin/bash
# Convenience wrapper to start/stop a local Flink 1.9.3 standalone cluster.
#
# Usage: script start|stop

# Starts the Flink cluster.
function start_flink() {
    /opt/flink-1.9.3/bin/start-cluster.sh
    echo "flink started"
}

# Stops the Flink cluster.
function stop_flink() {
    /opt/flink-1.9.3/bin/stop-cluster.sh
    # Fix: "stoped" -> "stopped" in the user-facing message.
    echo "flink stopped"
}

# Fix: quote "$1" and "$0" so empty/whitespace arguments behave predictably.
case "$1" in
"start")
    start_flink
    ;;
"stop")
    stop_flink
    ;;
*)
    echo "Invalid Args!"
    echo "Usage: $(basename "$0") start|stop"
    ;;
esac
|
package com.sample.domain.dto.group;
import lombok.Data;
@Data
public class GroupCriteria extends Group {
// 住所がNULLのデータに絞り込む
Boolean onlyNullAddress;
}
|
def count_char(text, target):
    """Return how many characters of ``text`` equal ``target``.

    For a single-character ``target`` this is equivalent to the original
    explicit loop; ``str.count`` does the scan in C.
    """
    return text.count(target)


letter = 's'
string = "Mississippi"
count = count_char(string, letter)
print(count)
#!/bin/bash
# Build and run the labdecl server locally with profiling enabled.
set -e
set -x
# Scratch directory used by the app for LaTeX runs.
export LATEX_RUN_FOLDER=../scratch/LATEX_RUN_FOLDER
# OAuth credentials are not needed for a local development run.
export GOOGLE_CLIENT_ID=NULL
export GOOGLE_CLIENT_SECRET=NULL
export LUALATEX=$(which lualatex)
# Local listen address/port for the development server.
export LISTEN_HOST=127.0.0.1
export LISTEN_PORT=8081
export APPROOT="http://127.0.0.1:8081"
export DEVELOPMENT=1
# Build with profiling support, then run with GHC RTS profiling flags.
stack build --executable-profiling --haddock-deps
stack exec labdecl --RTS -- +RTS -N -qa -A8m -sstderr -pa
|
package cmd
import (
"os"
"strings"
"github.com/spf13/cobra"
)
// destroyCmd tears down every AWS resource previously created by
// apply/create: it selects the terraform workspace, destroys it and then
// deletes the workspace.
var destroyCmd = &cobra.Command{
	Use:   "destroy",
	Short: "Destroys all AWS resources created by the apply/create",
	Long:  `This command runs terraform workspace select, terraform destroy and terraform workspace delete`,
	Run: func(cmd *cobra.Command, args []string) {
		configFile, readFileError := ReadFile(WEBSOCKET_CONFIG_FILE_PATH)
		if len(configFile) == 0 {
			errorMessage.Println(CONFIG_FILE_NOT_FOUND_MESSAGE)
			return
		}
		if readFileError != nil {
			errorMessage.Println(readFileError)
			return
		}
		argumentsValid, argumentInvalidMessage, argumentsForMessage := checkForValidArguments("terraform destroy", args, configFile)
		if !argumentsValid {
			errorMessage.Printf(argumentInvalidMessage, strings.Join(argumentsForMessage, " "))
			return
		}
		// Fix: do not name a variable "error" — it shadows the builtin
		// error type; idiomatic Go uses "err".
		currentDirectory, err := os.Getwd()
		if err != nil {
			errorMessage.Println(CURRENT_DIRECTORY_ERROR_MESSAGE, err)
			return
		}
		// NOTE(review): indexing args[0]/args[1] assumes
		// checkForValidArguments enforced at least two arguments — confirm.
		projectName := strings.ToLower(args[0])
		environment := strings.ToLower(args[1])
		SelectWorkSpaceTerraform(configFile, currentDirectory, environment)
		DestroyTerraform(configFile, currentDirectory, projectName, environment)
		DeleteWorkSpaceTerraform(configFile, currentDirectory, projectName, environment)
	},
}
// init registers the destroy sub-command under the parent terraform command.
func init() {
	terraformCmd.AddCommand(destroyCmd)
}
|
package de.lmu.cis.ocrd.profile;
/**
 * A profiler produces a {@link Profile} from some input.
 */
public interface Profiler {
	/**
	 * Runs the profiling step.
	 *
	 * @return the resulting profile
	 * @throws Exception if the underlying profiling process fails
	 */
	Profile profile() throws Exception;
}
|
/**
* @implements {SchemaFilter}
*/
class SchemaFilterDecimal
{
filter(options, data)
{
return options.collection
? this.filterCollection(data)
: this.filterSingle(data)
}
filterCollection(data)
{
if(!Array.isArray(data))
return data
const collection = []
for(const item of data)
{
const filtered = this.filterSingle(item)
collection.push(filtered)
}
return collection
}
filterSingle(data)
{
if(isNaN(data) === false)
return +data
return data
}
}
module.exports = SchemaFilterDecimal
|
<reponame>kariminf/KSimpleNLG
/*
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* The Original Code is "Simplenlg".
*
* The Initial Developer of the Original Code is <NAME>, <NAME> and <NAME>.
* Portions created by <NAME>, <NAME> and <NAME> are Copyright (C) 2010-11 The University of Aberdeen. All Rights Reserved.
*
* Contributor(s): <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
*/
package simplenlg.syntax.english.nonstatic;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
import simplenlg.features.Feature;
import simplenlg.features.Form;
import simplenlg.features.InternalFeature;
import simplenlg.features.InterrogativeType;
import simplenlg.features.NumberAgreement;
import simplenlg.features.Tense;
import simplenlg.framework.CoordinatedPhraseElement;
import simplenlg.framework.InflectedWordElement;
import simplenlg.framework.LexicalCategory;
import simplenlg.framework.NLGElement;
import simplenlg.framework.PhraseCategory;
import simplenlg.framework.PhraseElement;
import simplenlg.framework.StringElement;
import simplenlg.framework.WordElement;
import simplenlg.phrasespec.VPPhraseSpec;
import simplenlg.syntax.AbstractVerbPhraseHelper;
/**
* <p>
* This class contains methods to help the syntax processor realise verb
* phrases for English. It adds auxiliary verbs into the element tree as required.
* It is a non static version by vaudrypl of the class of
* the same name in the <code>simplenlg.syntax.english</code> package.
* </p>
* modified by vaudrypl :
* abstract class replaced by public class
* private static methods replaced by protected methods
* parent.realise(element) replaced by element.realiseSyntax()
* SyntaxProcessor parent arguments removed
* PhraseHelper replaced by phrase.getPhraseHelper()
* now extends AbstractVerbPhraseHelper
*
* some methods now moved to AbstractClauseHelper
*
* @author <NAME>, University of Aberdeen.
* @version 4.0
*/
public class VerbPhraseHelper extends AbstractVerbPhraseHelper {
/**
 * Splits the stack of verb components into two sections. One being the verb
 * associated with the main verb group, the other being associated with the
 * auxiliary verb group.
 *
 * @param vgComponents
 *            the stack of verb components in the verb group.
 * @param mainVerbRealisation
 *            the main group of verbs.
 * @param auxiliaryRealisation
 *            the auxiliary group of verbs.
 */
@Override
protected void splitVerbGroup(Stack<NLGElement> vgComponents,
Stack<NLGElement> mainVerbRealisation,
Stack<NLGElement> auxiliaryRealisation) {
boolean mainVerbSeen = false;
// Iteration runs from the bottom of the stack: the main verb (possibly
// preceded by "not") comes first, auxiliaries follow.
for (NLGElement word : vgComponents) {
if (!mainVerbSeen) {
mainVerbRealisation.push(word);
// NOTE(review): `word` is an NLGElement compared against the String
// "not" — this only works if NLGElement.equals accepts strings;
// confirm it ever matches, otherwise the first word always ends the
// main-verb section.
if (!word.equals("not")) { //$NON-NLS-1$
mainVerbSeen = true;
}
} else {
auxiliaryRealisation.push(word);
}
}
}
/**
 * Creates a stack of verbs for the verb phrase. Additional auxiliary verbs
 * are added as required based on the features of the verb phrase.
 *
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @return the verb group as a <code>Stack</code> of <code>NLGElement</code>
 *         s.
 */
@Override
@SuppressWarnings("deprecation")
protected Stack<NLGElement> createVerbGroup(PhraseElement phrase) {
String actualModal = null;
Object formValue = phrase.getFeature(Feature.FORM);
Tense tenseValue = phrase.getTense();
String modal = phrase.getFeatureAsString(Feature.MODAL);
boolean modalPast = false;
Stack<NLGElement> vgComponents = new Stack<NLGElement>();
boolean interrogative = phrase.hasFeature(Feature.INTERROGATIVE_TYPE);
// Gerunds and infinitives are always realised in the present tense.
if (Form.GERUND.equals(formValue) || Form.INFINITIVE.equals(formValue)) {
tenseValue = Tense.PRESENT;
}
// Pick the word that fronts the group: "to" for infinitives,
// "will"/"could" for future/conditional (unless the head is a coordinated
// phrase in a non-interrogative clause), or the explicitly set modal.
if (Form.INFINITIVE.equals(formValue)) {
actualModal = "to"; //$NON-NLS-1$
} else if (formValue == null || Form.NORMAL.equals(formValue)
|| Form.SUBJUNCTIVE.equals(formValue)) {
if (Tense.FUTURE.equals(tenseValue)
&& modal == null
&& ((!(phrase.getHead() instanceof CoordinatedPhraseElement)) || (phrase
.getHead() instanceof CoordinatedPhraseElement && interrogative))) {
actualModal = "will"; //$NON-NLS-1$
} else if (Tense.CONDITIONAL.equals(tenseValue)
&& modal == null
&& ((!(phrase.getHead() instanceof CoordinatedPhraseElement)) || (phrase
.getHead() instanceof CoordinatedPhraseElement && interrogative))) {
actualModal = "could"; //$NON-NLS-1$
} else if (modal != null) {
actualModal = modal;
// Past tense with a modal is realised as modal + have + participle.
if (Tense.PAST.equals(tenseValue)) {
modalPast = true;
}
}
}
// Build the group back-to-front: particles, head verb, then "be"/"have"
// auxiliaries, negation, and finally the modal.
pushParticles(phrase, vgComponents);
NLGElement frontVG = grabHeadVerb(phrase, tenseValue, modal != null);
checkImperativeInfinitive(formValue, frontVG);
if (phrase.getFeatureAsBoolean(Feature.PASSIVE).booleanValue()) {
frontVG = addBe(frontVG, vgComponents, Form.PAST_PARTICIPLE);
}
if (phrase.getFeatureAsBoolean(Feature.PROGRESSIVE).booleanValue()) {
frontVG = addBe(frontVG, vgComponents, Form.PRESENT_PARTICIPLE);
}
if (phrase.getFeatureAsBoolean(Feature.PERFECT).booleanValue()
|| modalPast) {
frontVG = addHave(frontVG, vgComponents, modal, tenseValue);
}
frontVG = pushIfModal(actualModal != null, phrase, frontVG,
vgComponents);
frontVG = createNot(phrase, vgComponents, frontVG, modal != null);
if (frontVG != null) {
pushFrontVerb(phrase, vgComponents, frontVG, formValue,
interrogative);
}
pushModal(actualModal, phrase, vgComponents);
return vgComponents;
}
/**
 * Pushes the modal onto the stack of verb components, unless the phrase
 * requests that modals be ignored.
 *
 * @param actualModal
 *            the modal to be used.
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 */
protected void pushModal(String actualModal, PhraseElement phrase,
        Stack<NLGElement> vgComponents) {
    if (actualModal == null) {
        return;
    }
    boolean ignoreModal = phrase.getFeatureAsBoolean(
            InternalFeature.IGNORE_MODAL).booleanValue();
    if (!ignoreModal) {
        InflectedWordElement modalWord = new InflectedWordElement(
                actualModal, LexicalCategory.MODAL);
        vgComponents.push(modalWord);
    }
}
/**
 * Pushes the front verb onto the stack of verb components.
 *
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 * @param frontVG
 *            the first verb in the verb group.
 * @param formValue
 *            the <code>Form</code> of the phrase.
 * @param interrogative
 *            <code>true</code> if the phrase is interrogative.
 */
protected void pushFrontVerb(PhraseElement phrase,
Stack<NLGElement> vgComponents, NLGElement frontVG,
Object formValue, boolean interrogative) {
// Non-finite forms carry their participle form directly.
if (Form.GERUND.equals(formValue)) {
frontVG.setFeature(Feature.FORM, Form.PRESENT_PARTICIPLE);
vgComponents.push(frontVG);
} else if (Form.PAST_PARTICIPLE.equals(formValue)) {
frontVG.setFeature(Feature.FORM, Form.PAST_PARTICIPLE);
vgComponents.push(frontVG);
} else if (Form.PRESENT_PARTICIPLE.equals(formValue)) {
frontVG.setFeature(Feature.FORM, Form.PRESENT_PARTICIPLE);
vgComponents.push(frontVG);
} else if ((!(formValue == null || Form.NORMAL.equals(formValue)
|| Form.SUBJUNCTIVE.equals(formValue)) || interrogative)
&& !isCopular(phrase.getHead()) && vgComponents.isEmpty()) {
// Other marked forms (or interrogatives) with a non-copular verb and an
// otherwise empty group: suppress morphology unless the question asks
// about the subject.
if (!InterrogativeType.WHO_SUBJECT.equals(phrase
.getFeature(Feature.INTERROGATIVE_TYPE))) {
frontVG.setFeature(InternalFeature.NON_MORPH, true);
}
vgComponents.push(frontVG);
} else {
// Default: a finite verb agreeing with the phrase in tense, person and
// number.
NumberAgreement numToUse = determineNumber(phrase.getParent(),
phrase);
frontVG.setTense(phrase.getTense());
frontVG.setFeature(Feature.PERSON, phrase
.getFeature(Feature.PERSON));
frontVG.setFeature(Feature.NUMBER, numToUse);
vgComponents.push(frontVG);
}
}
/**
 * Adds <em>not</em> to the stack if the phrase is negated.
 *
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 * @param frontVG
 *            the first verb in the verb group.
 * @param hasModal
 *            the phrase has a modal
 * @return the new element for the front of the group.
 */
protected NLGElement createNot(PhraseElement phrase,
Stack<NLGElement> vgComponents, NLGElement frontVG, boolean hasModal) {
NLGElement newFront = frontVG;
if (phrase.isNegated()) {
// When auxiliaries are already present (or the front verb is copular),
// "not" simply follows the first auxiliary.
if (!vgComponents.empty() || frontVG != null && isCopular(frontVG)) {
vgComponents.push(new InflectedWordElement(
"not", LexicalCategory.ADVERB)); //$NON-NLS-1$
} else {
// Otherwise do-insertion is required: push the main verb (negated
// unless a modal is present), then "not", and front the group with
// an inflected "do".
if (frontVG != null && !hasModal) {
frontVG.setNegated(true);
vgComponents.push(frontVG);
}
vgComponents.push(new InflectedWordElement(
"not", LexicalCategory.ADVERB)); //$NON-NLS-1$
// vaudrypl changed InflectedWordElement constructor call
WordElement auxiliary = (WordElement) phrase.getFactory().createWord("do", LexicalCategory.VERB);
newFront = new InflectedWordElement(auxiliary); //$NON-NLS-1$
}
}
return newFront;
}
/**
 * Pushes the front verb on to the stack if the phrase has a modal.
 *
 * @param hasModal
 *            the phrase has a modal
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @param frontVG
 *            the first verb in the verb group.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 * @return the new element for the front of the group.
 */
protected NLGElement pushIfModal(boolean hasModal,
        PhraseElement phrase, NLGElement frontVG,
        Stack<NLGElement> vgComponents) {
    // Without a modal (or when modals are ignored) the front verb is
    // left untouched.
    if (!hasModal
            || phrase.getFeatureAsBoolean(InternalFeature.IGNORE_MODAL)
                    .booleanValue()) {
        return frontVG;
    }
    if (frontVG != null) {
        // The verb after a modal keeps its base form.
        frontVG.setFeature(InternalFeature.NON_MORPH, true);
        vgComponents.push(frontVG);
    }
    return null;
}
/**
 * Adds <em>have</em> to the stack.
 *
 * @param frontVG
 *            the first verb in the verb group; may be {@code null}.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 * @param modal
 *            the modal to be used.
 * @param tenseValue
 *            the <code>Tense</code> of the phrase.
 * @return the new element for the front of the group, or {@code null}
 *         when there is no front verb to attach the auxiliary to.
 */
protected NLGElement addHave(NLGElement frontVG,
        Stack<NLGElement> vgComponents, String modal, Tense tenseValue) {
    // Fix: the original called frontVG.getLexicon() even when frontVG was
    // null, throwing a NullPointerException; without a front verb there is
    // no lexicon to look "have" up in, so return null (callers already
    // null-check the returned front element).
    if (frontVG == null) {
        return null;
    }
    frontVG.setFeature(Feature.FORM, Form.PAST_PARTICIPLE);
    vgComponents.push(frontVG);
    // vaudrypl changed InflectedWordElement constructor call
    WordElement auxiliary = frontVG.getLexicon().lookupWord("have", LexicalCategory.VERB); //$NON-NLS-1$
    NLGElement newFront = new InflectedWordElement(auxiliary);
    newFront.setTense(tenseValue);
    if (modal != null) {
        newFront.setFeature(InternalFeature.NON_MORPH, true);
    }
    return newFront;
}
/**
 * Adds the <em>be</em> verb to the front of the group.
 *
 * @param frontVG
 *            the first verb in the verb group; may be {@code null}.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 * @param frontForm
 *            the form the current front verb is to take.
 * @return the new element for the front of the group, or {@code null}
 *         when there is no front verb to attach the auxiliary to.
 */
protected NLGElement addBe(NLGElement frontVG,
        Stack<NLGElement> vgComponents, Form frontForm) {
    // Fix: the original called frontVG.getLexicon() even when frontVG was
    // null, throwing a NullPointerException; without a front verb there is
    // no lexicon to look "be" up in, so return null (callers already
    // null-check the returned front element).
    if (frontVG == null) {
        return null;
    }
    frontVG.setFeature(Feature.FORM, frontForm);
    vgComponents.push(frontVG);
    // vaudrypl changed InflectedWordElement constructor call
    WordElement auxiliary = frontVG.getLexicon().lookupWord("be", LexicalCategory.VERB); //$NON-NLS-1$
    return new InflectedWordElement(auxiliary);
}
/**
 * Checks to see if the phrase is in imperative, infinitive or bare
 * infinitive form. If it is then no morphology is done on the main verb.
 *
 * @param formValue
 *            the <code>Form</code> of the phrase.
 * @param frontVG
 *            the first verb in the verb group.
 */
protected void checkImperativeInfinitive(Object formValue,
        NLGElement frontVG) {
    if (frontVG == null) {
        return;
    }
    boolean unmorphedForm = Form.IMPERATIVE.equals(formValue)
            || Form.INFINITIVE.equals(formValue)
            || Form.BARE_INFINITIVE.equals(formValue);
    if (unmorphedForm) {
        frontVG.setFeature(InternalFeature.NON_MORPH, true);
    }
}
/**
 * Grabs the head verb of the verb phrase and sets it to future tense if the
 * phrase is future tense. It also turns off negation if the group has a
 * modal.
 *
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @param tenseValue
 *            the <code>Tense</code> of the phrase.
 * @param hasModal
 *            <code>true</code> if the verb phrase has a modal.
 * @return the modified head element
 */
protected NLGElement grabHeadVerb(PhraseElement phrase,
        Tense tenseValue, boolean hasModal) {
    NLGElement headVerb = phrase.getHead();
    // Bare word: wrap it so inflection features can be attached.
    if (headVerb instanceof WordElement) {
        headVerb = new InflectedWordElement((WordElement) headVerb);
    }
    if (headVerb != null) {
        if (Tense.FUTURE.equals(tenseValue)) {
            headVerb.setTense(Tense.FUTURE);
        }
        if (hasModal) {
            // Negation is expressed on the modal, not the head verb.
            headVerb.setNegated(false);
        }
    }
    return headVerb;
}
/**
 * Pushes the particles of the main verb onto the verb group stack.
 *
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @param vgComponents
 *            the stack of verb components in the verb group.
 */
protected void pushParticles(PhraseElement phrase,
        Stack<NLGElement> vgComponents) {
    Object particle = phrase.getFeature(Feature.PARTICLE);
    if (particle instanceof String) {
        vgComponents.push(new StringElement((String) particle));
    } else if (particle instanceof NLGElement) {
        // Fix: realise the particle and push the realised element. The
        // original pushed the raw element and discarded the result of
        // realiseSyntax() — vgComponents.push(e).realiseSyntax() — so the
        // unrealised particle ended up in the group.
        vgComponents.push(((NLGElement) particle).realiseSyntax());
    }
}
/**
 * Determines the number agreement for the phrase ensuring that any number
 * agreement on the parent element is inherited by the phrase.
 *
 * @param parent
 *            the parent element of the phrase.
 * @param phrase
 *            the <code>PhraseElement</code> representing this noun phrase.
 * @return the <code>NumberAgreement</code> to be used for the phrase.
 */
protected NumberAgreement determineNumber(NLGElement parent,
        PhraseElement phrase) {
    Object numberValue = phrase.getFeature(Feature.NUMBER);
    // instanceof is null-safe, so an explicit null check is unnecessary.
    NumberAgreement number = numberValue instanceof NumberAgreement
            ? (NumberAgreement) numberValue
            : NumberAgreement.SINGULAR;
    if (parent instanceof PhraseElement) {
        // Expletive-subject clauses with a copular verb ("there is/are")
        // take their number from the complements instead.
        boolean expletiveCopularClause = parent.isA(PhraseCategory.CLAUSE)
                && phrase.getPhraseHelper().isExpletiveSubject((PhraseElement) parent)
                && isCopular(phrase.getHead());
        if (expletiveCopularClause) {
            boolean plural = hasPluralComplement(phrase
                    .getFeatureAsElementList(InternalFeature.COMPLEMENTS));
            number = plural ? NumberAgreement.PLURAL
                    : NumberAgreement.SINGULAR;
        }
    }
    return number;
}
/**
 * Checks to see if any of the complements to the phrase are plural.
 *
 * @param complements
 *            the list of complements of the phrase.
 * @return <code>true</code> if any of the complements are plural.
 */
protected boolean hasPluralComplement(List<NLGElement> complements) {
    for (NLGElement complement : complements) {
        // Only noun-phrase complements carry relevant number agreement.
        if (complement == null
                || !complement.isA(PhraseCategory.NOUN_PHRASE)) {
            continue;
        }
        Object numberValue = complement.getFeature(Feature.NUMBER);
        if (NumberAgreement.PLURAL.equals(numberValue)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks to see if the base form of the word is copular, i.e. <em>be</em>.
 *
 * @param element
 *            the element to be checked
 * @return <code>true</code> if the element is copular.
 */
@Override
public boolean isCopular(NLGElement element) {
    String baseForm = null;
    if (element instanceof InflectedWordElement) {
        baseForm = ((InflectedWordElement) element).getBaseForm();
    } else if (element instanceof WordElement) {
        baseForm = ((WordElement) element).getBaseForm();
    }
    // equalsIgnoreCase(null) is false, covering all other element types.
    return "be".equalsIgnoreCase(baseForm); //$NON-NLS-1$
}
/**
* Add a modifier to a verb phrase. Use heuristics to decide where it goes.
*
* code moved from simplenl.phrasespec.VPPhraseSpec.addModifier(Object modifier)
* by vaudrypl
*
* @param verbPhrase
* @param modifier
*
*/
@Override
public void addModifier(VPPhraseSpec verbPhrase, Object modifier) {
    // Heuristic placement: a single-word adverb becomes a premodifier,
    // everything else becomes a postmodifier.
    if (modifier == null)
        return;
    // Convert the modifier to an NLGElement where possible; a single
    // lexicographic word (no spaces) is looked up in the lexicon.
    NLGElement modifierElement = null;
    if (modifier instanceof NLGElement) {
        modifierElement = (NLGElement) modifier;
    } else if (modifier instanceof String) {
        String modifierString = (String) modifier;
        if (modifierString.length() > 0 && !modifierString.contains(" ")) {
            modifierElement = verbPhrase.getFactory().createWord(modifier, LexicalCategory.ANY);
        }
    }
    // No element could be built: treat the modifier as a complex string.
    // (As in the original, a non-String, non-NLGElement argument fails here
    // with a ClassCastException.)
    if (modifierElement == null) {
        verbPhrase.addPostModifier((String) modifier);
        return;
    }
    // Extract the underlying WordElement if the modifier is a single word.
    // The extra null checks the original had here were dead code:
    // modifierElement is known to be non-null at this point.
    WordElement modifierWord = null;
    if (modifierElement instanceof WordElement) {
        modifierWord = (WordElement) modifierElement;
    } else if (modifierElement instanceof InflectedWordElement) {
        modifierWord = ((InflectedWordElement) modifierElement).getBaseWord();
    }
    // Adverbs go before the verb; everything else after.
    if (modifierWord != null && modifierWord.getCategory() == LexicalCategory.ADVERB) {
        verbPhrase.addPreModifier(modifierWord);
        return;
    }
    // default case
    verbPhrase.addPostModifier(modifierElement);
}
}
|
#include <iostream>
#include <string>
#include <vector>
using namespace std;
// Plain aggregate describing one student record.
// NOTE: member order is load-bearing — main() uses positional brace
// initialization (Student {id, name, lastName, height}), so do not reorder.
struct Student {
int id;          // student identifier
string name;     // first name
string lastName; // family name
int height;      // height value; presumably centimetres — TODO confirm unit
};
int main() {
// Initialize vector of students
vector<Student> students;
// Add student data
students.push_back(Student {1, "Johnny", "Smith", 190});
students.push_back(Student {2, "Jane", "Doe", 176});
students.push_back(Student {3, "Bill", "Nye", 180});
// Iterate over the students vector and increase the student height
for(auto& student : students) {
student.height += 10; //Increase height by 10
}
// Print student data
cout << "Updated student data" << endl;
for(const auto& student : students)
cout << student.id << " " << student.name << " " << student.lastName << " " << student.height << endl;
return 0;
} |
using System;
/// <summary>
/// Classic bottom-up coin-change DP: minimum number of coins to form an amount.
/// </summary>
public class MinCoinChange
{
    /// <summary>
    /// Returns the minimum number of coins from <paramref name="coins"/>
    /// that sum to <paramref name="amount"/>, or -1 when impossible.
    /// </summary>
    /// <remarks>
    /// Renamed from "MinCoinChange": in C# a member may not share its
    /// enclosing type's name (compile error CS0542 in the original).
    /// </remarks>
    public int MinCoins(int[] coins, int amount)
    {
        // result[i] = minimum coins needed for amount i.
        // amount + 1 acts as "infinity": no valid solution uses more coins.
        int[] result = new int[amount + 1];
        result[0] = 0;
        for (int i = 1; i < result.Length; i++)
        {
            result[i] = amount + 1;
        }
        // Extend every reachable amount by each coin denomination.
        for (int i = 1; i <= amount; i++)
        {
            for (int j = 0; j < coins.Length; j++)
            {
                if (coins[j] <= i)
                {
                    result[i] = Math.Min(result[i], result[i - coins[j]] + 1);
                }
            }
        }
        // Still "infinity" => the amount cannot be formed with these coins.
        return result[amount] > amount ? -1 : result[amount];
    }
}
#!/bin/bash
## Create the database by feeding a SQL schema file into sqlite3.
## Writes to the database file named by the global $DATABASEPATH.
## (The original header claimed two arguments; only the schema path is used.)
# 1: Path to sql schema file
function createDB {
local LDBSCHEMAFILE=$1
sqlite3 "$DATABASEPATH" < "$LDBSCHEMAFILE"
}
## Import from CSV to table.
## Uses the global $DATABASEPATH as the target database.
# 1: Path to csv file | 2: Table name to import
function importCSVToTable {
local LPATHTOCSV=$1
local LTABLE=$2
# Quote $DATABASEPATH (paths with spaces) and keep user data out of the
# printf FORMAT string: the original expanded $LPATHTOCSV/$LTABLE inside the
# format, so a '%' or '\' in a path corrupted the generated dot-commands.
printf '.mode csv\n.import %s %s\n' "$LPATHTOCSV" "$LTABLE" | sqlite3 "$DATABASEPATH"
return $?
}
## Query to sqlite
# 1: sql string
# print: Result of query
function query {
local LSQL=$1
# %b keeps the original's backslash-escape expansion (e.g. '\n' in callers'
# SQL strings) while preventing '%' characters in the SQL from being
# interpreted as printf format specifiers.
printf '%b' "$LSQL" | sqlite3 "$DATABASEPATH"
return $?
}
## Query to sqlite with spaces and headers
# 1: sql string
# print: Result of query
function prettyQuery {
local LSQL=$1
# %b: same fix as query() — preserve '\n' expansion, neutralize '%' in SQL.
printf '%b' "$LSQL" | sqlite3 -column -header "$DATABASEPATH"
return $?
}
|
package libs.trustconnector.scdp.smartcard.checkrule.tlv;
import libs.trustconnector.scdp.util.tlv.*;
import java.util.*;
import libs.trustconnector.scdp.util.*;
import libs.trustconnector.scdp.util.ByteArray;
import libs.trustconnector.scdp.util.Util;
import libs.trustconnector.scdp.util.tlv.TagList;
/**
 * Response-TLV check rule that compares a slice of a TLV value (starting at
 * the inherited {@code valueOff} offset) against an expected byte array.
 */
public class ResponseTLVCheckRuleBytes extends ResponseTLVCheckRule
{
// Raw bytes extracted from the last checked TLV value. The "retVlue"
// spelling is kept: the field is protected and renaming could break
// subclasses elsewhere in the project.
protected byte[] retVlue;
// Expected bytes to match against; populated by setMatch(...).
protected byte[] expValue;
// NOTE(review): never assigned or read in this class — possibly a mask
// intended for partial matches. Confirm before removing.
protected byte[] expValueM;
public ResponseTLVCheckRuleBytes(final String name, final TagList tagPath) {
super(name, tagPath, 0);
}
public ResponseTLVCheckRuleBytes(final String name, final TagList tagPath, final int valueOff) {
super(name, tagPath, valueOff);
}
public ResponseTLVCheckRuleBytes(final String name, final TagList tagPath, final int valueOff, final Map<String, String> valueInfoMap) {
super(name, tagPath, valueOff, valueInfoMap);
}
/**
 * Copies value[valueOff..] into {@link #retVlue} and, when an expected
 * value was set, compares the two slices.
 *
 * @param value the raw TLV value bytes
 * @return true when no match was requested, or when the slices compare
 *         equal (assumes Util.arrayCompare returns equality as a boolean —
 *         TODO confirm against its definition)
 */
@Override
public boolean checkTLVValue(final byte[] value) {
// retLen and retVlue.length are the same expression computed twice.
final int retLen = value.length - this.valueOff;
this.retVlue = new byte[value.length - this.valueOff];
System.arraycopy(value, this.valueOff, this.retVlue, 0, retLen);
// Inherited retValue presumably holds a printable form — confirm in base class.
this.retValue = ByteArray.convert(this.retVlue);
return !this.matchSet || Util.arrayCompare(this.retVlue, 0, this.expValue, 0, retLen);
}
/** Sets the expected bytes (defensive copy) and arms the comparison. */
public void setMatch(final byte[] expValue) {
this.matchSet = true;
this.expValue = expValue.clone();
}
/** Sets the expected bytes from a hex string and arms the comparison. */
public void setMatch(final String expValueStr) {
this.matchSet = true;
this.expValue = ByteArray.convert(expValueStr);
}
/** @return the raw bytes captured by the last checkTLVValue call. */
public byte[] getReturnValue() {
return this.retVlue;
}
}
|
# Evaluate the epoch-13 model on the WikiText-103 validation set with
# trigram shuffling inside sentences and last-sixth evaluation.
python transformers/examples/language-modeling/run_language_modeling.py \
  --model_name_or_path train-outputs/512+512+512-only-pad/13-model \
  --tokenizer_name model-configs/1536-config \
  --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw \
  --output_dir eval-outputs/512+512+512-only-pad/13-512+512+512-STWS-256 \
  --do_eval \
  --per_device_eval_batch_size 1 \
  --dataloader_drop_last \
  --augmented \
  --augmentation_function shuffle_trigrams_within_sentences_first_third_sixth \
  --eval_function last_sixth_eval
import chatterbot
from chatterbot.trainers import ChatterbotCorpusTrainer

# Build the bot and train it once at startup on the bundled English corpus
# (requires the third-party chatterbot + chatterbot-corpus packages).
bot = chatterbot.ChatBot('Language Bot')
trainer = ChatterbotCorpusTrainer(bot)
trainer.train('chatterbot.corpus.english')
def chatbot_response(message):
    """Return the bot's reply text for *message* (uses the module-level bot)."""
    reply = bot.get_response(message)
    return reply.text
# Simple REPL: chat until the user types exactly 'Bye' (surrounding
# whitespace ignored), then say goodbye and stop.
while True:
    message = input('You: ')
    if message.strip() == 'Bye':
        print('Chatbot: Bye!')
        break
    print('Chatbot:', chatbot_response(message))
#!/bin/bash
# ANSI color escapes for status output.
COLOR_RED='\033[0;31m'
COLOR_GREEN='\033[0;32m'
COLOR_YELLOW='\033[0;33m'
COLOR_OFF='\033[0m' # No Color
# Absolute directory of this script (robust to the caller's cwd).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
NOW=$(date +"%m-%d-%Y-%T")
# Test matrix: both code paths, each with and without background load.
declare -a test_type=("optimized" "unoptimized")
declare -a load=("load" "no-load")
# config.sh supplies DUT_*, PKTGEN_* and MOONGEN_BUILD_DIR variables.
source ${DIR}/../config.sh
DUT_TEST_SCRIPT_PATH="${DUT_REMOTE_MORPHEUS_FOLDER}"/experiments/katran/start_throughput_test_dut.sh
DUT_TEST_SCRIPT_RESULT="${DUT_REMOTE_MORPHEUS_FOLDER}"/experiments/katran/config_result
# Lua templates; *_NEW copies get MAC placeholders substituted in.
KATRAN_LATENCY_SCRIPT_ORIGIN="${DIR}"/config/latency/latency-katran.lua
KATRAN_LATENCY_SCRIPT_NON_OPT_ORIGIN="${DIR}"/config/latency/latency-katran-non-opt.lua
KATRAN_LATENCY_SCRIPT_NEW="${DIR}"/config/latency/latency-katran_new.lua
KATRAN_LATENCY_SCRIPT_NON_OPT_NEW="${DIR}"/config/latency/latency-katran-non-opt_new.lua
RESULTS_FOLDER_PATH=${DIR}/results/latency
# Duration of each MoonGen run, in seconds.
TEST_DURATION=30
## Print usage information for this script.
function show_help() {
usage="$(basename "$0") [-h] [-r #runs]
Run Katran latency tests for optimized and unoptimized path
where:
-h show this help text
-r number of runs for the test"
echo "$usage"
}
## Kill polycubed on the DUT over SSH. The kill is issued twice with a
## pause in between, presumably to catch a respawned or slow-to-exit
## daemon — TODO confirm.
function cleanup_environment {
ssh ${DUT_SERVER_USER}@${DUT_SERVER_IP} << EOF
sudo killall polycubed &> /dev/null
sleep 5
sudo killall polycubed &> /dev/null
EOF
}
## Run the DUT configuration script remotely and persist its exit code to
## $DUT_TEST_SCRIPT_RESULT so the local side can poll it.
# 1: extra flag forwarded after -l (e.g. "-m" for Morpheus runs).
# NOTE(review): the baseline caller invokes this with NO argument, so the
# remote script receives a bare "-l" with an empty value — confirm the DUT
# script tolerates that.
function start_config_remote {
local morpheus_flag=$1
ssh ${DUT_SERVER_USER}@${DUT_SERVER_IP} << EOF
chmod +x ${DUT_TEST_SCRIPT_PATH}
${DUT_TEST_SCRIPT_PATH} -l $morpheus_flag
echo \$? > ${DUT_TEST_SCRIPT_RESULT}
EOF
}
## EXIT/SIGINT trap: remove generated Lua scripts, stop remote daemons,
## then clear the trap and terminate successfully.
function cleanup {
set +e
rm ${KATRAN_LATENCY_SCRIPT_NEW} &> /dev/null
rm ${KATRAN_LATENCY_SCRIPT_NON_OPT_NEW} &> /dev/null
echo -e "${COLOR_YELLOW}Killing polycubed & perf${COLOR_OFF}"
cleanup_environment
trap - EXIT
exit 0
}
# Parse command-line options (-h help, -r number of runs).
# NOTE(review): both invalid options and a missing -r argument exit 0;
# a nonzero exit would be more conventional for bad usage.
while getopts :r:h option; do
case "${option}" in
h|\?)
show_help
exit 0
;;
r) NUMBER_RUNS=${OPTARG}
;;
:)
echo "Option -$OPTARG requires an argument." >&2
show_help
exit 0
;;
esac
done
# Default to 5 runs when -r was not given.
if [ -z ${NUMBER_RUNS+x} ]; then
NUMBER_RUNS=5
echo -e "${COLOR_YELLOW}[ INFO ] Number of runs not specified, using default value: ${NUMBER_RUNS}${COLOR_OFF}"
fi
# Check if sudo without password is enabled on this server
sudo -n true &> /dev/null
if [ $? == 0 ]; then
echo -e "${COLOR_GREEN}[ INFO ] Sudo without password is enabled. ${COLOR_OFF}"
else
echo -e "${COLOR_RED}[ ERROR ] You should enable sudo without password to continue with this script ${COLOR_OFF}"
exit 1
fi
# Check if the server can connect without password
ssh -o PasswordAuthentication=no -o BatchMode=yes ${DUT_SERVER_USER}@${DUT_SERVER_IP} exit &>/dev/null
if [ $? == 0 ]; then
echo -e "${COLOR_GREEN}[ INFO ] Can connect: let's continue. ${COLOR_OFF}"
else
# BUG FIX: the original failure message claimed the client CAN connect.
echo -e "${COLOR_RED}[ ERROR ] This client cannot connect to the DUT without a password. ${COLOR_OFF}"
exit 1
fi
# Check if sudo without password is enabled on the remote server
sudo_nopass_enabled=$(ssh ${DUT_SERVER_USER}@${DUT_SERVER_IP} sudo -n true &> /dev/null; echo "$?")
if [ $sudo_nopass_enabled == 0 ]; then
echo -e "${COLOR_GREEN}[ INFO ] Sudo without password is enabled on remote machine. ${COLOR_OFF}"
else
echo -e "${COLOR_RED}[ ERROR ] You should enable sudo without password on the remote DUT to continue with this script ${COLOR_OFF}"
exit 1
fi
# Ensure the environment is always cleaned up, even on Ctrl-C.
trap cleanup EXIT SIGINT
set -e
# Start from an empty results directory.
TEST_RESULT_DIR=${RESULTS_FOLDER_PATH}
sudo rm -rf ${TEST_RESULT_DIR} &> /dev/null
mkdir -p ${TEST_RESULT_DIR} &> /dev/null
# Instantiate the MoonGen Lua templates: substitute the MAC placeholders
# with the values from config.sh.
cp ${KATRAN_LATENCY_SCRIPT_ORIGIN} ${KATRAN_LATENCY_SCRIPT_NEW}
sed -i -e 's/<PKTGEN_MAC_IF1>/'${PKTGEN_MAC_IF1}'/g' ${KATRAN_LATENCY_SCRIPT_NEW}
sed -i -e 's/<PKTGEN_MAC_IF2>/'${PKTGEN_MAC_IF2}'/g' ${KATRAN_LATENCY_SCRIPT_NEW}
sed -i -e 's/<DUT_MAC_IF1>/'${DUT_MAC_IF1}'/g' ${KATRAN_LATENCY_SCRIPT_NEW}
cp ${KATRAN_LATENCY_SCRIPT_NON_OPT_ORIGIN} ${KATRAN_LATENCY_SCRIPT_NON_OPT_NEW}
sed -i -e 's/<PKTGEN_MAC_IF1>/'${PKTGEN_MAC_IF1}'/g' ${KATRAN_LATENCY_SCRIPT_NON_OPT_NEW}
sed -i -e 's/<PKTGEN_MAC_IF2>/'${PKTGEN_MAC_IF2}'/g' ${KATRAN_LATENCY_SCRIPT_NON_OPT_NEW}
sed -i -e 's/<DUT_MAC_IF1>/'${DUT_MAC_IF1}'/g' ${KATRAN_LATENCY_SCRIPT_NON_OPT_NEW}
# Outer loop: run the whole matrix once under load and once without.
for l in "${load[@]}"; do
if [ $l == "load" ]; then
# rate="10Mp/s"
rate=4
else
rate=0
# rate="10p/s"
fi
# Baseline runs (no Morpheus).
for i in $(eval echo "{1..$NUMBER_RUNS}"); do
echo -e "${COLOR_GREEN}[ INFO ] Running baseline latency test: ${l}, run: ${i} ${COLOR_OFF}"
# NOTE(review): called with no argument — the remote script gets an empty
# "-l" value for the baseline case; confirm this is intended.
start_config_remote
config_result=$(ssh ${DUT_SERVER_USER}@${DUT_SERVER_IP} "cat ${DUT_TEST_SCRIPT_RESULT}")
if [ $config_result == 0 ]; then
echo -e "${COLOR_GREEN}[ INFO ] Configuration on remote server succeded. ${COLOR_OFF}"
else
echo -e "${COLOR_RED}[ ERROR ] Error in the configuration script on the remote server ${COLOR_OFF}"
exit 1
fi
# Give the DUT time to settle before generating traffic.
sleep 10
echo -e "${COLOR_GREEN}[ INFO ] Starting Moongen latency test. ${COLOR_OFF}"
if [ -z ${MOONGEN_BUILD_DIR+x} ]; then
echo -e "${COLOR_RED}[ ERROR ] Moongen is not installed correctly ${COLOR_OFF}"
exit 1
fi
# MoonGen must run from its build directory's parent.
pushd .
cd ${MOONGEN_BUILD_DIR}/..
sudo ${MOONGEN_BUILD_DIR}/MoonGen ${KATRAN_LATENCY_SCRIPT_NEW} 0 0 ${rate} 0.1 ${TEST_DURATION}
popd
sudo mv ${MOONGEN_BUILD_DIR}/../katran-latency.txt ${RESULTS_FOLDER_PATH}/results_baseline_latency_${l}_run${i}.csv
done
for test in "${test_type[@]}"; do
# Now let's start the test with Morpheus
for i in $(eval echo "{1..$NUMBER_RUNS}"); do
echo -e "${COLOR_GREEN}[ INFO ] Running Morpheus test ${l}: ${test}, run: ${i} ${COLOR_OFF}"
start_config_remote "-m"
config_result=$(ssh ${DUT_SERVER_USER}@${DUT_SERVER_IP} "cat ${DUT_TEST_SCRIPT_RESULT}")
if [ $config_result == 0 ]; then
echo -e "${COLOR_GREEN}[ INFO ] Configuration on remote server succeded. ${COLOR_OFF}"
else
echo -e "${COLOR_RED}[ ERROR ] Error in the configuration script on the remote server ${COLOR_OFF}"
exit 1
fi
sleep 15
echo -e "${COLOR_GREEN}[ INFO ] Starting Moongen latency test. ${COLOR_OFF}"
if [ -z ${MOONGEN_BUILD_DIR+x} ]; then
echo -e "${COLOR_RED}[ ERROR ] Moongen is not installed correctly ${COLOR_OFF}"
exit 1
fi
pushd .
cd ${MOONGEN_BUILD_DIR}/..
# The optimized and unoptimized paths use different Lua scripts and
# produce differently named result files.
if [ $test == "optimized" ]; then
sudo ${MOONGEN_BUILD_DIR}/MoonGen ${KATRAN_LATENCY_SCRIPT_NEW} 0 0 ${rate} 0.1 ${TEST_DURATION}
sudo mv ${MOONGEN_BUILD_DIR}/../katran-latency.txt ${RESULTS_FOLDER_PATH}/results_${test}_latency_${l}_run${i}.csv
else
sudo ${MOONGEN_BUILD_DIR}/MoonGen ${KATRAN_LATENCY_SCRIPT_NON_OPT_NEW} 0 0 ${rate} 0.1 ${TEST_DURATION}
sudo mv ${MOONGEN_BUILD_DIR}/../katran-latency-non-opt.txt ${RESULTS_FOLDER_PATH}/results_${test}_latency_${l}_run${i}.csv
fi
popd
done
done
done
echo -e "${COLOR_GREEN}[ INFO ] Latency test completed. ${COLOR_OFF}"
exit 0
/*
 * Return the maximum element of arr[0..n-1].
 *
 * Returns INT_MIN when n <= 0: the original read arr[0] unconditionally,
 * which is undefined behaviour for an empty array.
 */
int findMax(int arr[], int n)
{
    if (n <= 0)
        return INT_MIN;
    /* Initialize maximum element, then scan the rest. */
    int max = arr[0];
    for (int i = 1; i < n; i++)
    {
        if (arr[i] > max)
            max = arr[i];
    }
    return max;
}
<filename>src/main/java/org/olat/repository/ui/RepositoryFlexiTableModel.java<gh_stars>1-10
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.repository.ui;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.olat.core.CoreSpringFactory;
import org.olat.core.commons.persistence.SortKey;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.form.flexible.elements.FlexiTableFilter;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FilterableFlexiTableModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiBusinessPathModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiSortableColumnDef;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableDataModel;
import org.olat.core.gui.translator.Translator;
import org.olat.core.id.context.BusinessControlFactory;
import org.olat.core.util.StringHelper;
import org.olat.core.util.Util;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryService;
import org.olat.repository.model.RepositoryEntryLifecycle;
import org.olat.resource.accesscontrol.ACService;
import org.olat.resource.accesscontrol.AccessControlModule;
import org.olat.resource.accesscontrol.model.OLATResourceAccess;
import org.olat.user.UserManager;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
* Initial date: 9 mai 2018<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class RepositoryFlexiTableModel extends DefaultFlexiTableDataModel<RepositoryEntry>
implements SortableFlexiTableDataModel<RepositoryEntry>, FilterableFlexiTableModel, FlexiBusinessPathModel {
// Cached once: RepoCols.values() allocates a new array on every call.
private static final RepoCols[] COLS = RepoCols.values();
private final Translator translator;
// Unfiltered snapshot of the rows; filter() restores from this list.
private List<RepositoryEntry> backups;
// Author login name -> display name cache for the "author" column.
private final Map<String,String> fullNames = new HashMap<>();
// OLAT resource key -> access-control offer, for the "ac" column.
private final Map<Long,OLATResourceAccess> repoEntriesWithOffer = new HashMap<>();
@Autowired
private ACService acService;
@Autowired
private UserManager userManager;
@Autowired
private AccessControlModule acModule;
public RepositoryFlexiTableModel(FlexiTableColumnModel columnModel, Locale locale) {
super(columnModel);
// Table models are not Spring beans; wire the @Autowired fields manually.
CoreSpringFactory.autowireObject(this);
translator = Util.createPackageTranslator(RepositoryService.class, locale);
}
@Override
public void sort(SortKey orderBy) {
if(orderBy != null) {
RepositoryFlexiTableSortDelegate sort = new RepositoryFlexiTableSortDelegate(orderBy, this, translator.getLocale());
List<RepositoryEntry> sorted = sort.sort();
super.setObjects(sorted);
}
}
/**
 * Case-insensitive substring filter on display name and external
 * reference; an empty search string restores the unfiltered backup list.
 */
@Override
public void filter(String searchString, List<FlexiTableFilter> filters) {
if(StringHelper.containsNonWhitespace(searchString)) {
List<RepositoryEntry> filteredRows = new ArrayList<>();
searchString = searchString.toLowerCase();
for(RepositoryEntry row:backups) {
if(accept(searchString, row)) {
filteredRows.add(row);
}
}
super.setObjects(filteredRows);
} else {
super.setObjects(backups);
}
}
private boolean accept(String searchString, RepositoryEntry entry) {
return accept(searchString, entry.getDisplayname())
|| accept(searchString, entry.getExternalRef());
}
// searchString is expected to be lower-cased already by filter().
private boolean accept(String searchString, String value) {
return StringHelper.containsNonWhitespace(value) && value.toLowerCase().contains(searchString);
}
/** Builds the authenticated business-path URL for the "select" action. */
@Override
public String getUrl(Component source, Object object, String action) {
if("select".equals(action) && object instanceof RepositoryEntry) {
String businessPath = "[RepositoryEntry:" + ((RepositoryEntry)object).getKey() + "]";
return BusinessControlFactory.getInstance().getAuthenticatedURLFromBusinessPathString(businessPath);
}
return null;
}
@Override
public Object getValueAt(int row, int col) {
RepositoryEntry entry = getObject(row);
return getValueAt(entry, col);
}
/**
 * Maps a column index to the cell value for one entry.
 * NOTE(review): RepoCols.guests has no case here and falls through to the
 * "ERROR" default — confirm whether a dedicated renderer handles it.
 */
@Override
public Object getValueAt(RepositoryEntry re, int col) {
switch (COLS[col]) {
case ac: return getAccessControl(re);
case repoEntry: return re;
case displayname: return re.getDisplayname();
case author: return getFullname(re.getInitialAuthor());
case access: return re;
case creationDate: return re.getCreationDate();
case lastUsage: return re.getStatistics().getLastUsage();
case externalId: return re.getExternalId();
case externalRef: return re.getExternalRef();
case lifecycleLabel: {
RepositoryEntryLifecycle lf = re.getLifecycle();
if(lf == null || lf.isPrivateCycle()) {
return "";
}
return lf.getLabel();
}
case lifecycleSoftKey: {
RepositoryEntryLifecycle lf = re.getLifecycle();
if(lf == null || lf.isPrivateCycle()) {
return "";
}
return lf.getSoftKey();
}
case lifecycleStart: return re.getLifecycle() == null ? null : re.getLifecycle().getValidFrom();
case lifecycleEnd: return re.getLifecycle() == null ? null : re.getLifecycle().getValidTo();
default: return "ERROR";
}
}
@Override
public void setObjects(List<RepositoryEntry> objects) {
// Keep the unfiltered snapshot in sync and refresh the caches.
backups = objects;
super.setObjects(objects);
repoEntriesWithOffer.clear();
secondaryInformations(objects);
}
public void addObject(RepositoryEntry object) {
getObjects().add(object);
secondaryInformations(Collections.singletonList(object));
}
public void addObjects(List<RepositoryEntry> addedObjects) {
getObjects().addAll(addedObjects);
secondaryInformations(addedObjects);
}
// Populate the AC-offer and author-display-name caches for new rows.
private void secondaryInformations(List<RepositoryEntry> repoEntries) {
if(repoEntries == null || repoEntries.isEmpty()) return;
secondaryInformationsAccessControl(repoEntries);
secondaryInformationsUsernames(repoEntries);
}
private void secondaryInformationsAccessControl(List<RepositoryEntry> repoEntries) {
// Skip entirely when the access-control module is disabled.
if(repoEntries == null || repoEntries.isEmpty() || !acModule.isEnabled()) return;
List<OLATResourceAccess> withOffers = acService.filterRepositoryEntriesWithAC(repoEntries);
for(OLATResourceAccess withOffer:withOffers) {
repoEntriesWithOffer.put(withOffer.getResource().getKey(), withOffer);
}
}
private void secondaryInformationsUsernames(List<RepositoryEntry> repoEntries) {
if(repoEntries == null || repoEntries.isEmpty()) return;
// Only look up authors we have not resolved yet (single batched query).
Set<String> newNames = new HashSet<>();
for(RepositoryEntry re:repoEntries) {
final String author = re.getInitialAuthor();
if(StringHelper.containsNonWhitespace(author) &&
!fullNames.containsKey(author)) {
newNames.add(author);
}
}
if(!newNames.isEmpty()) {
Map<String,String> newFullnames = userManager.getUserDisplayNamesByUserName(newNames);
fullNames.putAll(newFullnames);
}
}
public void removeObject(RepositoryEntry object) {
getObjects().remove(object);
repoEntriesWithOffer.remove(object.getOlatResource().getKey());
}
// Cell value for the "ac" column: the offer for bookable entries, a lock
// marker for members-only entries, null otherwise.
private Object getAccessControl(RepositoryEntry re) {
if(re.isBookable()) {
return repoEntriesWithOffer.get(re.getOlatResource().getKey());
}
if (!re.isAllUsers() && !re.isGuests()) {
// members only always show lock icon
return Collections.singletonList("o_ac_membersonly");
}
return null;
}
// Falls back to the raw login name when no display name is cached.
private String getFullname(String author) {
if(fullNames.containsKey(author)) {
return fullNames.get(author);
}
return author;
}
/** Column definitions; sort keys are the enum constant names. */
public enum RepoCols implements FlexiSortableColumnDef {
ac("table.header.ac"),
repoEntry("table.header.typeimg"),
displayname("table.header.displayname"),
author("table.header.author"),
access("table.header.access"),
creationDate("table.header.date"),
lastUsage("table.header.lastusage"),
externalId("table.header.externalid"),
externalRef("table.header.externalref"),
lifecycleLabel("table.header.lifecycle.label"),
lifecycleSoftKey("table.header.lifecycle.softkey"),
lifecycleStart("table.header.lifecycle.start"),
lifecycleEnd("table.header.lifecycle.end"),
guests("table.header.guests");
private final String i18nKey;
private RepoCols(String i18nKey) {
this.i18nKey = i18nKey;
}
@Override
public String i18nHeaderKey() {
return i18nKey;
}
@Override
public boolean sortable() {
return true;
}
@Override
public String sortKey() {
return name();
}
}
}
|
#!/bin/bash
# Back up the Joplin data directory into git and sync with the remote.
DIR=$(dirname $0)
source "$DIR/backup.env.sh"
cd "$JOPLIN_DATA"
if [ ! -d "$JOPLIN_DATA/.tree" ]; then
mkdir "$JOPLIN_DATA/.tree"
fi
# Create tree snapshot for the record
tree -a -I .git -o "$JOPLIN_DATA/.tree/tree.txt"
# First secure all data
git add -A
# BUG FIX: the original tested [ -z "$JOPLIN_GPG_KEY" ] and so tried to
# sign with an EMPTY key, while committing unsigned whenever a key WAS
# configured. Sign only when a key is present.
if [ -n "$JOPLIN_GPG_KEY" ]; then
git commit --gpg-sign=$JOPLIN_GPG_KEY -am "feat(automatic): Scripted catch-all backup"
else
git commit -am "feat(automatic-unsigned): Scripted catch-all backup"
fi
# Get new knowledge
git fetch origin master:master
# Save current branch name
CURRENT_BRANCH=$(git branch | grep \* | cut -d ' ' -f2)
# Add new knowledge to master
git checkout master
git merge $CURRENT_BRANCH
# Importing pre-existing knowledge
git checkout $CURRENT_BRANCH
git merge master
# Push to server
git push --all
#!/bin/sh
# Copyright (c) 2017 Red Hat, Inc.
# Launch gnome-terminal inside a private D-Bus session and watch the bus for
# org.gnome.Terminal losing its owner; when that happens, kill this session
# (presumably a watchdog used in testing — TODO confirm intent).
exec dbus-run-session bash -c 'gnome-terminal >& /dev/null; gdbus monitor --session -d org.gnome.Terminal 2>&1 | while read line; do echo $line | grep -q "does not have an owner" && kill $$; done'
<filename>open-sphere-plugins/wms/src/main/java/io/opensphere/wms/sld/event/package-info.java
/**
 * Event classes for the WMS SLD (Styled Layer Descriptor) support.
 */
package io.opensphere.wms.sld.event;
|
#!/bin/bash
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast on any error and on use of unset variables.
set -o errexit
set -o nounset
# Test dstat.
#
# This test launches three jobs and then verifies that dstat
# can lookup jobs by job-id, status, age, and job-name, with
# both default and --full output. It ensures that no error is
# returned and the output looks minimally sane.
readonly SCRIPT_DIR="$(dirname "${0}")"
# Job names used throughout: one job that runs to completion and two that
# stay running (sleep) while dstat is queried.
readonly COMPLETED_JOB_NAME="completed-job"
readonly RUNNING_JOB_NAME="running-job"
readonly RUNNING_JOB_NAME_2="running-job-2"
## Assert that the three test jobs appear in the expected order in the
## dstat YAML output and that the completed job shows the full event list.
# 1: dstat output (YAML string)
function verify_dstat_output() {
local dstat_out="${1}"
# Verify that that the jobs are found and are in the expected order.
# dstat sort ordering is by create-time (descending), so job 0 here should be the last started.
local first_job_name="$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[0].job-name")"
local second_job_name="$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[1].job-name")"
local third_job_name="$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[2].job-name")"
if [[ "${first_job_name}" != "${RUNNING_JOB_NAME_2}" ]]; then
echo "Job ${RUNNING_JOB_NAME_2} not found in the correct location in the dstat output! "
echo "${dstat_out}"
exit 1
fi
if [[ "${second_job_name}" != "${RUNNING_JOB_NAME}" ]]; then
echo "Job ${RUNNING_JOB_NAME} not found in the correct location in the dstat output!"
echo "${dstat_out}"
exit 1
fi
if [[ "${third_job_name}" != "${COMPLETED_JOB_NAME}" ]]; then
echo "Job ${COMPLETED_JOB_NAME} not found in the correct location in the dstat output!"
echo "${dstat_out}"
exit 1
fi
# The completed job (index 2) should have gone through the full lifecycle.
local expected_events=(start pulling-image localizing-files running-docker delocalizing-files ok)
util::dstat_out_assert_equal_events "${dstat_out}" "[2].events" "${expected_events[@]}"
}
readonly -f verify_dstat_output
## Verify provider name and provider-specific fields (zone) for each of the
## three tasks in the dstat output.
# 1: dstat output (YAML string)
function verify_dstat_google_provider_fields() {
local dstat_out="${1}"
for (( task=0; task < 3; task++ )); do
# Run the provider test.
local job_name="$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[${task}].job-name")"
local job_provider="$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[${task}].provider")"
# Validate provider.
if [[ "${job_provider}" != "${DSUB_PROVIDER}" ]]; then
echo " - FAILURE: provider ${job_provider} does not match '${DSUB_PROVIDER}'"
echo "${dstat_out}"
exit 1
fi
# Provider fields are both metadata set on task submission and machine
# information set when the Pipelines API starts processing the task.
local events="$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[${task}].events")"
if [[ "${events}" == "[]" ]]; then
echo " - NOTICE: task $((task+1)) not started; skipping provider instance fields tests."
# BUG FIX: was "exit 0", which silently terminated the ENTIRE test
# script with success; return only skips this helper's remaining checks.
return 0
fi
local job_zone=$(python "${SCRIPT_DIR}"/get_data_value.py "yaml" "${dstat_out}" "[${task}].provider-attributes.zone")
if ! [[ "${job_zone}" =~ ^[a-z]{1,4}-[a-z]{2,15}[0-9]-[a-z]$ ]]; then
echo " - FAILURE: Zone ${job_zone} for job ${job_name}, task $((task+1)) not valid."
echo "${dstat_out}"
exit 1
fi
done
echo " - ${DSUB_PROVIDER} provider fields verified"
}
readonly -f verify_dstat_google_provider_fields
# This test is not sensitive to the output of the dsub job.
# Set the ALLOW_DIRTY_TESTS environment variable to 1 in your shell to
# run this test without first emptying the output and logging directories.
source "${SCRIPT_DIR}/test_setup_e2e.sh"
if [[ "${CHECK_RESULTS_ONLY:-0}" -eq 0 ]]; then
echo "Launching pipeline..."
# One job run to completion, two left running while dstat is queried.
COMPLETED_JOB_ID="$(run_dsub \
--name "${COMPLETED_JOB_NAME}" \
--command 'echo TEST' \
--label test-token="${TEST_TOKEN}" \
--wait)"
RUNNING_JOB_ID="$(run_dsub \
--name "${RUNNING_JOB_NAME}" \
--label test-token="${TEST_TOKEN}" \
--command 'sleep 1m')"
RUNNING_JOB_ID_2="$(run_dsub \
--name "${RUNNING_JOB_NAME_2}" \
--label test-token="${TEST_TOKEN}" \
--command 'sleep 1m')"
echo "Checking dstat (by status)..."
if ! DSTAT_OUTPUT="$(run_dstat --status 'RUNNING' 'SUCCESS' --full --jobs "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID}" "${COMPLETED_JOB_ID}")"; then
echo "dstat exited with a non-zero exit code!"
echo "Output:"
echo "${DSTAT_OUTPUT}"
exit 1
fi
verify_dstat_output "${DSTAT_OUTPUT}"
echo "Checking dstat (by job-name)..."
# For the google provider, sleep briefly to allow the Pipelines v1
# to set the compute properties, which occurs shortly after pipeline submit.
if [[ "${DSUB_PROVIDER}" == "google" ]]; then
sleep 2
fi
if ! DSTAT_OUTPUT="$(run_dstat --status 'RUNNING' 'SUCCESS' --full --names "${RUNNING_JOB_NAME_2}" "${RUNNING_JOB_NAME}" "${COMPLETED_JOB_NAME}" --label "test-token=${TEST_TOKEN}")"; then
echo "dstat exited with a non-zero exit code!"
echo "Output:"
echo "${DSTAT_OUTPUT}"
exit 1
fi
verify_dstat_output "${DSTAT_OUTPUT}"
if [[ "${DSUB_PROVIDER}" == "google" ]] || [[ "${DSUB_PROVIDER}" == "google-v2" ]]; then
echo "Checking dstat ${DSUB_PROVIDER} provider fields"
verify_dstat_google_provider_fields "${DSTAT_OUTPUT}"
fi
echo "Checking dstat (by job-id: default)..."
if ! DSTAT_OUTPUT="$(run_dstat --status '*' --jobs "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID}" "${COMPLETED_JOB_ID}")"; then
echo "dstat exited with a non-zero exit code!"
echo "Output:"
echo "${DSTAT_OUTPUT}"
exit 1
fi
if ! echo "${DSTAT_OUTPUT}" | grep -qi "${RUNNING_JOB_NAME}"; then
echo "Job ${RUNNING_JOB_NAME} not found in the dstat output!"
echo "${DSTAT_OUTPUT}"
exit 1
fi
if ! echo "${DSTAT_OUTPUT}" | grep -qi "${RUNNING_JOB_NAME_2}"; then
# BUG FIX: this message previously named ${RUNNING_JOB_NAME}.
echo "Job ${RUNNING_JOB_NAME_2} not found in the dstat output!"
echo "${DSTAT_OUTPUT}"
exit 1
fi
if ! echo "${DSTAT_OUTPUT}" | grep -qi "${COMPLETED_JOB_NAME}"; then
# BUG FIX: this message previously named ${RUNNING_JOB_NAME}.
echo "Job ${COMPLETED_JOB_NAME} not found in the dstat output!"
echo "${DSTAT_OUTPUT}"
exit 1
fi
echo "Checking dstat (by job-id: full)..."
if ! DSTAT_OUTPUT="$(run_dstat --status '*' --full --jobs "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID}" "${COMPLETED_JOB_ID}")"; then
echo "dstat exited with a non-zero exit code!"
echo "Output:"
echo "${DSTAT_OUTPUT}"
exit 1
fi
verify_dstat_output "${DSTAT_OUTPUT}"
if [[ "${DSUB_PROVIDER}" == "google" ]] || [[ "${DSUB_PROVIDER}" == "google-v2" ]]; then
echo "Checking dstat ${DSUB_PROVIDER} provider fields"
verify_dstat_google_provider_fields "${DSTAT_OUTPUT}"
fi
echo "Checking dstat (by repeated job-ids: full)..."
if ! DSTAT_OUTPUT="$(run_dstat --status '*' --full --jobs "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID}" "${COMPLETED_JOB_ID}")"; then
echo "dstat exited with a non-zero exit code!"
echo "Output:"
echo "${DSTAT_OUTPUT}"
exit 1
fi
verify_dstat_output "${DSTAT_OUTPUT}"
echo "Waiting 5 seconds and checking 'dstat --age 5s'..."
sleep 5s
# All jobs are older than 5s by now, so the age-filtered list must be empty.
DSTAT_OUTPUT="$(run_dstat_age "5s" --status '*' --full --jobs "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID}" "${COMPLETED_JOB_ID}")"
if [[ "${DSTAT_OUTPUT}" != "[]" ]]; then
echo "dstat output not empty as expected:"
echo "${DSTAT_OUTPUT}"
exit 1
fi
echo "Verifying that the job didn't disappear completely."
DSTAT_OUTPUT="$(run_dstat --status '*' --full --jobs "${RUNNING_JOB_ID_2}" "${RUNNING_JOB_ID}" "${COMPLETED_JOB_ID}")"
verify_dstat_output "${DSTAT_OUTPUT}"
echo "SUCCESS"
fi
|
# Run the four training stages sequentially, each distributed over 4 local
# processes, capturing each stage's stdout in its own log file.
python -m torch.distributed.launch --nproc_per_node=4 train_epoch0.py > train_epoch0.txt
python -m torch.distributed.launch --nproc_per_node=4 train_epoch1.py > train_epoch1.txt
python -m torch.distributed.launch --nproc_per_node=4 train_epoch2.py > train_epoch2.txt
python -m torch.distributed.launch --nproc_per_node=4 train_epoch3.py > train_epoch3.txt
package legoset;
import jakarta.xml.bind.JAXBException;
import jaxb.JAXBHelper;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.net.URL;
import java.time.Year;
import java.util.ArrayList;
import java.util.HashSet;
/**
 * Builds a sample {@code LegoSet} (set 75211, Imperial TIE Fighter) and
 * serializes it to {@code legoset.xml} via JAXB.
 */
public class Main {
    public static void main(String[] args) throws Exception {
        var set = new LegoSet();
        set.setNumber(75211);
        set.setUrl(new URL("https://brickset.com/sets/75211-1/Imperial-TIE-Fighter"));
        set.setName("Imperial TIE Fighter");
        set.setTheme("Star Wars");
        set.setSubtheme("Solo");
        set.setYear(Year.of(2018));
        set.setPieces(519);
        set.setPackaging(LegoSet.Packaging.BOX);
        var tags = new HashSet<String>();
        tags.add("Starfighter");
        tags.add("Stormtrooper");
        tags.add("Star Wars");
        tags.add("Solo");
        set.setTags(tags);
        var minifigs = new ArrayList<Minifig>();
        minifigs.add(new Minifig(2, "Imperial Mudtrooper"));
        minifigs.add(new Minifig(1, "Imperial Pilot"));
        minifigs.add(new Minifig(1, "Mimban Stormtrooper"));
        set.setMinifigs(minifigs);
        set.setRatings(new Ratings(468, 4.4));
        // try-with-resources: the original never closed the FileOutputStream,
        // leaking the handle and risking unflushed output on some JVMs.
        try (var out = new FileOutputStream("legoset.xml")) {
            JAXBHelper.toXML(set, out);
        }
    }
}
|
/*
* All rights Reserved, Designed By DataDriver
* Copyright: DataDriver.Inc
* Company: Zhuo Wo Infomation Technology (ShangHai) CO.LTD
*/
package com.datadriver.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
/**
* DefinedSettings 自定义设置
*
* @Description 自定义配置文件Properties对应的Key对象
*/
@Component
public class DefinedSettings {

    // NOTE(review): defaults written as ":''" inject the literal two-character
    // string '' (quotes included), not an empty string — confirm downstream
    // code expects that, or switch to ":" for a true empty default.

    /**
     * Login-related URLs.
     */
    @Value("${dd.login.url:''}")
    private String loginUrl;
    @Value("${dd.loginout.url:''}")
    private String loginOutUrl;
    @Value("${dd.login.success.url:''}")
    private String loginSuccessUrl;
    @Value("${dd.login.unauth.url:''}")
    private String unauthorizedUrl;

    /**
     * Common settings (security key, default password, file storage, i18n).
     */
    @Value("${dd.conf.security.key:''}")
    private String securityKey;
    @Value("${dd.user.defaultpassword:''}")
    private String userDefaultPassword;
    // No placeholder default: the field initializer supplies the fallback,
    // but the property key itself must exist or context startup fails.
    @Value("${dd.filesave.type}")
    private String fileSaveType = FinalConfig.FILE_TYPE_DEFALULT;
    @Value("${dd.filesave.root:''}")
    private String fileSaveRoot;
    @Value("${dd.filesave.ftp.host:'127.0.0.1'}")
    private String ftpHost;
    @Value("${dd.filesave.ftp.username:''}")
    private String ftpLoginUserName;
    @Value("${dd.filesave.ftp.password:''}")
    private String ftpLoginPassword;
    @Value("${spring.messages.basename:''}")
    private String baseName;

    /**
     * Kaptcha (captcha image generator) settings.
     */
    @Value("${dd.kaptcha.border:'no'}")
    private String kaptchaBorder;
    @Value("${dd.kaptcha.border.color:''}")
    private String kaptchaBorderColor;
    @Value("${dd.kaptcha.border.thickness:''}")
    private String kaptchaBorderThickness;
    @Value("${dd.kaptcha.producer.impl:''}")
    private String kaptchaProducerImpl;
    @Value("${dd.kaptcha.textproducer.impl:''}")
    private String kaptchaTextProducerImpl;
    @Value("${dd.kaptcha.textproducer.char.string:''}")
    private String kaptchaTextProducerCharString;
    @Value("${dd.kaptcha.textproducer.char.length:''}")
    private String kaptchaTextProducerCharLength;
    @Value("${dd.kaptcha.textproducer.font.names:''}")
    private String kaptchaTextProducerFontNames;
    @Value("${dd.kaptcha.textproducer.font.size:'14'}")
    private String kaptchaTextProducerFontSize;
    @Value("${dd.kaptcha.textproducer.font.color:'blue'}")
    private String kaptchaTextProducerFontColor;
    @Value("${dd.kaptcha.textproducer.char.space:''}")
    private String kaptchaTextProducerCharSpace;
    @Value("${dd.kaptcha.noise.impl:''}")
    private String kaptchaNoiseImpl;
    @Value("${dd.kaptcha.noise.color:''}")
    private String kaptchaNoiseColor;
    @Value("${dd.kaptcha.obscurificator.impl:''}")
    private String kaptchaObscurificatorImpl;
    @Value("${dd.kaptcha.word.impl:''}")
    private String kaptchaWordImpl;
    @Value("${dd.kaptcha.background.impl:''}")
    private String kaptchaBackgroundImpl;
    @Value("${dd.kaptcha.background.clear.from:''}")
    private String kaptchaBackgroundClearFrom;
    @Value("${dd.kaptcha.background.clear.to:''}")
    private String kaptchaBackgroundClearTo;
    @Value("${dd.kaptcha.image.width:''}")
    private String kaptchaImageWidth;
    @Value("${dd.kaptcha.image.height:''}")
    private String kaptchaImageHeight;
    @Value("${dd.kaptcha.session.key:''}")
    private String kaptchaSessionKey;
    @Value("${dd.kaptcha.session.date:''}")
    private String kaptchaSessionDate;

    /**
     * Shiro filter chains and interceptor include/exclude patterns,
     * parsed from comma-separated property values via SpEL.
     */
    @Value("#{'${dd.shiro.filterChainDefinitions.anon}'.split(',')}")
    private List<String> filterChainDefinitionsAnon = new ArrayList<>();
    @Value("#{'${dd.shiro.filterChainDefinitions.authc}'.split(',')}")
    private List<String> filterChainDefinitionsAuthc = new ArrayList<>();
    @Value("#{'${dd.interceptors.security.excludes}'.split(',')}")
    private List<String> interceptorsSecurityExcludes = new ArrayList<>();
    @Value("#{'${dd.interceptors.security.includes}'.split(',')}")
    private List<String> interceptorsSecurityIncludes = new ArrayList<>();
    @Value("#{'${dd.interceptors.session.excludes}'.split(',')}")
    private List<String> interceptorsSessionExcludes = new ArrayList<>();
    @Value("#{'${dd.interceptors.session.includes}'.split(',')}")
    private List<String> interceptorsSessionIncludes = new ArrayList<>();

    /**
     * Redis connection and pool settings.
     */
    @Value("${dd.redis.host:'127.0.0.1'}")
    private String redisHost;
    @Value("${dd.redis.port:'6379'}")
    private String redisPort;
    @Value("${dd.redis.timeout:'60000'}")
    private String redisTimeOut;
    @Value("${dd.redis.password:''}")
    private String redisPassword;
    @Value("${dd.redis.shiro.database:'0'}")
    private String redisShiroDatabase;
    @Value("${dd.redis.shiro.expire:'600'}")
    private String redisShiroExpire;
    @Value("${dd.redis.pool.maxIdle:'100'}")
    private String redisPoolMaxIdle;
    @Value("${dd.redis.pool.minIdle:'3'}")
    private String redisPoolMinIdle;

    /**
     * CAS single-sign-on settings.
     */
    @Value("${dd.cas.enable:false}")
    private Boolean casEnable = false;
    @Value("${dd.cas.login:''}")
    private String casLoginUrl;
    @Value("${dd.cas.prefix:''}")
    private String casPrefixUrl;
    @Value("${dd.cas.callback:''}")
    private String casCallbackUrl;

    public String getSecurityKey() {
        return securityKey;
    }
    public String getUserDefaultPassword() {
        return userDefaultPassword;
    }
    public String getFileSaveType() {
        return fileSaveType;
    }
    public String getFileSaveRoot() {
        return fileSaveRoot;
    }
    public String getFtpHost() {
        return ftpHost;
    }
    public String getFtpLoginUserName() {
        return ftpLoginUserName;
    }
    public String getFtpLoginPassword() {
        return ftpLoginPassword;
    }
    public List<String> getFilterChainDefinitionsAnon() {
        return filterChainDefinitionsAnon;
    }
    public List<String> getFilterChainDefinitionsAuthc() {
        return filterChainDefinitionsAuthc;
    }
    public List<String> getInterceptorsSecurityExcludes() {
        return interceptorsSecurityExcludes;
    }
    public List<String> getInterceptorsSecurityIncludes() {
        return interceptorsSecurityIncludes;
    }
    public List<String> getInterceptorsSessionExcludes() {
        return interceptorsSessionExcludes;
    }
    public List<String> getInterceptorsSessionIncludes() {
        return interceptorsSessionIncludes;
    }
    public String getRedisHost() {
        return redisHost;
    }
    public String getRedisPort() {
        return redisPort;
    }
    public String getRedisTimeOut() {
        return redisTimeOut;
    }
    public String getRedisPassword() {
        return redisPassword;
    }
    public String getRedisShiroDatabase() {
        return redisShiroDatabase;
    }
    public String getRedisShiroExpire() {
        return redisShiroExpire;
    }
    public String getRedisPoolMaxIdle() {
        return redisPoolMaxIdle;
    }
    public String getRedisPoolMinIdle() {
        return redisPoolMinIdle;
    }
    public String getBaseName() {
        return baseName;
    }
    public String getCasLoginUrl() {
        return casLoginUrl;
    }
    public String getCasPrefixUrl() {
        return casPrefixUrl;
    }
    public String getCasCallbackUrl() {
        return casCallbackUrl;
    }
    public Boolean getCasEnable() {
        return casEnable;
    }
    public String getLoginUrl() {
        return loginUrl;
    }
    public String getLoginOutUrl() {
        return loginOutUrl;
    }
    public String getLoginSuccessUrl() {
        return loginSuccessUrl;
    }
    public String getUnauthorizedUrl() {
        return unauthorizedUrl;
    }
    public String getKaptchaBorder() {
        return kaptchaBorder;
    }
    public String getKaptchaBorderColor() {
        return kaptchaBorderColor;
    }
    public String getKaptchaBorderThickness() {
        return kaptchaBorderThickness;
    }
    public String getKaptchaProducerImpl() {
        return kaptchaProducerImpl;
    }
    public String getKaptchaTextProducerImpl() {
        return kaptchaTextProducerImpl;
    }
    public String getKaptchaTextProducerCharString() {
        return kaptchaTextProducerCharString;
    }
    public String getKaptchaTextProducerCharLength() {
        return kaptchaTextProducerCharLength;
    }
    public String getKaptchaTextProducerFontNames() {
        return kaptchaTextProducerFontNames;
    }
    public String getKaptchaTextProducerFontSize() {
        return kaptchaTextProducerFontSize;
    }
    public String getKaptchaTextProducerFontColor() {
        return kaptchaTextProducerFontColor;
    }
    public String getKaptchaTextProducerCharSpace() {
        return kaptchaTextProducerCharSpace;
    }
    public String getKaptchaNoiseImpl() {
        return kaptchaNoiseImpl;
    }
    public String getKaptchaNoiseColor() {
        return kaptchaNoiseColor;
    }
    public String getKaptchaObscurificatorImpl() {
        return kaptchaObscurificatorImpl;
    }
    public String getKaptchaWordImpl() {
        return kaptchaWordImpl;
    }
    public String getKaptchaBackgroundImpl() {
        return kaptchaBackgroundImpl;
    }
    public String getKaptchaBackgroundClearFrom() {
        return kaptchaBackgroundClearFrom;
    }
    public String getKaptchaBackgroundClearTo() {
        return kaptchaBackgroundClearTo;
    }
    public String getKaptchaImageWidth() {
        return kaptchaImageWidth;
    }
    public String getKaptchaImageHeight() {
        return kaptchaImageHeight;
    }
    public String getKaptchaSessionKey() {
        return kaptchaSessionKey;
    }
    public String getKaptchaSessionDate() {
        return kaptchaSessionDate;
    }

    /**
     * Assembles the Shiro filter chain definition map: "anon" patterns first,
     * then "authc" patterns (insertion order matters to Shiro).
     *
     * <p>Fix: builds and returns a fresh local map instead of repopulating a
     * shared instance field on every call, so concurrent callers can never
     * observe a half-built map.
     *
     * @return URL pattern to filter name ("anon"/"authc") in insertion order
     */
    public LinkedHashMap<String, String> getFilterChainDefinitionMap() {
        LinkedHashMap<String, String> map = new LinkedHashMap<>();
        filterChainDefinitionsAnon.forEach(pattern -> map.put(pattern, "anon"));
        filterChainDefinitionsAuthc.forEach(pattern -> map.put(pattern, "authc"));
        return map;
    }
}
|
<filename>js/dbhelper.js<gh_stars>0
/**
* Common database helper functions.
*/
/**
 * Common database helper functions.
 * All methods are static; the fetch* methods use node-style
 * (error, result) callbacks.
 */
class DBHelper {

  /**
   * Database URL — location of restaurants.json, relative to the site root.
   * (Fix: dropped the unused `port` constant; the relative URL never
   * referenced it.)
   */
  static get DATABASE_URL() {
    return `./data/restaurants.json`;
  }

  /**
   * Fetch all restaurants.
   * Invokes callback(null, restaurants) on success or callback(error, null)
   * on a non-200 response.
   */
  static fetchRestaurants(callback) {
    let xhr = new XMLHttpRequest();
    xhr.open('GET', DBHelper.DATABASE_URL);
    xhr.onload = () => {
      if (xhr.status === 200) { // Got a success response from server!
        const json = JSON.parse(xhr.responseText);
        callback(null, json.restaurants);
      } else { // Oops!. Got an error from server.
        const error = (`Request failed. Returned status of ${xhr.status}`);
        callback(error, null);
      }
    };
    xhr.send();
  }

  /**
   * Fetch a restaurant by its ID.
   */
  static fetchRestaurantById(id, callback) {
    // Fetch all restaurants, then select the match (note: `==` tolerates a
    // string id coming from a URL parameter).
    DBHelper.fetchRestaurants((error, restaurants) => {
      if (error) {
        callback(error, null);
      } else {
        const restaurant = restaurants.find(r => r.id == id);
        if (restaurant) { // Got the restaurant
          callback(null, restaurant);
        } else { // Restaurant does not exist in the database
          callback('Restaurant does not exist', null);
        }
      }
    });
  }

  /**
   * Fetch restaurants by a cuisine type with proper error handling.
   */
  static fetchRestaurantByCuisine(cuisine, callback) {
    DBHelper.fetchRestaurants((error, restaurants) => {
      if (error) {
        callback(error, null);
      } else {
        // Filter restaurants to have only the given cuisine type
        const results = restaurants.filter(r => r.cuisine_type == cuisine);
        callback(null, results);
      }
    });
  }

  /**
   * Fetch restaurants by a neighborhood with proper error handling.
   */
  static fetchRestaurantByNeighborhood(neighborhood, callback) {
    DBHelper.fetchRestaurants((error, restaurants) => {
      if (error) {
        callback(error, null);
      } else {
        // Filter restaurants to have only the given neighborhood
        const results = restaurants.filter(r => r.neighborhood == neighborhood);
        callback(null, results);
      }
    });
  }

  /**
   * Fetch restaurants by a cuisine and a neighborhood with proper error
   * handling.  Either filter may be 'all' to disable it.
   */
  static fetchRestaurantByCuisineAndNeighborhood(cuisine, neighborhood, callback) {
    DBHelper.fetchRestaurants((error, restaurants) => {
      if (error) {
        callback(error, null);
      } else {
        let results = restaurants
        if (cuisine != 'all') { // filter by cuisine
          results = results.filter(r => r.cuisine_type == cuisine);
        }
        if (neighborhood != 'all') { // filter by neighborhood
          results = results.filter(r => r.neighborhood == neighborhood);
        }
        callback(null, results);
      }
    });
  }

  /**
   * Fetch all unique neighborhoods with proper error handling.
   */
  static fetchNeighborhoods(callback) {
    DBHelper.fetchRestaurants((error, restaurants) => {
      if (error) {
        callback(error, null);
      } else {
        // Fix: map directly over the restaurants and de-duplicate with a
        // Set instead of the index-based map + O(n^2) indexOf filter.
        const uniqueNeighborhoods = [...new Set(restaurants.map(r => r.neighborhood))];
        callback(null, uniqueNeighborhoods);
      }
    });
  }

  /**
   * Fetch all unique cuisines with proper error handling.
   */
  static fetchCuisines(callback) {
    DBHelper.fetchRestaurants((error, restaurants) => {
      if (error) {
        callback(error, null);
      } else {
        // Same Set-based de-duplication as fetchNeighborhoods.
        const uniqueCuisines = [...new Set(restaurants.map(r => r.cuisine_type))];
        callback(null, uniqueCuisines);
      }
    });
  }

  /**
   * Restaurant page URL.
   */
  static urlForRestaurant(restaurant) {
    return (`./restaurant.html?id=${restaurant.id}`);
  }

  /**
   * Restaurant list image URL.
   */
  static imageUrlForRestaurantList(restaurant) {
    return (`./img/${restaurant.photograph}`);
  }

  /**
   * Restaurant page image URL.
   */
  static imageUrlForRestaurantInfo(restaurant) {
    return (`./img/${restaurant.photograph}`);
  }

  /**
   * Restaurant image alt attribute.
   */
  static imageAltForRestaurant(restaurant) {
    return (`Picture of restaurant ${restaurant.name}`);
  }

  /**
   * Aria label for a restaurant detail link.
   */
  static ariaLabelLink(restaurant) {
    return (`Click for more detail about restaurant ${restaurant.name}`);
  }

  /**
   * Map marker for a restaurant (requires the Google Maps API in scope).
   */
  static mapMarkerForRestaurant(restaurant, map) {
    const marker = new google.maps.Marker({
      position: restaurant.latlng,
      title: restaurant.name,
      url: DBHelper.urlForRestaurant(restaurant),
      map: map,
      animation: google.maps.Animation.DROP}
    );
    return marker;
  }
}
const newsdata = require('newsdata.js');

module.exports = {
  // Parses the tapped element's data-id URL (shaped like
  // "<API_URL>/<type>?<id>[&rest]") and rebuilds it as a
  // "?urlType=<type>&<id>" navigation suffix.
  getUrlTypeId(option) {
    let url = option.currentTarget.dataset.id; // NOTE: dataset keys are lower-case here
    console.log(url)
    // Length of whichever API prefix this URL starts with.
    let apiLength = (url.indexOf(newsdata.API_URL) != -1) ? newsdata.API_URL.length : newsdata.API_URL2.length;
    if (url.indexOf('https') == -1) { // plain http (not https): prefix is one char shorter
      apiLength--;
    }
    let indexOfType = url.indexOf('?'); // type ends at '?'
    let indexOfId = url.indexOf('&');   // id ends at the first '&' (if any)
    console.log(indexOfType)
    console.log(indexOfId)
    // The path segment between the API prefix and '?' is the type.
    let urlType = url.substr(apiLength + 1, indexOfType - apiLength - 1);
    let urlId = ''
    if (urlType == 'TopicApiForCmpp') {
      // Topic URLs keep the whole query string as the id.
      urlId = url.substr(indexOfType + 1);
    }
    else {
      // Otherwise the id is the first query parameter only.
      if (indexOfId == -1)
        urlId = url.substr(indexOfType + 1)
      else
        urlId = url.substr(indexOfType + 1, indexOfId - indexOfType - 1)
    }
    console.log(urlType);
    console.log(urlId);
    console.log('?urlType=' + urlType + '&' + urlId)
    return '?urlType=' + urlType + '&' + urlId;
    // console.log('/ng.com/ipadtestdoc?' + urlId)
    // return '/ng.com/ipadtestdoc?' + urlId
  },

  // Older variant kept for reference: assumes the id is everything after
  // '?' and does not special-case http vs https or TopicApiForCmpp.
  getUrlTypeId_bkp(option) {
    let url = option.currentTarget.dataset.id; // NOTE: dataset keys are lower-case here
    console.log(url)
    let apiLength = (url.indexOf(newsdata.API_URL) != -1) ? newsdata.API_URL.length : newsdata.API_URL2.length;
    let indexOfId = url.indexOf('?');
    let urlId = url.substr(indexOfId + 1);
    let urlType = url.substr(apiLength + 1, indexOfId - apiLength - 1);
    // console.log(urlId);
    // console.log(urlType);
    console.log('?urlType=' + urlType + '&' + urlId)
    return '?urlType=' + urlType + '&' + urlId;
  },
}
<gh_stars>0
/*
Copyright © 2020 <NAME> <EMAIL>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package commands
import (
"encoding/json"
"fmt"
log "github.com/micro/go-micro/v2/logger"
"google.golang.org/protobuf/types/known/emptypb"
"github.com/heroiclabs/nakama-common/api"
"open-match.dev/open-match/pkg/pb"
"github.com/spf13/cobra"
)
// MatchCancelRequest is the JSON payload sent to the Nakama "MatchCancel" RPC.
type MatchCancelRequest struct {
	MatchID string // id of the match to cancel
	UserID  string // id of the user requesting the cancellation
}
// getCmdCancel builds the cobra "cancel" command, which cancels a user's
// matchmaking ticket.  If the ticket is already attached to a match it calls
// the "MatchCancel" RPC; otherwise it deletes the Open Match ticket and
// clears the user's cached ticket/match state.
func getCmdCancel(cmdBuilder *commandsBuilder) *cobra.Command {
	cmd := &cobra.Command{
		Use: "cancel [ticketID]",
		//Aliases: []string{"r"},
		Short: "**Cancel** the ticket for a new match if the match has not started yet",
		Long: `**Cancel** the ticket for a new match if the match has not started yet`,
		//Args: matchAll(cobra.MinimumNArgs(1)),
		RunE: func(cmd *cobra.Command, args []string) error {
			log.Infof("%+v\n", args)
			// Resolve the calling user's Nakama account.
			account, err := cmdBuilder.nakamaCtx.Client.GetAccount(cmdBuilder.nakamaCtx.Ctx, &emptypb.Empty{})
			if err != nil {
				log.Error(err)
				return err
			}
			ticketID, _ := cmd.Flags().GetString("ticketID")
			var ticketState *TicketState
			// With no positional arg, fall back to the user's most recent ticket;
			// with an arg (and no -t flag), look that ticket up explicitly.
			if len(args) == 0 {
				ticketState, err = getLastUserTicketState(cmdBuilder, account)
			}
			if ticketID == "" && len(args) > 0 {
				ticketState, err = getTicketState(cmdBuilder, args[0], account)
			}
			if err != nil {
				log.Error(err)
				return err
			}
			if ticketState == nil {
				fmt.Fprintf(cmd.OutOrStdout(), fmt.Sprintf("No tickets found for <@%v>", account.CustomId))
				return nil
			}
			// Ticket already assigned to a match: delegate to the MatchCancel RPC.
			if ticketState.MatchID != "" {
				payload, _ := json.Marshal(MatchCancelRequest{
					MatchID: ticketState.MatchID,
					UserID: account.User.Id,
				})
				log.Infof("%+v\n", string(payload))
				result, err := cmdBuilder.nakamaCtx.Client.RpcFunc(cmdBuilder.nakamaCtx.Ctx, &api.Rpc{Id: "MatchCancel", Payload: string(payload)})
				if err != nil {
					log.Error(err)
					return err
				}
				if result.Payload != "" {
					fmt.Fprintf(cmd.OutOrStdout(), MarshalIndent(result.Payload))
				}
				return nil
			}
			// No match yet: drop the cached ticket state first, then delete the
			// ticket itself from Open Match.
			err = deleteTicketState(cmdBuilder, ticketState.Ticket.Id, ticketState.UserID)
			if err != nil {
				log.Error(err)
				return err
			}
			payload, _ := json.Marshal(pb.DeleteTicketRequest{
				TicketId: ticketState.Ticket.Id,
			})
			log.Infof("%+v\n", string(payload))
			fmt.Fprintf(cmd.OutOrStdout(), fmt.Sprintf("Ticket **%v** was not assigned to any match, just deleting it", ticketState.Ticket.Id))
			result, err := cmdBuilder.nakamaCtx.Client.RpcFunc(cmdBuilder.nakamaCtx.Ctx, &api.Rpc{Id: "OpenMatchFrontendTicketDelete", Payload: string(payload)})
			if err != nil {
				log.Error(err)
				return err
			}
			if result.Payload != "" {
				fmt.Fprintf(cmd.OutOrStdout(), MarshalIndent(result.Payload))
			}
			// Clear the user's cached match/ticket ids (PATCH_NULL_VALUE marks
			// the fields for removal).
			if err := createOrUpdateLastUserData(cmdBuilder, account, &UserData{
				UserID: account.User.Id,
				MatchID: PATCH_NULL_VALUE,
				TicketID: PATCH_NULL_VALUE,
			}); err != nil {
				log.Error(err)
				return err
			}
			return nil
		},
	}
	cmd.Flags().StringP("ticketID", "t", "", "usage")
	return cmd
}
|
# Activate the project's virtualenv so retrain.py and its deps resolve.
. env/bin/activate

# Retrain a quantized MobileNet classifier on the transformed fish dataset.
# Fix: quote $DERIVED_DATASETS_PATH so a path containing spaces does not
# word-split into multiple arguments.
python3 retrain.py \
  --image_dir "$DERIVED_DATASETS_PATH/transformed_fish_dataset" \
  --architecture=mobilenet_1.0_224_quantized \
  --output_graph=./models/mobilenet_1.0.224_quantized.pb \
  --output_labels=./models/mobilenet_labels.txt \
  --how_many_training_steps=5000 \
  --print_misclassified_test_images
|
package com.androidapp.adapter.entity;
/**
* https://github.com/CymChad/BaseRecyclerViewAdapterHelper
*/
public interface MultiItemEntity {
    /**
     * Returns the view-type constant that tells a multi-type adapter which
     * layout/view holder to use for this item.
     *
     * @return the item's view type
     */
    int getItemType();
}
|
#!/usr/bin/env bash
# Configure git identity for commits made by the CI bot.
setup_git() {
  git config --global user.email "travis@travis-ci.org"
  git config --global user.name "Travis CI - Bot"
}

# Split the package subdirectory into its own history and force-push it to
# the "nightly" branch of the package repo (authenticated via GH_TOKEN).
update_nightly_branch() {
  printf "Run nightly update \n"
  git remote add origin-nightly https://${GH_TOKEN}@github.com/ashblue/fluid-database.git
  git subtree split --prefix Assets/com.fluid.database -b nightly
  git push -f origin-nightly nightly:nightly
}

setup_git
update_nightly_branch
|
def minTime(jobs):
    """Return the minimum total time needed to finish all jobs.

    Each job is a dict with keys ``"id"``, ``"time_limit"`` and
    ``"depends_on"`` (a list of ids of jobs that must finish first).
    Independent jobs may run in parallel, so the answer is the length of
    the critical path: finish(j) = time(j) + max(finish(dep) for deps).

    The original implementation was broken: it tested ``dep in jobs``
    (membership of an id in a list of dicts, always False) and indexed the
    list by job id.  This version resolves dependencies through an id map
    with memoized DFS.

    Args:
        jobs: list of job dicts as described above (may be empty).

    Returns:
        The critical-path length, or 0 for an empty job list.

    Raises:
        KeyError: if a dependency references an unknown job id.
        ValueError: if the dependency graph contains a cycle.
    """
    by_id = {job["id"]: job for job in jobs}
    finish = {}  # job id -> earliest possible completion time (memo)

    def _finish(job_id, visiting):
        # Memoized DFS over the dependency DAG; `visiting` detects cycles.
        if job_id in finish:
            return finish[job_id]
        if job_id in visiting:
            raise ValueError("dependency cycle involving job %r" % job_id)
        visiting.add(job_id)
        job = by_id[job_id]
        deps_done = max(
            (_finish(dep, visiting) for dep in job["depends_on"]), default=0
        )
        visiting.remove(job_id)
        finish[job_id] = deps_done + job["time_limit"]
        return finish[job_id]

    return max((_finish(job["id"], set()) for job in jobs), default=0)


jobs = [
    {"id": 1, "time_limit": 10, "depends_on": []},
    {"id": 2, "time_limit": 5, "depends_on": [1]},
    {"id": 3, "time_limit": 6, "depends_on": [1, 2]},
    {"id": 4, "time_limit": 7, "depends_on": [3, 2]},
]
print(minTime(jobs))  # -> 28 (critical path 1 -> 2 -> 3 -> 4)
<reponame>lukaszkorecki/dotemacs<gh_stars>1-10
# Homebrew formula installing the clojure-lsp native macOS (amd64) binary.
class ClojureLsp < Formula
  # Pinned upstream release tag; bump this (and sha256) together to upgrade.
  LSP_VERSION = '2022.05.03-12.35.40'
  version LSP_VERSION
  url "https://github.com/clojure-lsp/clojure-lsp/releases/download/#{LSP_VERSION}/clojure-lsp-native-macos-amd64.zip"
  sha256 "32520aa126b0085a3582cc0f87fb62252146dc3e2a60c0c144b57dd3cc09fcd4"

  # Copy the prebuilt executable from the zip into Homebrew's bin.
  def install
    bin.install "clojure-lsp"
  end
end
|
<filename>open-sphere-plugins/open-sensor-hub/src/main/java/io/opensphere/osh/model/OSHDataTypeInfo.java
package io.opensphere.osh.model;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import io.opensphere.core.Toolbox;
import io.opensphere.core.animation.AnimationPlan;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.mantle.data.impl.DefaultDataTypeInfo;
import io.opensphere.mantle.data.impl.DefaultTimeExtents;
/** OpenSensorHub data type. */
public class OSHDataTypeInfo extends DefaultDataTypeInfo
{
    /** The offering. */
    private final Offering myOffering;

    /** The result templates, keyed by output name. */
    private final Map<String, Output> myResultTemplates = Collections.synchronizedMap(New.map());

    /** Whether this data type is for real video. */
    private volatile boolean myIsVideo;

    /**
     * The list of outputs this data type represents.
     */
    private final List<Output> myOutputs = Collections.synchronizedList(New.list());

    /** The animation plan, if any, created for this layer. */
    private volatile AnimationPlan myPlan;

    /**
     * Constructor.
     *
     * @param tb the tool box
     * @param sourcePrefix the source prefix
     * @param url the URL
     * @param offering the offering
     * @param outputs the outputs (must be non-empty; the first output names
     *            the type)
     */
    public OSHDataTypeInfo(Toolbox tb, String sourcePrefix, String url, Offering offering, List<Output> outputs)
    {
        super(tb, sourcePrefix, getTypeKey(url, offering, outputs.get(0)), "OshKosh B'gosh", getName(offering, outputs.get(0)),
                true);
        myOffering = offering;
        myOutputs.addAll(outputs);
        setTimeExtents(new DefaultTimeExtents(offering.getSpan()), this);
        setUrl(url);
        setDescription(offering.getDescription());
    }

    @Override
    public int hashCode()
    {
        // Fix: removed the pointless local variable; the type key is unique,
        // so the superclass hash (which includes it) is sufficient.
        return super.hashCode();
    }

    @Override
    public boolean equals(Object obj)
    {
        // Don't need other fields because the type key should be unique
        return this == obj || super.equals(obj) && getClass() == obj.getClass();
    }

    /**
     * Gets the offering.
     *
     * @return the offering
     */
    public Offering getOffering()
    {
        return myOffering;
    }

    /**
     * Gets the first output.
     *
     * @return the output
     */
    public Output getOutput()
    {
        return myOutputs.get(0);
    }

    /**
     * Gets all the outputs that make up this data type.
     *
     * @return The outputs this data type represents.
     */
    public List<Output> getOutputs()
    {
        return myOutputs;
    }

    /**
     * Gets the resultTemplate.
     *
     * @param output The output to get the result template for.
     * @return the resultTemplate, or null if none has been set
     */
    public Output getResultTemplate(Output output)
    {
        return myResultTemplates.get(output.getName());
    }

    /**
     * Sets the resultTemplate.
     *
     * @param output The output the result template is for.
     * @param resultTemplate the resultTemplate
     */
    public void setResultTemplate(Output output, Output resultTemplate)
    {
        myResultTemplates.put(output.getName(), resultTemplate);
    }

    /**
     * Gets the isVideo.
     *
     * @return the isVideo
     */
    public boolean isVideo()
    {
        return myIsVideo;
    }

    /**
     * Sets the isVideo.
     *
     * @param isVideo the isVideo
     */
    public void setVideo(boolean isVideo)
    {
        myIsVideo = isVideo;
    }

    /**
     * Gets the plan.
     *
     * @return the plan
     */
    public AnimationPlan getPlan()
    {
        return myPlan;
    }

    /**
     * Sets the plan.
     *
     * @param plan the plan
     */
    public void setPlan(AnimationPlan plan)
    {
        myPlan = plan;
    }

    /**
     * Gets whether this type is a near real-time streaming layer (i.e. the
     * offering's time span has no upper bound).
     *
     * @return whether it's NRT streaming
     */
    public boolean isNrtStreaming()
    {
        return myOffering.getSpan().isUnboundedEnd();
    }

    /**
     * Gets the type key ("url/offeringId/outputName").
     *
     * @param url the URL
     * @param offering the offering
     * @param output the output
     * @return the type key
     */
    private static String getTypeKey(String url, Offering offering, Output output)
    {
        return StringUtilities.concat(url, "/", offering.getId(), "/", output.getName());
    }

    /**
     * Gets the layer name ("offeringName outputName").
     *
     * @param offering the offering
     * @param output the output
     * @return the layer name
     */
    private static String getName(Offering offering, Output output)
    {
        return StringUtilities.concat(offering.getName(), " ", output.getName());
    }
}
|
#!/bin/bash
# Restart the FTP daemon on Debian-family systems.
if [ -f /etc/redhat-release ]; then
  # NOTE(review): the RHEL branch only prints a blank line and never restarts
  # vsftpd — confirm whether that is intentional (e.g. handled elsewhere).
  echo ""
elif [ -f /etc/debian_version ]; then
  sudo service vsftpd restart
fi
|
<filename>artifacts/rest-webapp/src/test/java/com/springsource/insight/samples/rest/RestfulRepositoryImplTest.java
/**
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.springsource.insight.samples.rest;
import javax.inject.Inject;
import javax.validation.ConstraintViolationException;
import org.hibernate.EmptyInterceptor;
import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.orm.hibernate3.SessionFactoryUtils;
import org.springframework.orm.hibernate3.SessionHolder;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import com.springsource.insight.samples.rest.model.RestfulData;
import com.springsource.insight.samples.rest.model.RestfulRepository;
/**
* @author lgoldstein
*/
@ContextConfiguration(locations={ "classpath:/META-INF/spring/application-context.xml" })
public class RestfulRepositoryImplTest extends AbstractJUnit4SpringContextTests {
    public RestfulRepositoryImplTest() {
        super();
    }

    @Inject protected SessionFactory _sessionFactory;

    protected SessionFactory getSessionFactory ()
    {
        return _sessionFactory;
    }

    /** True if a session was already bound, so tearDown must not unbind it. */
    private boolean _txParticipating;

    /**
     * Simulates "open session in view": binds a Hibernate session to the
     * current thread unless one is already bound (in which case we only
     * participate and leave cleanup to the outer owner).
     */
    protected void manuallyStartDaoSession () {
        final SessionFactory sessFac=getSessionFactory();
        if (TransactionSynchronizationManager.hasResource(sessFac)) {
            // Do not modify the Session: just set the participate flag.
            _txParticipating = true;
        } else {
            // NOTE: the session factory interceptor is overridden by an empty one, because the
            // real interceptor may not function correctly in this test-specific setup.
            final Session session=
                SessionFactoryUtils.getSession(sessFac, EmptyInterceptor.INSTANCE, null);
            session.setFlushMode(FlushMode.AUTO);
            TransactionSynchronizationManager.bindResource(sessFac, new SessionHolder(session));
            logger.info("Started transaction context");
        }
    }

    /**
     * Counterpart of {@link #manuallyStartDaoSession()}: releases the session
     * only if this test bound it.
     */
    protected void manuallyEndDaoSession () {
        final SessionFactory sessFac=getSessionFactory();
        if (!_txParticipating) {
            final SessionHolder sessionHolder=
                (SessionHolder) TransactionSynchronizationManager.unbindResource(sessFac);
            SessionFactoryUtils.releaseSession(sessionHolder.getSession(), sessFac);
            logger.info("Ended transaction context");
        }
    }

    @Inject private RestfulRepository _repository;

    // define same names as for jUnit 3.x
    @Before
    public void setUp () {
        manuallyStartDaoSession();
        _repository.removeAll(); // make sure starting with a clean database
    }

    @After
    public void tearDown () {
        _repository.removeAll(); // clean up the database
        manuallyEndDaoSession();
    }

    private static final int TEST_BALANCE=7031965;

    @Test
    public void testBalanceCreation () {
        final RestfulData value=createTestValue("testBalanceCreation", TEST_BALANCE),
                item=_repository.getData(value.getId().longValue());
        Assert.assertNotNull("No item persisted", item);
        Assert.assertEquals("Mismatched persisted data", value, item);
    }

    @Test
    public void testBalanceUpdate () {
        final int NEW_BALANCE=1704169;
        final RestfulData value=createTestValue("testBalanceUpdate", TEST_BALANCE);
        final Long itemId=value.getId();
        final RestfulData updated=_repository.setBalance(itemId.longValue(), NEW_BALANCE);
        Assert.assertNotNull("No balance updated", updated);
        Assert.assertEquals("Mismatched updated balance", NEW_BALANCE, updated.getBalance());

        final Long updId=updated.getId();
        // Fix: the original asserted itemId here, which was already known to
        // be non-null; the message clearly refers to the UPDATED instance.
        Assert.assertNotNull("No ID assigned for updated instance", updId);
        Assert.assertEquals("Mismatched updated instance ID", itemId, updId);
    }

    @Test
    public void testValidationFailure () {
        final RestfulData value=createTestValue("testBalanceUpdate", TEST_BALANCE);
        try {
            _repository.setBalance(value.getId().longValue(), Integer.MIN_VALUE);
            Assert.fail("Unexpected success of illegal balance value update");
        } catch(ConstraintViolationException e) {
            // ignored since expected
            if (logger.isDebugEnabled()) {
                logger.debug("testValidationFailure() ignored exception", e);
            }
        }
    }

    /**
     * Creates and sanity-checks a persisted test value with the given balance.
     *
     * @param identifier prefix for assertion messages
     * @param balance the balance to persist
     * @return the created entity
     */
    private RestfulData createTestValue (final String identifier, final int balance) {
        final RestfulData value=_repository.create(balance);
        Assert.assertNotNull(identifier + "[No balance created]", value);
        Assert.assertEquals(identifier + "[Mismatched created balance]", balance, value.getBalance());
        Assert.assertNotNull(identifier + "[No ID assigned for created instance]", value.getId());
        return value;
    }
}
|
#!/bin/bash
NAME_ROOT=electrum-FJC
export PYTHONDONTWRITEBYTECODE=1

# Let's begin!
set -e

. "$CONTRIB"/build_tools_util.sh

pushd $WINEPREFIX/drive_c/electrum

VERSION="4.1.4"
info "Last commit: $VERSION"

# Load electrum-locale for this release
git submodule update --init

pushd ./contrib/deterministic-build/electrum-locale
if ! which msgfmt > /dev/null 2>&1; then
    fail "Please install gettext"
fi
# we want the binary to have only compiled (.mo) locale files; not source (.po) files
rm -rf "$WINEPREFIX/drive_c/electrum/electrum/locale/"
for i in ./locale/*; do
    dir="$WINEPREFIX/drive_c/electrum/electrum/$i/LC_MESSAGES"
    mkdir -p $dir
    # Compile each catalog; "|| true" tolerates catalogs that fail to compile.
    msgfmt --output-file="$dir/electrum.mo" "$i/electrum.po" || true
done
popd

# Normalize all file timestamps so the build is reproducible.
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
popd

# Install frozen dependencies (pinned by the deterministic-build requirement files).
$WINE_PYTHON -m pip install --no-dependencies --no-warn-script-location \
    --cache-dir "$WINE_PIP_CACHE_DIR" -r "$CONTRIB"/deterministic-build/requirements.txt
$WINE_PYTHON -m pip install --no-dependencies --no-warn-script-location \
    --cache-dir "$WINE_PIP_CACHE_DIR" -r "$CONTRIB"/deterministic-build/requirements-binaries.txt
$WINE_PYTHON -m pip install --no-dependencies --no-warn-script-location \
    --cache-dir "$WINE_PIP_CACHE_DIR" -r "$CONTRIB"/deterministic-build/requirements-hw.txt

pushd $WINEPREFIX/drive_c/electrum
# see https://github.com/pypa/pip/issues/2195 -- pip makes a copy of the entire directory
info "Pip installing Electrum. This might take a long time if the project folder is large."
$WINE_PYTHON -m pip install --no-dependencies --no-warn-script-location .
popd

rm -rf dist/

# build standalone and portable versions
info "Running pyinstaller..."
wine "$WINE_PYHOME/scripts/pyinstaller.exe" --noconfirm --ascii --clean --name $NAME_ROOT-$VERSION -w deterministic.spec

# set timestamps in dist, in order to make the installer reproducible
pushd dist
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
popd

info "building NSIS installer"
# $VERSION could be passed to the electrum.nsi script, but this would require some rewriting in the script itself.
wine "$WINEPREFIX/drive_c/Program Files (x86)/NSIS/makensis.exe" /DPRODUCT_VERSION=$VERSION electrum.nsi

#cd dist
#mv electrum-setup.exe $NAME_ROOT-$VERSION-setup.exe
#cd ..

info "Padding binaries to 8-byte boundaries, and fixing COFF image checksum in PE header"
# note: 8-byte boundary padding is what osslsigncode uses:
#       https://github.com/mtrojnar/osslsigncode/blob/6c8ec4427a0f27c145973450def818e35d4436f6/osslsigncode.c#L3047
# The embedded Python program recomputes the PE checksum field in place so
# later signing/verification tools accept the padded binary.
(
    cd dist
    for binary_file in ./*.exe; do
        info ">> fixing $binary_file..."
        # code based on https://github.com/erocarrera/pefile/blob/bbf28920a71248ed5c656c81e119779c131d9bd4/pefile.py#L5877
        python3 <<EOF
pe_file = "$binary_file"
with open(pe_file, "rb") as f:
    binary = bytearray(f.read())
pe_offset = int.from_bytes(binary[0x3c:0x3c+4], byteorder="little")
checksum_offset = pe_offset + 88
checksum = 0
# Pad data to 8-byte boundary.
remainder = len(binary) % 8
binary += bytes(8 - remainder)
for i in range(len(binary) // 4):
    if i == checksum_offset // 4:  # Skip the checksum field
        continue
    dword = int.from_bytes(binary[i*4:i*4+4], byteorder="little")
    checksum = (checksum & 0xffffffff) + dword + (checksum >> 32)
    if checksum > 2 ** 32:
        checksum = (checksum & 0xffffffff) + (checksum >> 32)
checksum = (checksum & 0xffff) + (checksum >> 16)
checksum = (checksum) + (checksum >> 16)
checksum = checksum & 0xffff
checksum += len(binary)
# Set the checksum
binary[checksum_offset : checksum_offset + 4] = int.to_bytes(checksum, byteorder="little", length=4)
with open(pe_file, "wb") as f:
    f.write(binary)
EOF
    done
)

sha256sum dist/electrum*.exe
|
<filename>tests/Slack_web_api.js<gh_stars>1-10
// Test dependencies for the Botkit Slack integration suite.
var should = require('should');
var Botkit = require('../');
var path = require('path');
var tmpdir = require('os').tmpdir();
var fs = require('fs');
var winston = require('winston');
// Slack API token; the RTM suites below need a real token exported as TOKEN.
var token = process.env.TOKEN;
// Sanity checks: the environment variable and the library exports are present.
describe('Test', function() {
it('should have a token', function(done) {
should.exist(token);
done();
});
it('should have Botkit instance', function(done) {
should.exist(Botkit);
should.exist(Botkit.core);
should.exist(Botkit.slackbot);
done();
});
});
// Lifecycle tests: spawn a bot, open and close the RTM connection, and make
// sure an invalid token fails cleanly.
describe('Botkit', function() {
    this.timeout(5000);
    it('should start and then stop', function(done) {
        var controller = Botkit.slackbot({debug: false});
        var openIsCalled = false;
        controller.on('rtm_open', function(bot) {
            should.exist(bot);
            openIsCalled = true;
        });
        controller.on('rtm_close', function(bot) {
            should.exist(bot);
            // NOTE(review): with newer should.js the bare `.true` getter may be
            // a no-op; `openIsCalled.should.be.true()` would actually assert.
            // Confirm the pinned should version before changing.
            openIsCalled.should.be.true;
            controller.shutdown();
            done();
        });
        controller
            .spawn({
                token: token
            })
            .startRTM(function(err, bot, payload) {
                (err === null).should.be.true;
                should.exist(bot);
                // Closing the RTM connection triggers the rtm_close handler,
                // which finishes the test.
                bot.closeRTM();
            });
    });
    // Fixed description typo: was "should have fail with false token".
    it('should fail with false token', function(done) {
        this.timeout(5000);
        var controller = Botkit.slackbot({debug: false});
        controller
            .spawn({
                token: '1234'
            })
            .startRTM(function(err, bot, payload) {
                // A bogus token must surface as an error from startRTM.
                should.exist(err);
                controller.shutdown();
                done();
            });
    });
});
// Verifies that a custom winston logger passed via the `logger` option
// actually receives Botkit's debug output (written to a temp file).
describe('Log', function() {
it('should use an external logging provider', function(done) {
var logFile = path.join(tmpdir, 'botkit.log');
var logger = new winston.Logger({
transports: [
new (winston.transports.File)({ filename: logFile })
]
});
logger.cli();
var controller = Botkit.slackbot({
debug: true,
logger: logger
});
controller
.spawn({
token: '1234'
})
.startRTM(function(err, bot, payload) {
// The bogus token makes startRTM fail; the attempt must still be logged.
should.exist(err);
controller.shutdown();
fs.readFile(logFile, 'utf8', function(err, res) {
(err === null).should.be.true;
should.exist(res);
done();
});
});
});
});
|
#!/bin/bash
# Build a Windows executable for dcdownloader and name it after the
# Travis build number (TRAVIS_BUILD_NUMBER is provided by the CI env).
set -e  # abort on the first failing command instead of shipping a broken build

# -y keeps apt non-interactive so the CI job cannot hang on a prompt;
# tree is only used for build diagnostics below.
apt install -y tree

echo "Install dependencies"
pip install -e .

echo "Build windows executable files"
pyinstaller -F dcdownloader/main.py --distpath pyinstaller/dist --specpath pyinstaller/spec --workpath pyinstaller/build

tree

echo "Rename output file"
mv pyinstaller/dist/main.exe "pyinstaller/dist/dcdownloader_windows_build${TRAVIS_BUILD_NUMBER}.exe"
/**
* Copyright (C) 2010-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.flyway.core.migration.sql;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Small test for SqlScript.
 */
public class SqlScriptSmallTest {
    /** The SqlScript instance exercised by every test. */
    private SqlScript script = new SqlScript();

    /** Script lines fed to the instance under test. */
    private List<String> input = new ArrayList<String>();

    @Test
    public void stripSqlCommentsNoComment() {
        input.add("select * from table;");
        List<String> stripped = script.stripSqlComments(input);
        assertEquals("select * from table;", stripped.get(0));
    }

    @Test
    public void stripSqlCommentsSingleLineComment() {
        input.add("--select * from table;");
        List<String> stripped = script.stripSqlComments(input);
        assertEquals("", stripped.get(0));
    }

    @Test
    public void stripSqlCommentsMultiLineCommentSingleLine() {
        input.add("/*comment line*/");
        input.add("select * from table;");
        List<String> stripped = script.stripSqlComments(input);
        assertEquals("", stripped.get(0));
        assertEquals("select * from table;", stripped.get(1));
    }

    @Test
    public void stripSqlCommentsMultiLineCommentMultipleLines() {
        input.add("/*comment line");
        input.add("more comment text*/");
        List<String> stripped = script.stripSqlComments(input);
        assertEquals("", stripped.get(0));
        assertEquals("", stripped.get(1));
    }

    @Test
    public void linesToStatements() {
        input.add("select col1, col2");
        input.add("from mytable");
        input.add("where col1 > 10;");
        List<SqlStatement> statements = script.linesToStatements(input);
        assertNotNull(statements);
        assertEquals(1, statements.size());
        SqlStatement statement = statements.get(0);
        assertEquals(1, statement.getLineNumber());
        assertEquals("select col1, col2\nfrom mytable\nwhere col1 > 10", statement.getSql());
    }

    @Test
    public void parsePlaceholderComments() {
        String source = "${drop_view} \"SOME_VIEW\" IF EXISTS;\n"
                + "CREATE ${or_replace} VIEW \"SOME_VIEW\";\n";
        Map<String, String> placeholders = new HashMap<String, String>();
        placeholders.put("drop_view", "--");
        placeholders.put("or_replace", "OR REPLACE");
        List<SqlStatement> statements = script.parse(source, new PlaceholderReplacer(placeholders, "${", "}"));
        assertNotNull(statements);
        assertEquals(1, statements.size());
        SqlStatement statement = statements.get(0);
        assertEquals(2, statement.getLineNumber());
        assertEquals("CREATE OR REPLACE VIEW \"SOME_VIEW\"", statement.getSql());
    }
}
|
<reponame>goldstar0415/Expo-AR-Game
// @flow
import THREE from '../../../universal/THREE';
import CombustableObject from '../core/CombustableObject';
import Factory from '../Factory';
import randomRange from '../utils/randomRange';
// A floating spherical target; inherits burn/destruction behavior from
// CombustableObject.
class Target extends CombustableObject {
// Builds the target mesh. The sphere geometry is created once and cached on
// `global` so every Target instance reuses the same buffers.
loadAsync = async (scene) => {
global.TargetGeom = global.TargetGeom || new THREE.SphereBufferGeometry(20, 8, 8);
const mesh = new THREE.Mesh(global.TargetGeom.clone(), Factory.shared.materials.red);
this.add(mesh);
this.z = -250;
this.reset();
// NOTE(review): the ['red', 'red'] argument is presumably the material set
// CombustableObject uses while burning — confirm against its loadAsync.
await super.loadAsync(scene, ['red', 'red']);
};
// Re-randomizes height and horizontal oscillation speed for a fresh round.
reset = () => {
super.reset();
this.y = randomRange(150, 310);
this.speed = randomRange(0.001, 0.005);
};
// Per-frame update: constant spin plus a sideways sine-wave drift whose
// phase is driven by elapsed time and the per-instance speed.
update(delta, time) {
this.rotation.y += 4 * delta;
this.rotation.x += 2 * delta;
this.x = Math.sin(time * this.speed) * 100;
super.update(delta, time);
}
}
export default Target;
|
/**
* @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
/**
* @module html-support/integrations/heading
*/
import { Plugin } from 'ckeditor5/src/core';
import DataSchema from '../dataschema';
/**
 * Provides the General HTML Support integration with {@link module:heading/heading~Heading Heading} feature.
 *
 * @extends module:core/plugin~Plugin
 */
export default class HeadingElementSupport extends Plugin {
	/**
	 * @inheritDoc
	 */
	static get requires() {
		return [ DataSchema ];
	}

	/**
	 * @inheritDoc
	 */
	init() {
		const editor = this.editor;

		// Nothing to integrate with when the heading feature is absent.
		if ( !editor.plugins.has( 'HeadingEditing' ) ) {
			return;
		}

		const dataSchema = editor.plugins.get( DataSchema );
		const options = editor.config.get( 'heading.options' );

		// Register every heading element supported by HeadingEditing so that
		// custom attributes can be enabled on it, collecting the model names
		// for the <hgroup> definition below.
		const headerModels = options
			.filter( option => 'model' in option && 'view' in option )
			.map( option => {
				dataSchema.registerBlockElement( {
					view: option.view,
					model: option.model
				} );

				return option.model;
			} );

		// Allow all registered heading models as children of <hgroup>.
		dataSchema.extendBlockElement( {
			model: 'htmlHgroup',
			modelSchema: {
				allowChildren: headerModels
			}
		} );
	}
}
|
# Buddy-system memory allocator.
#
# The original implementation raised TypeError immediately (get_size indexed an
# int) and allocate() recursed without a terminating condition; this is a
# working replacement that keeps the same public interface.
class BuddySystem:
    """Power-of-two buddy-system allocator.

    The pool is mirrored in ``self.blocks``, one marker per allocation unit:
    ``'X'`` for a free unit and ``'U'`` for a used unit.  The pool size is
    rounded up to a power of two so that every block has a buddy.
    """

    def __init__(self, size):
        # Round the pool up to the next power of two.
        self._capacity = 1
        while self._capacity < max(1, size):
            self._capacity *= 2
        # 'X' marks a free unit, 'U' a used unit (kept for introspection).
        self.blocks = ['X'] * self._capacity
        # Free lists: block size -> set of start indices of free blocks.
        self._free = {self._capacity: {0}}
        # Allocated blocks: start index -> rounded block size.
        self._allocated = {}

    def get_size(self, block_index):
        """Return the size of the allocated block starting at ``block_index``.

        Raises ValueError if no allocated block starts there.
        """
        try:
            return self._allocated[block_index]
        except KeyError:
            raise ValueError("no allocated block starts at index %d" % block_index)

    def allocate(self, size):
        """Allocate a block of at least ``size`` units.

        Returns the start index of the block, or None when no free block is
        large enough.  The request is rounded up to a power of two.
        """
        if size <= 0:
            raise ValueError("size must be positive")
        # Round the request up to a power of two.
        want = 1
        while want < size:
            want *= 2
        # Pick the smallest free block size able to satisfy the request.
        usable = [s for s, starts in self._free.items() if starts and s >= want]
        if not usable:
            return None
        block_size = min(usable)
        index = min(self._free[block_size])
        self._free[block_size].discard(index)
        # Split the block in half until it matches the requested size,
        # returning each upper half to the free lists.
        while block_size > want:
            block_size //= 2
            self._free.setdefault(block_size, set()).add(index + block_size)
        self._allocated[index] = want
        for unit in range(index, index + want):
            self.blocks[unit] = 'U'
        return index

    def deallocate(self, block_index):
        """Free the block at ``block_index`` and merge it with free buddies.

        Raises ValueError if no allocated block starts there.
        """
        size = self._allocated.pop(block_index, None)
        if size is None:
            raise ValueError("no allocated block starts at index %d" % block_index)
        for unit in range(block_index, block_index + size):
            self.blocks[unit] = 'X'
        # Coalesce: a block of `size` starting at `index` has its buddy at
        # index XOR size; keep merging while the buddy is also free.
        index = block_index
        while size < self._capacity:
            buddy = index ^ size
            if buddy not in self._free.get(size, set()):
                break
            self._free[size].discard(buddy)
            index = min(index, buddy)
            size *= 2
        self._free.setdefault(size, set()).add(index)
#!/bin/sh
#
# $PostgreSQL: pgsql/src/bin/ipcclean/ipcclean.sh,v 1.15 2003/11/29 19:52:04 pgsql Exp $
#
CMDNAME=`basename $0`
if [ "$1" = '-?' -o "$1" = "--help" ]; then
echo "$CMDNAME cleans up shared memory and semaphores from aborted PostgreSQL"
echo "backends."
echo
echo "Usage:"
echo " $CMDNAME"
echo
echo "Note: Since the utilities underlying this script are very different"
echo "from platform to platform, chances are that it might not work on"
echo "yours. If that is the case, please write to <pgsql-bugs@postgresql.org>"
echo "so that your platform can be supported in the future."
exit 0
fi
# Refuse to run as root: the ipcs filtering below matches by user, and running
# as root could remove IPC objects belonging to unrelated services.
if [ "$USER" = 'root' -o "$LOGNAME" = 'root' ]
then
(
echo "$CMDNAME: cannot be run as root" 1>&2
echo "Please log in (using, e.g., \"su\") as the (unprivileged) user that" 1>&2
echo "owned the server process." 1>&2
) 1>&2
exit 1
fi
EffectiveUser=`id -n -u 2>/dev/null || whoami 2>/dev/null`
#-----------------------------------
# List of platform-specific hacks
# Feel free to add yours here.
#-----------------------------------
#
# This is QNX 4.25
#
if [ `uname` = 'QNX' ]; then
if ps -eA | grep -s '[p]ostmaster' >/dev/null 2>&1 ; then
echo "$CMDNAME: a postmaster is still running" 1>&2
exit 1
fi
rm -f /dev/shmem/PgS*
exit $?
fi
#
# This is based on RedHat 5.2.
#
if [ `uname` = 'Linux' ]; then
did_anything=
if ps x | grep -s '[p]ostmaster' >/dev/null 2>&1 ; then
echo "$CMDNAME: a postmaster is still running" 1>&2
exit 1
fi
# shared memory
# ipcs -m -p lists shmid, creator pid and last-attach pid; repack each row
# as "shmid:cpid:lpid" and split it via IFS below.
for val in `ipcs -m -p | grep '^[0-9]' | awk '{printf "%s:%s:%s\n", $1, $3, $4}'`
do
save_IFS=$IFS
IFS=:
set X $val
shift
IFS=$save_IFS
ipcs_shmid=$1
ipcs_cpid=$2
ipcs_lpid=$3
# Note: We can do -n here, because we know the platform.
echo -n "Shared memory $ipcs_shmid ... "
# Don't do anything if process still running.
# (This check is conceptually phony, but it's
# useful anyway in practice.)
ps hj $ipcs_cpid $ipcs_lpid >/dev/null 2>&1
if [ "$?" -eq 0 ]; then
echo "skipped; process still exists (pid $ipcs_cpid or $ipcs_lpid)."
continue
fi
# try remove
ipcrm shm $ipcs_shmid
if [ "$?" -eq 0 ]; then
did_anything=t
else
exit
fi
done
# semaphores
for val in `ipcs -s -c | grep '^[0-9]' | awk '{printf "%s\n", $1}'`; do
echo -n "Semaphore $val ... "
# try remove
ipcrm sem $val
if [ "$?" -eq 0 ]; then
did_anything=t
else
exit
fi
done
[ -z "$did_anything" ] && echo "$CMDNAME: nothing removed" && exit 1
exit 0
fi # end Linux
# This is the original implementation. It seems to work
# on FreeBSD, SunOS/Solaris, HP-UX, IRIX, and probably
# some others.
ipcs | egrep '^m .*|^s .*' | egrep "$EffectiveUser" | \
awk '{printf "ipcrm -%s %s\n", $1, $2}' '-' | sh
|
#!/bin/bash -e
# Requires 3 successful passing builds before skipping
# CLEAN_RUNS is consumed by the docker-test make target.
CLEAN_RUNS=3 make docker-test
|
def median_array(arr):
    """Return the median of a list of numbers.

    Args:
        arr: A (possibly empty) list of numbers.  The list is NOT modified;
            the original version sorted it in place, mutating the caller's
            data as a side effect.

    Returns:
        The middle value for an odd-length list, the mean of the two middle
        values for an even-length list, or None for an empty list.
    """
    ordered = sorted(arr)  # sorted() copies, so the caller's list is untouched
    n = len(ordered)
    if n == 0:
        return None
    if n % 2 == 0:
        # Even count: average the two middle elements.
        return (ordered[n // 2] + ordered[n // 2 - 1]) / 2
    # Odd count: the single middle element.
    return ordered[n // 2]
arr = [11, 3, 5, 9, 1, 4, 8]
print(median_array(arr))
# Output: 5  (sorted list is [1, 3, 4, 5, 8, 9, 11]; the middle of 7 values is 5)
<reponame>alex-dorokhov/gdx-pay
package com.badlogic.gdx.pay.android.googleplay.billing.converter;
import android.os.Bundle;
import com.badlogic.gdx.pay.android.googleplay.GoogleBillingConstants;
import com.badlogic.gdx.pay.android.googleplay.ResponseCode;
import static com.badlogic.gdx.pay.android.googleplay.ResponseCode.BILLING_RESPONSE_RESULT_OK;
/**
 * Shared helpers for the Google Play billing response converters.
 */
class ResponseConverters {

    /**
     * Verifies that the given response bundle carries a successful billing
     * response code.
     *
     * @param skuDetailsResponse the bundle returned by the billing service
     * @throws IllegalArgumentException if the response code is missing,
     *         unknown, or not {@code BILLING_RESPONSE_RESULT_OK}
     */
    protected static void assertResponseOk(Bundle skuDetailsResponse) {
        // -1 is not a valid billing code, so a missing key maps to an
        // unknown code and is rejected below.
        int rawCode = skuDetailsResponse.getInt(GoogleBillingConstants.RESPONSE_CODE, -1);
        ResponseCode parsedCode = ResponseCode.findByCode(rawCode);
        if (parsedCode == null) {
            throw new IllegalArgumentException("Bundle is missing key: " + GoogleBillingConstants.RESPONSE_CODE);
        }
        if (parsedCode != BILLING_RESPONSE_RESULT_OK) {
            throw new IllegalArgumentException("Unexpected response code: " + parsedCode + ", response: " + skuDetailsResponse);
        }
    }
}
|
#!/bin/sh
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Positional arguments supplied by the service wrapper.
smoke_script=$1
smoke_user=$2
conf_dir=$3
client_port=$4
security_enabled=$5
kinit_path_local=$6
smoke_user_keytab=$7
export ZOOKEEPER_EXIT_CODE=0
# Derive the quorum host list from zoo.cfg ("server.N=host:port:port" lines).
zkhosts=` grep "^server\.[[:digit:]]" $conf_dir/zoo.cfg | cut -f 2 -d '=' | cut -f 1 -d ':' | tr '\n' ' ' `
zk_node1=`echo $zkhosts | tr ' ' '\n' | head -n 1`
echo "zk_node1=$zk_node1"
if [[ $security_enabled == "true" ]]; then
kinitcmd="$kinit_path_local -kt $smoke_user_keytab $smoke_user"
su - $smoke_user -c "$kinitcmd"
fi
# Delete /zk_smoketest znode if exists
su - $smoke_user -c "source $conf_dir/zookeeper-env.sh ; echo delete /zk_smoketest | ${smoke_script} -server $zk_node1:$client_port"
# Create /zk_smoketest znode on one zookeeper server
su - $smoke_user -c "source $conf_dir/zookeeper-env.sh ; echo create /zk_smoketest smoke_data | ${smoke_script} -server $zk_node1:$client_port"
for i in $zkhosts ; do
echo "Running test on host $i"
# Verify the data associated with znode across all the nodes in the zookeeper quorum
su - $smoke_user -c "source $conf_dir/zookeeper-env.sh ; echo 'get /zk_smoketest' | ${smoke_script} -server $i:$client_port"
su - $smoke_user -c "source $conf_dir/zookeeper-env.sh ; echo 'ls /' | ${smoke_script} -server $i:$client_port"
output=$(su - $smoke_user -c "source $conf_dir/zookeeper-env.sh ; echo 'get /zk_smoketest' | ${smoke_script} -server $i:$client_port")
echo $output | grep smoke_data
if [[ $? -ne 0 ]] ; then
echo "Data associated with znode /zk_smoketests is not consistent on host $i"
((ZOOKEEPER_EXIT_CODE=$ZOOKEEPER_EXIT_CODE+1))
fi
done
# Clean up the test znode; failures above are summarized by the exit code.
su - $smoke_user -c "source $conf_dir/zookeeper-env.sh ; echo 'delete /zk_smoketest' | ${smoke_script} -server $zk_node1:$client_port"
if [[ "$ZOOKEEPER_EXIT_CODE" -ne "0" ]] ; then
echo "Zookeeper Smoke Test: Failed"
else
echo "Zookeeper Smoke Test: Passed"
fi
exit $ZOOKEEPER_EXIT_CODE
|
#
# Cookbook:: acme
# Recipe:: default
#
# Copyright:: 2017, The Authors, All Rights Reserved.
# Install the Chef DK and register its Ruby in the global shell init so
# `chef shell-init` applies to every login shell.
chef_dk 'my_chef_dk' do
  global_shell_init true
  action :install
end

# Install the latest kubectl and gcloud CLI tools.
kubectl 'install latest kubectl'
# Fixed typo in the resource name: was 'install the lates gcloud'.
gcloud 'install the latest gcloud'
|
#!/bin/sh
#
# K2HR3 Container Registration Sidecar
#
# Copyright 2019 Yahoo! Japan Corporation.
#
# K2HR3 is K2hdkc based Resource and Roles and policy Rules, gathers
# common management information for the cloud.
# K2HR3 can dynamically manage information as "who", "what", "operate".
# These are stored as roles, resources, policies in K2hdkc, and the
# client system can dynamically read and modify these information.
#
# For the full copyright and license information, please view
# the license file that was distributed with this source code.
#
# AUTHOR: Takeshi Nakatani
# CREATE: Thu Jul 4 2019
# REVISION:
#
########################################################################
# k2hr3-k8s-init.sh
########################################################################
# This shell script is for registration/deletion the container created in
# kubernetes to/from the K2HR3 role member.
# This file is expected to be launched as a Sidecar container.
########################################################################
#
# Environments
#
# This script expects the following environment variables to be set.
# These values are used as elements of CUK data when registering to K2HR3 Role members.
#
# K2HR3_NODE_NAME node name on this container's node(spec.nodeName)
# K2HR3_NODE_IP node host ip address on this container's node(status.hostIP)
# K2HR3_POD_NAME pod name containing this container(metadata.name)
# K2HR3_POD_NAMESPACE pod namespace for this container(metadata.namespace)
# K2HR3_POD_SERVICE_ACCOUNT pod service account for this container(spec.serviceAccountName)
# K2HR3_POD_ID pod id containing this container(metadata.uid)
# K2HR3_POD_IP pod ip address containing this container(status.podIP)
#
# The following values are also added to CUK data.
#
# K2HR3_CONTAINER_ID This value is the <docker id> that this script reads from /proc/<pid>/cgroups.
# (kubernetes uses this <docker id> as the <container id>.)
#
# Filled in at runtime from /proc/<pid>/cgroup during registration (see below).
K2HR3_CONTAINER_ID=""
#
# Files on volume disk
#
# This script outputs the following files under the volume disk.
# These file contents can be used when accessing K2HR3.
# It also contains a script for removing containers from K2HR3 role members.
#
# K2HR3_FILE_ROLE yrn full path to the role
# K2HR3_FILE_CUK cuk value for url argument to K2HR3 API(PUT/GET/DELETE/etc)
# K2HR3_FILE_CUKENC urlencoded cuk value
# K2HR3_FILE_APIARG packed cuk argument("extra=...&cuk=value") to K2HR3 API(PUT/GET/DELETE/etc)
# K2HR3_FILE_DEINIT_SH Shell script to delete from K2HR3 role member.
#
K2HR3_FILE_ROLE="k2hr3-role"
K2HR3_FILE_CUK="k2hr3-cuk"
K2HR3_FILE_CUKENC="k2hr3-cukencode"
K2HR3_FILE_APIARG="k2hr3-apiarg"
K2HR3_FILE_DEINIT_SH="k2hr3-k8s-deinit.sh"
#
# Options
#
# Prints usage; $1 is the program name.
func_usage()
{
echo ""
echo "Usage: $1 [ -reg | -del ] [options...]"
echo " -reg | -del Specifies the behavior(registration or deletion) of this script."
echo " -rtoken <K2HR3 Role token> The Role token for registration(not be omitted for registration)."
echo " -role <K2HR3 Role YRN path> The YRN full path of the Role to be registered as a member(not be omitted)."
echo " -host <K2HR3 API server> The hostname or IP address of the K2HR3 API server(not be omitted)."
echo " -port <K2HR3 API port> The port number of the K2HR3 API server(443 or 80 is set by default)."
echo " -schema <K2HR3 API schema> The schema(http or https) of the K2HR3 API(\"https\" is set by default)."
echo " -uri <K2HR3 API uri path> The Role member registration/deletion URI path(\"/v1/role\" is set by default)."
echo " -volume <mount path> The path where volume disk was mounted(\"/k2hr3-volume\" is set by default)."
echo ""
echo "Environments"
echo " K2HR3_NODE_NAME node name on this container's node(spec.nodeName)"
echo " K2HR3_NODE_IP node host ip address on this container's node(status.hostIP)"
echo " K2HR3_POD_NAME pod name containing this container(metadata.name)"
echo " K2HR3_POD_NAMESPACE pod namespace for this container(metadata.namespace)"
echo " K2HR3_POD_SERVICE_ACCOUNT pod service account for this container(spec.serviceAccountName)"
echo " K2HR3_POD_ID pod id containing this container(metadata.uid)"
echo " K2HR3_POD_IP pod ip address containing this container(status.podIP)"
echo ""
}
#
# Common
#
PRGNAME=`basename $0`
SRCTOP=`cd $(dirname $0); pwd`
#
# Parse options
#
K2HR3_BEHAVIOR=
K2HR3_ROLE_TOKEN=""
K2HR3_ROLE_YRN=""
K2HR3_API_HOST=""
K2HR3_API_PORT=
K2HR3_API_SCHEMA=""
K2HR3_API_URI=""
K2HR3_VOLUME_PATH=""
# The "X" prefix in the comparisons below keeps the tests safe when $1 is
# empty or begins with a dash.
while [ $# -ne 0 ]; do
if [ "X$1" = "X" ]; then
break
elif [ "X$1" = "X-h" -o "X$1" = "X-H" -o "X$1" = "X--help" -o "X$1" = "X--HELP" ]; then
func_usage $PRGNAME
exit 0
elif [ "X$1" = "X-reg" -o "X$1" = "X-REG" ]; then
if [ "X${K2HR3_BEHAVIOR}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set behavior(registration or deletion)." 1>&2
exit 1
fi
K2HR3_BEHAVIOR="reg"
elif [ "X$1" = "X-del" -o "X$1" = "X-DEL" ]; then
if [ "X${K2HR3_BEHAVIOR}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set behavior(registration or deletion)." 1>&2
exit 1
fi
K2HR3_BEHAVIOR="del"
elif [ "X$1" = "X-rtoken" -o "X$1" = "X-RTOKEN" ]; then
if [ "X${K2HR3_ROLE_TOKEN}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set role token(${K2HR3_ROLE_TOKEN})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -rtoken option is specified without parameter." 1>&2
exit 1
fi
K2HR3_ROLE_TOKEN=$1
elif [ "X$1" = "X-role" -o "X$1" = "X-ROLE" ]; then
if [ "X${K2HR3_ROLE_YRN}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set role yrn full path(${K2HR3_ROLE_YRN})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -role option is specified without parameter." 1>&2
exit 1
fi
K2HR3_ROLE_YRN=$1
elif [ "X$1" = "X-host" -o "X$1" = "X-HOST" ]; then
if [ "X${K2HR3_API_HOST}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set K2HR3 API server(${K2HR3_API_HOST})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -host option is specified without parameter." 1>&2
exit 1
fi
K2HR3_API_HOST=$1
elif [ "X$1" = "X-port" -o "X$1" = "X-PORT" ]; then
if [ "X${K2HR3_API_PORT}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set K2HR3 API port(${K2HR3_API_PORT})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -port option is specified without parameter." 1>&2
exit 1
fi
# check number
# expr exits with status 2+ when the operand is not numeric
expr "$1" + 1 >/dev/null 2>&1
if [ $? -ge 2 ]; then
echo "[ERROR] ${PRGNAME} : -port option parameter is not number($1)." 1>&2
exit 1
fi
K2HR3_API_PORT=$1
elif [ "X$1" = "X-schema" -o "X$1" = "X-SCHEMA" ]; then
if [ "X${K2HR3_API_SCHEMA}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set K2HR3 API schema(${K2HR3_API_SCHEMA})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -schema option is specified without parameter." 1>&2
exit 1
fi
if [ "X$1" = "Xhttp" -o "X$1" = "XHTTP" ]; then
K2HR3_API_SCHEMA="http"
elif [ "X$1" = "Xhttps" -o "X$1" = "XHTTPS" ]; then
K2HR3_API_SCHEMA="https"
else
echo "[ERROR] ${PRGNAME} : -schema option parameter is wrong value($1)." 1>&2
exit 1
fi
elif [ "X$1" = "X-uri" -o "X$1" = "X-URI" ]; then
if [ "X${K2HR3_API_URI}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set registration/deletion URI path(${K2HR3_API_URI})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -uri option is specified without parameter." 1>&2
exit 1
fi
K2HR3_API_URI=$1
elif [ "X$1" = "X-volume" -o "X$1" = "X-VOLUME" ]; then
if [ "X${K2HR3_VOLUME_PATH}" != "X" ]; then
echo "[ERROR] ${PRGNAME} : already set volume disk path(${K2HR3_VOLUME_PATH})." 1>&2
exit 1
fi
shift
if [ $# -eq 0 ]; then
echo "[ERROR] ${PRGNAME} : -volume option is specified without parameter." 1>&2
exit 1
fi
K2HR3_VOLUME_PATH=$1
else
echo "[ERROR] ${PRGNAME} : unknown option($1) is specified." 1>&2
exit 1
fi
shift
done
#
# Check options
#
if [ "X${K2HR3_BEHAVIOR}" = "X" ]; then
echo "[ERROR] ${PRGNAME} : Must specify the behavior option of this script: registration(-reg) or deletion(-del)." 1>&2
exit 1
fi
if [ "X${K2HR3_ROLE_TOKEN}" = "X" ]; then
if [ "X${K2HR3_BEHAVIOR}" = "Xreg" ]; then
echo "[ERROR] ${PRGNAME} : -rtoken option is not specified." 1>&2
exit 1
fi
fi
if [ "X${K2HR3_ROLE_YRN}" = "X" ]; then
echo "[ERROR] ${PRGNAME} : -role option is not specified." 1>&2
exit 1
fi
if [ "X${K2HR3_API_HOST}" = "X" ]; then
echo "[ERROR] ${PRGNAME} : -host option is not specified." 1>&2
exit 1
fi
# Fill in whichever of port/schema was omitted from the other (https:443, http:80).
if [ "X${K2HR3_API_PORT}" = "X" -a "X${K2HR3_API_SCHEMA}" = "X" ]; then
K2HR3_API_PORT=443
K2HR3_API_SCHEMA="https"
elif [ "X${K2HR3_API_PORT}" != "X" -a "X${K2HR3_API_SCHEMA}" = "X" ]; then
if [ ${K2HR3_API_PORT} -eq 80 ]; then
K2HR3_API_SCHEMA="http"
else
K2HR3_API_SCHEMA="https"
fi
elif [ "X${K2HR3_API_PORT}" = "X" -a "X${K2HR3_API_SCHEMA}" != "X" ]; then
if [ "X${K2HR3_API_SCHEMA}" = "Xhttp" ]; then
K2HR3_API_PORT=80
else
K2HR3_API_PORT=443
fi
fi
if [ "X${K2HR3_API_URI}" = "X" ]; then
K2HR3_API_URI="/v1/role"
fi
if [ "X${K2HR3_VOLUME_PATH}" = "X" ]; then
K2HR3_VOLUME_PATH="/k2hr3-volume"
fi
if [ "X${K2HR3_BEHAVIOR}" = "Xreg" ]; then
if [ ! -d ${K2HR3_VOLUME_PATH} ]; then
echo "[ERROR] ${PRGNAME} : volume disk(${K2HR3_VOLUME_PATH}) is not found or not directory." 1>&2
exit 1
fi
fi
#
# Processing
#
if [ "X${K2HR3_BEHAVIOR}" = "Xreg" ]; then
#
# Registration
#
#
# Make container id with checking pod id
#
local_proc_ids=`ls -1 /proc/ | grep -E "[0-9]+" 2>/dev/null`
if [ $? -ne 0 ]; then
echo "[ERROR] ${PRGNAME} : Could not find any /proc/<process id> directory." 1>&2
exit 1
fi
local_uid_containerid=""
for local_procid in ${local_proc_ids}; do
if [ ! -f /proc/${local_procid}/cgroup ]; then
continue
fi
local_all_line=`cat /proc/${local_procid}/cgroup`
if [ $? -ne 0 ]; then
continue
fi
# Strip the cgroup path down to "<pod uid>/<container id>"; the sed covers
# both cgroupfs ("...pod<uid>/docker-<id>.scope") and plain docker layouts.
for local_line in ${local_all_line}; do
local_uid_containerid=`echo ${local_line} | sed -e 's#.*pod##g' -e 's#\.slice##g' -e 's#\.scope##g' -e 's#docker-##g' 2>/dev/null`
if [ $? -ne 0 ]; then
continue
fi
if [ "X${local_uid_containerid}" != "X" ]; then
break
fi
done
if [ "X${local_uid_containerid}" != "X" ]; then
break
fi
done
if [ "X${local_uid_containerid}" != "X" ]; then
K2HR3_TMP_POD_ID=`echo ${local_uid_containerid} | sed -e 's#/# #g' 2>/dev/null | awk '{print $1}' 2>/dev/null`
K2HR3_CONTAINER_ID=`echo ${local_uid_containerid} | sed -e 's#/# #g' 2>/dev/null | awk '{print $2}' 2>/dev/null`
if [ "X${K2HR3_POD_ID}" = "X" ]; then
K2HR3_POD_ID=${K2HR3_TMP_POD_ID}
else
if [ "X${K2HR3_POD_ID}" != "X${K2HR3_TMP_POD_ID}" ]; then
echo "[WARNING] ${PRGNAME} : Specified pod id(${K2HR3_POD_ID}) is not correct, so that use current pod id(${K2HR3_TMP_POD_ID}) instead of it." 1>&2
K2HR3_POD_ID=${K2HR3_TMP_POD_ID}
fi
fi
fi
# NOTE(review): these "[ -z ${VAR} ]" tests are unquoted; they behave only
# because the values never contain whitespace. Quoting ("${VAR}") would be safer.
if [ -z ${K2HR3_CONTAINER_ID} ]; then
echo "[ERROR] ${PRGNAME} : Could not get container id." 1>&2
exit 1
fi
#
# Check all parameters in environment
#
if [ -z ${K2HR3_NODE_NAME} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_NODE_NAME is not specified." 1>&2
exit 1
fi
if [ -z ${K2HR3_NODE_IP} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_NODE_IP is not specified." 1>&2
exit 1
fi
if [ -z ${K2HR3_POD_NAME} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_POD_NAME is not specified." 1>&2
exit 1
fi
if [ -z ${K2HR3_POD_NAMESPACE} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_POD_NAMESPACE is not specified." 1>&2
exit 1
fi
if [ -z ${K2HR3_POD_SERVICE_ACCOUNT} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_POD_SERVICE_ACCOUNT is not specified." 1>&2
exit 1
fi
if [ -z ${K2HR3_POD_ID} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_POD_ID is not specified." 1>&2
exit 1
fi
if [ -z ${K2HR3_POD_IP} ]; then
echo "[ERROR] ${PRGNAME} : Environment K2HR3_POD_IP is not specified." 1>&2
exit 1
fi
#
# Make CUK parameter
#
# The CUK parameter is a base64 url encoded value from following JSON object string(sorted keys by a-z).
# {
# "k8s_namespace": ${K2HR3_POD_NAMESPACE}
# "k8s_service_account": ${K2HR3_POD_SERVICE_ACCOUNT}
# "k8s_node_name": ${K2HR3_NODE_NAME},
# "k8s_node_ip": ${K2HR3_NODE_IP},
# "k8s_pod_name": ${K2HR3_POD_NAME},
# "k8s_pod_id": ${K2HR3_POD_ID}
# "k8s_pod_ip": ${K2HR3_POD_IP}
# "k8s_container_id": ${K2HR3_CONTAINER_ID}
# "k8s_k2hr3_rand": "random 32 byte value formatted hex string"
# }
#
# Base64 URL encoding converts the following characters.
# '+' to '-'
# '/' to '_'
# '='(end word) to '%3d'
#
K2HR3_REG_RAND=`od -vAn -tx8 -N16 < /dev/urandom 2>/dev/null | tr -d '[:blank:]' 2>/dev/null`
if [ $? -ne 0 ]; then
echo "[ERROR] ${PRGNAME} : Could not make 64 bytes random value for CUK value." 1>&2
exit 1
fi
local_cuk_string="{
\"k8s_container_id\":\"${K2HR3_CONTAINER_ID}\",
\"k8s_k2hr3_rand\":\"${K2HR3_REG_RAND}\",
\"k8s_namespace\":\"${K2HR3_POD_NAMESPACE}\",
\"k8s_node_ip\":\"${K2HR3_NODE_IP}\",
\"k8s_node_name\":\"${K2HR3_NODE_NAME}\",
\"k8s_pod_id\":\"${K2HR3_POD_ID}\",
\"k8s_pod_ip\":\"${K2HR3_POD_IP}\",
\"k8s_pod_name\":\"${K2HR3_POD_NAME}\",
\"k8s_service_account\":\"${K2HR3_POD_SERVICE_ACCOUNT}\"
}"
# NOTE(review): "echo -n" is not portable under #!/bin/sh (POSIX echo has no
# -n; dash prints "-n" literally). Works where /bin/sh is bash/busybox echo
# with -n support — confirm the base images before relying on it elsewhere.
local_cuk_base64=`echo -n ${local_cuk_string} 2>/dev/null | sed -e 's/ //g' 2>/dev/null | base64 2>/dev/null | tr -d '\n' 2>/dev/null`
if [ $? -ne 0 ]; then
echo "[ERROR] ${PRGNAME} : Could not make base64 string for CUK value." 1>&2
exit 1
fi
local_cuk_base64_urlenc=`echo -n ${local_cuk_base64} 2>/dev/null | sed -e 's/+/-/g' -e 's#/#_#g' -e 's/=/%3d/g' 2>/dev/null`
if [ $? -ne 0 ]; then
echo "[ERROR] ${PRGNAME} : Could not make base64 url encode string for CUK value." 1>&2
exit 1
fi
#
# Make EXTRA parameter
#
# Currently, the value of "extra" is "k8s-auto-v1" only.
#
local_extra_string="k8s-auto-v1"
#
# Call K2HR3 REST API
#
# Example:
# curl -s -S -X PUT -H "x-auth-token: R=<ROLE TOKEN>" "http(s)://<k2hr3 api host>:<port>/<uri>/<role yrn>?extra=k8s-auto-v1&cuk=<cuk parameter>"
#
curl -s -S -X PUT -H "x-auth-token: R=${K2HR3_ROLE_TOKEN}" "${K2HR3_API_SCHEMA}://${K2HR3_API_HOST}:${K2HR3_API_PORT}${K2HR3_API_URI}/${K2HR3_ROLE_YRN}?extra=${local_extra_string}&cuk=${local_cuk_base64_urlenc}"
if [ $? -ne 0 ]; then
echo "[ERROR] ${PRGNAME} : Failed registration to role member." 1>&2
exit 1
fi
#
# Make files in volume disk
#
echo -n "${K2HR3_ROLE_YRN}" > ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_ROLE}
echo -n "${local_cuk_base64}" > ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_CUK}
echo -n "${local_cuk_base64_urlenc}" > ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_CUKENC}
echo -n "cuk=${local_cuk_base64_urlenc}" > ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_APIARG}
# Generate the de-registration helper; the heredoc below expands the current
# API endpoint and CUK values into the emitted script.
cat << EOT > ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_DEINIT_SH}
#!/bin/sh
curl -s -S -X DELETE "${K2HR3_API_SCHEMA}://${K2HR3_API_HOST}:${K2HR3_API_PORT}${K2HR3_API_URI}/${K2HR3_ROLE_YRN}?cuk=${local_cuk_base64_urlenc}"
if [ $? -ne 0 ]; then
echo "[ERROR] Failed deletion from role member." 1>&2
exit 1
fi
exit 0
EOT
chmod 0500 ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_DEINIT_SH}
else
#
# Deletion
#
#
# Call K2HR3 REST API
#
# Example:
# curl -s -S -X DELETE "http(s)://<k2hr3 api host>:<port>/<uri>/<role yrn>?cuk=<cuk parameter>"
#
K2HR3_API_PARAMS=`cat ${K2HR3_VOLUME_PATH}/${K2HR3_FILE_APIARG} 2>/dev/null`
curl -s -S -X DELETE "${K2HR3_API_SCHEMA}://${K2HR3_API_HOST}:${K2HR3_API_PORT}${K2HR3_API_URI}/${K2HR3_ROLE_YRN}?${K2HR3_API_PARAMS}"
if [ $? -ne 0 ]; then
echo "[ERROR] ${PRGNAME} : Failed deletion from role member." 1>&2
exit 1
fi
fi
#
# VIM modelines
#
# vim:set ts=4 fenc=utf-8:
#
|
<filename>node_modules/@carbon/icons/umd/event--schedule/20.js
// Machine-generated UMD build output from @carbon/icons; do not hand-edit.
// The icon is described as an abstract element tree ({elem, attrs, content})
// consumed by the carbon icon rendering helpers.
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.EventSchedule20 = factory());
}(this, (function () { 'use strict';
// 20px "event--schedule" glyph drawn on a 32x32 viewBox.
var _20 = {
elem: 'svg',
attrs: {
xmlns: 'http://www.w3.org/2000/svg',
viewBox: '0 0 32 32',
width: 20,
height: 20,
},
content: [
{
elem: 'path',
attrs: {
d:
'M21 30a8 8 0 1 1 8-8 8 8 0 0 1-8 8zm0-14a6 6 0 1 0 6 6 6 6 0 0 0-6-6z',
},
},
{
elem: 'path',
attrs: { d: 'M22.59 25L20 22.41V18h2v3.59l2 2L22.59 25z' },
},
{
elem: 'path',
attrs: {
d:
'M28 6a2 2 0 0 0-2-2h-4V2h-2v2h-8V2h-2v2H6a2 2 0 0 0-2 2v20a2 2 0 0 0 2 2h4v-2H6V6h4v2h2V6h8v2h2V6h4v6h2z',
},
},
],
name: 'event--schedule',
size: 20,
};
return _20;
})));
|
import otherpackage.OtherPackagePublicEnum;
public class Main {
    /** used by {@link #basisCall} */
    static private int basisTestValue = 12;

    /** Names of the twenty enum constants each lookup test resolves. */
    static private final String[] ENUM_NAMES = {
        "ZERO", "ONE", "TWO", "THREE", "FOUR", "FIVE", "SIX", "SEVEN",
        "EIGHT", "NINE", "TEN", "ELEVEN", "TWELVE", "THIRTEEN", "FOURTEEN",
        "FIFTEEN", "SIXTEEN", "SEVENTEEN", "EIGHTEEN", "NINETEEN"
    };

    /**
     * Entry point. Pass "--timing" as the first argument to force
     * printing of the measured per-iteration times.
     */
    static public void main(String[] args) throws Exception {
        boolean timing = (args.length >= 1) && args[0].equals("--timing");
        run(timing);
    }

    /**
     * Runs the three Enum.valueOf() benchmarks plus the basis benchmark,
     * then checks that the average enum-lookup time is within 25x of the
     * basis time. Timings are printed when requested or when the check
     * fails.
     *
     * @param timing if true, always print the measured timings
     */
    static public void run(boolean timing) {
        preTest();
        long time0 = System.nanoTime();
        int count1 = test1(500);
        long time1 = System.nanoTime();
        int count2 = test2(500);
        long time2 = System.nanoTime();
        int count3 = test3(500);
        long time3 = System.nanoTime();
        int count4 = basis(2000);
        long time4 = System.nanoTime();
        System.out.println("basis: performed " + count4 + " iterations");
        System.out.println("test1: performed " + count1 + " iterations");
        System.out.println("test2: performed " + count2 + " iterations");
        System.out.println("test3: performed " + count3 + " iterations");
        double msec1 = (time1 - time0) / (double) count1 / 1000000;
        double msec2 = (time2 - time1) / (double) count2 / 1000000;
        double msec3 = (time3 - time2) / (double) count3 / 1000000;
        double basisMsec = (time4 - time3) / (double) count4 / 1000000;
        double avg = (msec1 + msec2 + msec3) / 3;
        if (avg < (basisMsec * 25)) {
            System.out.println("Timing is acceptable.");
        } else {
            System.out.println("Iterations are taking too long!");
            timing = true;
        }
        if (timing) {
            System.out.printf("basis time: %.3g msec\n", basisMsec);
            System.out.printf("test1: %.3g msec per iteration\n", msec1);
            System.out.printf("test2: %.3g msec per iteration\n", msec2);
            System.out.printf("test3: %.3g msec per iteration\n", msec3);
        }
    }

    /** Sanity-checks basic enum lookup before any timing is done. */
    static public void preTest() {
        /*
         * This is meant to ensure that the basic enum functionality
         * really is working.
         */
        Class<SamePackagePublicEnum> c = SamePackagePublicEnum.class;
        System.out.println(Enum.valueOf(c, "FOUR"));
        System.out.println(Enum.valueOf(c, "ONE"));
        System.out.println(Enum.valueOf(c, "FOURTEEN"));
        System.out.println(Enum.valueOf(c, "NINE"));
        System.out.println(Enum.valueOf(c, "FIVE"));
        System.out.println(Enum.valueOf(c, "TWELVE"));
        System.out.println(Enum.valueOf(c, "ZERO").getClass().getName());
    }

    /**
     * Measures the baseline cost the tests are compared against.
     * Returns the number of calls performed (20 per iteration).
     */
    static public int basis(int iters) {
        /*
         * The basis time is the time taken to call a static method
         * passing two arguments, which in turn accesses a static
         * variable, compares a string, and does a little trivial math
         * and a trivial comparison. (That is, this is a mini
         * "omnibus" performance metric.) This is clearly going to be
         * much faster than Enum.valueOf(), which is why we multiply
         * the time before testing.
         *
         * The calls are deliberately left unrolled (no array/loop over
         * names) so the baseline measures as little loop overhead as
         * possible.
         */
        for (int i = iters; i > 0; i--) {
            basisCall(i, "aname");
            basisCall(i, "bname");
            basisCall(i, "cname");
            basisCall(i, "dname");
            basisCall(i, "ename");
            basisCall(i, "fname");
            basisCall(i, "gname");
            basisCall(i, "hname");
            basisCall(i, "iname");
            basisCall(i, "jname");
            basisCall(i, "kname");
            basisCall(i, "lname");
            basisCall(i, "mname");
            basisCall(i, "nname");
            basisCall(i, "oname");
            basisCall(i, "pname");
            basisCall(i, "qname");
            basisCall(i, "rname");
            basisCall(i, "sname");
            basisCall(i, "tname");
        }
        return iters * 20;
    }

    /** Trivial workload used by {@link #basis}: string compare + math. */
    static public int basisCall(int i, String name) {
        int compare = name.compareTo("fuzzbot");
        if (i < (basisTestValue * compare)) {
            return basisTestValue;
        } else {
            return i;
        }
    }

    /**
     * Shared lookup loop: resolves all twenty constant names of {@code c}
     * once per iteration. Extracted so the three tests time identical
     * work; the small array-iteration overhead is the same for each test
     * and negligible next to Enum.valueOf() itself.
     *
     * @return number of lookups performed (20 per iteration)
     */
    static private <E extends Enum<E>> int valueOfLoop(Class<E> c, int iters) {
        for (int i = iters; i > 0; i--) {
            for (String name : ENUM_NAMES) {
                Enum.valueOf(c, name);
            }
        }
        return iters * ENUM_NAMES.length;
    }

    /** Times Enum.valueOf() on a public enum in the same package. */
    static public int test1(int iters) {
        return valueOfLoop(SamePackagePublicEnum.class, iters);
    }

    /** Times Enum.valueOf() on a package-private enum in the same package. */
    static public int test2(int iters) {
        return valueOfLoop(SamePackagePrivateEnum.class, iters);
    }

    /** Times Enum.valueOf() on a public enum from another package. */
    static public int test3(int iters) {
        return valueOfLoop(OtherPackagePublicEnum.class, iters);
    }
}
|
#!/bin/bash
# platform = multi_platform_fedora,Red Hat Enterprise Linux 8
# Remediation script: write an OSPP-profile drop-in for the OpenSSH client,
# pinning the approved ciphers, host key types, MACs and kex algorithms.
file="/etc/ssh/ssh_config.d/02-ospp.conf"
# One quoted string spread over continuation lines; the embedded \n
# sequences (expanded by `echo -e`) become the newlines of the written file.
# The file is overwritten (>) so repeated runs stay idempotent.
echo -e "Match final all\n\
GSSAPIAuthentication no\n\
Ciphers aes256-ctr,aes256-cbc,aes128-ctr,aes128-cbc\n\
PubkeyAcceptedKeyTypes ssh-rsa,ecdsa-sha2-nistp384,ecdsa-sha2-nistp256\n\
MACs hmac-sha2-512,hmac-sha2-256\n\
KexAlgorithms ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group14-sha1\n" > "$file"
|
import * as jwt from 'jsonwebtoken';
import { Injectable, Logger } from '@nestjs/common';
import { UserService } from '../user/user.service';
import { JwtPayload } from './interfaces/jwt-payload.interface';
import { User } from '../user/entities/user.entity';
import { UserDto } from '../user/dto/user.dto';
import { debug } from 'console';
import { RegistrationStatus } from './interfaces/registration-status.interface';
import { UserCreateDto } from '../user/dto/user-create.dto';
import { TokenResponse } from './interfaces/token-response.interface';
import { toUserDto } from '@user/mapper';
@Injectable()
export class AuthService {
  constructor(private readonly usersService: UserService) {}

  private readonly logger = new Logger(AuthService.name);

  /**
   * Registers a new user. Never throws: failures are reported through the
   * returned RegistrationStatus.
   */
  async register(user: UserCreateDto): Promise<RegistrationStatus> {
    let status: RegistrationStatus = {
      success: true,
      message: 'user register successfully',
    };
    try {
      await this.usersService.create(user);
    } catch (err) {
      // Report the error message as a string; previously the raw error
      // object itself was placed in `message`, which serializes poorly.
      status = {
        success: false,
        message: err instanceof Error ? err.message : String(err),
      };
    }
    return status;
  }

  /**
   * Issues a signed JWT for the given user, valid for one hour.
   *
   * SECURITY NOTE(review): the signing secret 'Codebrains' is hard-coded;
   * it should be loaded from configuration (e.g. an environment variable).
   * Left unchanged here because existing tokens would be invalidated.
   */
  createToken(user: User): TokenResponse {
    const expiresIn = 3600;
    const accessToken = jwt.sign(
      {
        id: user.id,
        email: user.email,
        firstname: user.firstName,
        lastname: user.lastName,
      },
      'Codebrains',
      { expiresIn },
    );
    const tokenResponse: TokenResponse = {
      expiresIn,
      accessToken,
    };
    return tokenResponse;
  }

  /** Resolves the user referenced by an already-verified JWT payload. */
  async validateUserToken(payload: JwtPayload): Promise<UserDto> {
    const user: User = await this.usersService.findById(payload.id);
    const userDto: UserDto = toUserDto(user);
    return userDto;
  }

  /**
   * Checks email/password credentials. Returns the user (minus the
   * password field) on success, null otherwise.
   */
  async validateUser(email: string, password: string): Promise<UserDto> {
    const user = await this.usersService.findByEmail(email);
    // `await` the comparison so an async comparePassword() is not treated
    // as a truthy Promise (which would have accepted any password).
    // Awaiting a plain boolean is harmless if the method is synchronous.
    if (user && (await user.comparePassword(password))) {
      this.logger.log('password check success');
      const { password: _omitted, ...result } = user;
      return result;
    }
    return null;
  }
}
|
<gh_stars>0
import { Schema, model, SchemaTypes } from 'mongoose';
import { Users } from './index';
// Schema for a directed friend request between two Users.
var friendRelationshipSchema = new Schema({
    // Sender of the friend request.
    from: {
        type: SchemaTypes.ObjectId,
        ref: 'Users',
        required: true,
    },
    // Recipient of the friend request.
    to: {
        type: SchemaTypes.ObjectId,
        ref: 'Users',
        required: true,
    },
    // Set to true once the recipient accepts the request.
    accepted: {
        type: Boolean,
        default: false,
    }
},
    {
        // Maintain createdAt/updatedAt automatically.
        timestamps: true,
    })
// friendRelationshipSchema.post('save', function (doc, next) {
// Users.updateOne({ _id: doc.from }, { $push: { friendRequests: doc._id } }, (err) => {
// if (err)
// next(err);
// });
// Users.updateOne({ _id: doc.to }, { $push: { friendRequests: doc._id } }, (err) => {
// if (err)
// next(err);
// });
// next();
// })
// After a friend request is updated (i.e. accepted) via updateOne, add each
// user to the other's `friends` list. The user ids are taken from the
// query's conditions because updateOne middleware does not receive the
// matched document.
//
// Rewritten to await both updates before signalling completion: the
// original called next() synchronously before either callback ran, and
// could invoke next() up to three times (once unconditionally, once per
// failed update).
friendRelationshipSchema.post('updateOne', async function (doc, next) {
    const { from, to } = this._conditions;
    try {
        await Users.updateOne({ _id: from }, { $push: { friends: to } });
        await Users.updateOne({ _id: to }, { $push: { friends: from } });
        next();
    } catch (err) {
        next(err);
    }
})
// Model registered as 'Friend-requests' (mongoose pluralizes/lowercases the
// backing collection name).
var FriendRelationships = model('Friend-requests', friendRelationshipSchema)
module.exports = FriendRelationships
#!/bin/bash
# Copyright (c) 2019 P3TERX
# From https://github.com/P3TERX/Actions-OpenWrt
# Fail fast: abort on any error, including failures inside pipelines.
set -eo pipefail
# All three OpenWrt directory variables must be provided by the CI env.
if [ -z "${OPENWRT_COMPILE_DIR}" ] || [ -z "${OPENWRT_CUR_DIR}" ] || [ -z "${OPENWRT_SOURCE_DIR}" ]; then
    echo "::error::'OPENWRT_COMPILE_DIR', 'OPENWRT_CUR_DIR' or 'OPENWRT_SOURCE_DIR' is empty" >&2
    exit 1
fi
# TEST=1: skip the real (slow) build and emit dummy artifacts so the rest
# of the pipeline can be exercised.
if [ "x${TEST}" = "x1" ]; then
    mkdir -p "${OPENWRT_COMPILE_DIR}/bin/targets/x86/64/packages"
    mkdir -p "${OPENWRT_COMPILE_DIR}/bin/packages"
    echo "Dummy firmware" > "${OPENWRT_COMPILE_DIR}/bin/targets/x86/64/firmware.bin"
    echo "Dummy packages" > "${OPENWRT_COMPILE_DIR}/bin/targets/x86/64/packages/packages.tar.gz"
    echo "Dummy packages" > "${OPENWRT_COMPILE_DIR}/bin/packages/packages.tar.gz"
    exit 0
fi
# Run `make` inside ${OPENWRT_CUR_DIR} (in a subshell so the caller's cwd
# is untouched). MODE=m uses nproc+1 parallel jobs; MODE=s falls back to a
# verbose single-threaded build; any other MODE is an error.
compile() {
    (
        cd "${OPENWRT_CUR_DIR}"
        case "x${MODE}" in
            xm)
                local nthread=$(($(nproc) + 1))
                echo "${nthread} thread compile: $*"
                make -j${nthread} "$@"
                ;;
            xs)
                echo "Fallback to single thread compile: $*"
                make -j1 V=s "$@"
                ;;
            *)
                echo "No MODE specified" >&2
                exit 1
                ;;
        esac
    )
}
# Optional per-profile hook executed right before the build starts.
echo "Executing pre_compile.sh"
if [ -f "${BUILDER_PROFILE_DIR}/pre_compile.sh" ]; then
    /bin/bash "${BUILDER_PROFILE_DIR}/pre_compile.sh"
fi
echo "Compiling..."
# OPT_PACKAGE_ONLY=1 compiles and indexes packages only; otherwise do a
# full firmware build.
if [ "x${OPT_PACKAGE_ONLY}" != "x1" ]; then
    compile
else
    compile "package/compile"
    compile "package/index"
fi
# Keep the .config used for this build next to the artifacts.
cd "${OPENWRT_CUR_DIR}"
cp .config bin/
|
import type Player from '@mafia/structures/Player';
import type { Awaited } from '@sapphire/framework';
import type { Message } from 'discord.js';
import { Attack, Defence } from '../managers/NightActionsManager';
import { allRoles } from '../roles';
import type Executioner from '../roles/neutral/Executioner';
import type Faction from './Faction';
// Factions whose members read as "innocent" (see Role#innocence).
const INNOCENT_FACTIONS = ['Town', 'Survivor', 'Jester', 'Amnesiac', '<NAME>', 'Juggernaut', 'Godfather', 'Executioner'];
// Result of Role#canUseAction: `check` says whether the action is usable
// right now; `reason` optionally explains a refusal.
export interface CanUseActionData {
	check: boolean;
	reason?: string;
}
abstract class Role {
	public name = '';
	public description = '';
	// Per-player stat overrides; null fields fall back to the role's base getters.
	public modifiers: RoleModifiers = {
		voteWeight: 1,
		innocence: null,
		defence: null,
		attack: null
	};
	public constructor(public player: Player) {}
	// Name shown to others; dead players whose role was cleaned show as "Cleaned".
	public get display(): string {
		if (this.player.cleaned && !this.player.isAlive) return 'Cleaned';
		return this.name;
	}
	// just for easier accessibility
	public get game() {
		return this.player.game;
	}
	// Whether this role reads as innocent, based purely on its faction.
	// NOTE(review): modifiers.innocence is not consulted here, unlike
	// defence/actualDefence below — confirm whether that is intentional.
	public get innocence() {
		return INNOCENT_FACTIONS.includes(this.faction.name);
	}
	// Base (unmodified) defence level; subclasses override.
	public get defence() {
		return Defence.None;
	}
	// Effective defence: modifier override when set, otherwise the base value.
	public get actualDefence() {
		return this.modifiers.defence ?? this.defence;
	}
	// eslint-disable-next-line @typescript-eslint/class-literal-property-style
	public get voteWeight() {
		return this.modifiers.voteWeight;
	}
	// Setup hook run when the role is assigned; subclasses may override.
	public init() {
		// noop
	}
	// When a Town member dies at night (not by day elimination), every living
	// Executioner whose target was this player becomes a Jester and is re-sent
	// their role PM.
	public async onDeath() {
		if (this.faction.name !== 'Town' || this.player.deathReason.includes('eliminated')) return;
		const executionersInGame = this.game.players.filter((player) => player.isAlive && player.role.name === 'Executioner');
		for (const executioner of executionersInGame) {
			if (this.player.user.id === (executioner.role as Executioner).target.user.id) {
				await executioner.user.send(this.game.t('roles/neutral:executionerTargetNKed'));
				const Jester = allRoles.get('Jester')!;
				executioner.role = new Jester(executioner);
				await executioner.sendPM();
			}
		}
	}
	public onNight() {
		// noop
	}
	public onDay() {
		// noop
	}
	// eslint-disable-next-line @typescript-eslint/no-unused-vars
	public onVisit(visitor: Player) {
		// noop
	}
	// Whether the player may use their night/day action; base roles cannot.
	public canUseAction(): CanUseActionData {
		return { check: false, reason: '' };
	}
	// Role categories such as Random Town, Neutral Evil
	public static categories: string[] = [];
	public static aliases: string[] = [];
	public static unique = false;
}
// Declaration merging with the class above: optional lifecycle members that
// concrete roles may implement, plus the faction assigned at game setup.
interface Role {
	onPmCommand(message: Message, command: string, ...args: string[]): void;
	onNight(): Awaited<any>;
	onDay(): Awaited<any>;
	onDeath(): Awaited<any>;
	onVisit(visitor: Player): Awaited<any>;
	faction: Faction;
}
// Override slots for a role's stats; null means "use the role's default".
export interface RoleModifiers {
	voteWeight: number;
	innocence: boolean | null;
	defence: Defence | null;
	attack: Attack | null;
}
export default Role;
|
<gh_stars>0
#pragma once

#include <cstdint>
#include <exception>
namespace fastnet::exceptions {
/// <summary>
/// This exception is thrown when a channel was requested that does not exist\n
/// \b Codes -\n
/// 0: Requested bad name\n
/// 1: Requested bad id\n
/// </summary>
class DLL_EXPORT InvalidChannelException : std::exception {
public:
InvalidChannelException(uint64_t code) : std::exception("No channel found matching criteria", code) {}
};
/// <summary>
/// This exception is thrown when an invalid endpoint operation was requested\n
/// \b Codes - \n
/// 0: The endpoint is bad \n
/// </summary>
class DLL_EXPORT InvalidEndPointException : std::exception {
public:
InvalidEndPointException(uint64_t code) : std::exception("The given endpoint is invalid", code) {}
};
/// <summary>
/// This exception is thrown when a peer has an invalid config\n
/// \b Codes -\n
/// 0: No Config\n
/// 1: No Channels\n
/// 2: No Unreliable Channel\n
/// 3: Max Connections < 1\n
/// 4: Send Rate < 1\n
/// </summary>
class DLL_EXPORT InvalidPeerConfigException : std::exception {
public:
InvalidPeerConfigException(uint64_t code) : std::exception("This peer either has none, or an invalid configuration", code) {}
};
}
|
package user
import (
"errors"
"github.com/ungerik/go-start/view"
)
// EmailConfirmationView returns a view that confirms a user's email address.
// The confirmation code will be passed in the GET parameter "code"; on
// success the user is logged in and, when profileURL is non-nil, offered a
// link to their profile.
func EmailConfirmationView(profileURL view.URL) view.View {
	return view.DynamicView(
		func(ctx *view.Context) (view.View, error) {
			confirmationCode, ok := ctx.Request.Params["code"]
			if !ok {
				// No code supplied at all.
				return view.DIV("error", view.HTML("Invalid email confirmation code!")), nil
			}
			userID, email, confirmed, err := ConfirmEmail(confirmationCode)
			if !confirmed {
				// Unknown or expired code; err may carry the lookup failure.
				return view.DIV("error", view.HTML("Invalid email confirmation code!")), err
			}
			// Confirming the email also signs the user in.
			LoginID(ctx.Session, userID)
			return view.Views{
				view.DIV("success", view.Printf("Email address %s confirmed!", email)),
				&view.If{
					Condition: profileURL != nil,
					Content: view.P(
						view.HTML("Continue to your "),
						view.A(profileURL, "profile..."),
					),
				},
			}, nil
		},
	)
}
// NewLoginForm returns a dynamic email/password login form view.
// A "from" request parameter overrides redirectURL as the post-login
// target, and an "email" parameter pre-fills the email field.
// Internal errors are hidden from the user unless debug mode is on.
func NewLoginForm(buttonText, class, errorMessageClass, successMessageClass string, redirectURL view.URL) view.View {
	return view.DynamicView(
		func(ctx *view.Context) (v view.View, err error) {
			if from, ok := ctx.Request.Params["from"]; ok {
				redirectURL = view.StringURL(from)
			}
			model := &LoginFormModel{}
			if email, ok := ctx.Request.Params["email"]; ok {
				model.Email.Set(email)
			}
			form := &view.Form{
				Class:               class,
				ErrorMessageClass:   errorMessageClass,
				SuccessMessageClass: successMessageClass,
				SuccessMessage:      "Login successful",
				SubmitButtonText:    buttonText,
				FormID:              "gostart_user_login",
				GetModel:            view.FormModel(model),
				OnSubmit: func(form *view.Form, formModel interface{}, ctx *view.Context) (string, view.URL, error) {
					m := formModel.(*LoginFormModel)
					ok, err := LoginEmailPassword(ctx.Session, m.Email.Get(), m.Password.Get())
					if err != nil {
						if view.Config.Debug.Mode {
							return "", nil, err
						} else {
							// Hide internal details outside of debug mode.
							// (Typo "ocoured" fixed.)
							return "", nil, errors.New("An internal error occurred")
						}
					}
					if !ok {
						return "", nil, errors.New("Wrong email and password combination")
					}
					return "", redirectURL, nil
				},
			}
			return form, nil
		},
	)
}
// LogoutView returns a view that ends the session and redirects.
// If redirect is nil, the redirect will go to "/"
func LogoutView(redirect view.URL) view.View {
	return view.RenderView(
		func(ctx *view.Context) (err error) {
			Logout(ctx.Session)
			if redirect != nil {
				return view.Redirect(redirect.URL(ctx))
			}
			return view.Redirect("/")
		},
	)
}
// NewSignupForm returns an email/password signup form.
// A confirmation email is sent before the account is persisted; existing
// users without a confirmed password get their password replaced instead
// of a duplicate account.
// confirmationPage must have the confirmation code as first URL parameter
func NewSignupForm(buttonText, class, errorMessageClass, successMessageClass string, confirmationURL, redirectURL view.URL) *view.Form {
	return &view.Form{
		Class:               class,
		ErrorMessageClass:   errorMessageClass,
		SuccessMessageClass: successMessageClass,
		SuccessMessage:      Config.ConfirmationMessage.Sent,
		SubmitButtonText:    buttonText,
		FormID:              "gostart_user_signup",
		GetModel: func(form *view.Form, ctx *view.Context) (interface{}, error) {
			return &EmailPasswordFormModel{}, nil
		},
		OnSubmit: func(form *view.Form, formModel interface{}, ctx *view.Context) (string, view.URL, error) {
			m := formModel.(*EmailPasswordFormModel)
			email := m.Email.Get()
			password := m.Password1.Get()
			var user User
			found, err := WithEmail(email, &user)
			if err != nil {
				return "", nil, err
			}
			if found {
				// Re-signup of an unconfirmed account: refuse only when a
				// confirmed email+password pair already exists.
				if user.EmailPasswordConfirmed() {
					return "", nil, errors.New("A user with that email and a password already exists")
				}
				user.Password.SetHashed(password)
			} else {
				// Config.Collection.InitDocument(&user)
				err = user.SetEmailPassword(email, password)
				if err != nil {
					return "", nil, err
				}
			}
			// SendConfirmationEmail returns a channel; block until the send
			// completes (or fails) before persisting.
			err = <-user.Email[0].SendConfirmationEmail(ctx, confirmationURL)
			if err != nil {
				return "", nil, err
			}
			if found {
				err = Config.Collection.UpdateSubDocumentWithID(user.ID, "", &user)
			} else {
				err = Config.Collection.InitAndSaveDocument(&user)
			}
			return "", redirectURL, err
		},
	}
}
|
# Bats tests for the `lets completion` command.
load test_helpers
# Every test starts inside the completion fixture directory with a clean slate.
setup() {
    cd ./tests/completion
    cleanup
}
# Without a lets.yaml the command still prints help and must not create .lets.
@test "completion: should return completion if no lets.yaml" {
    cd ./no_lets_file
    cleanup
    run lets completion
    printf "%s\n" "${lines[@]}"
    [[ $status == 0 ]]
    [[ "${lines[0]}" == "Generates completion scripts for bash, zsh" ]]
    [[ ! -d .lets ]]
}
# With a lets.yaml the command prints help and creates the .lets cache dir.
@test "completion: should return completion if lets.yaml exists" {
    run lets completion
    printf "%s\n" "${lines[@]}"
    [[ $status == 0 ]]
    [[ "${lines[0]}" == "Generates completion scripts for bash, zsh" ]]
    [[ -d .lets ]]
}
# --list prints command names, sorted alphabetically.
@test "completion: should return list of commands" {
    run lets completion --list
    printf "%s\n" "${lines[@]}"
    [[ $status == 0 ]]
    [[ "${lines[0]}" == "bar" ]]
    [[ "${lines[1]}" == "foo" ]]
}
# --verbose appends each command's description after a colon.
@test "completion: should return verbose list of commands" {
    run lets completion --list --verbose
    printf "%s\n" "${lines[@]}"
    [[ $status == 0 ]]
    [[ "${lines[0]}" == "bar:Print bar" ]]
    [[ "${lines[1]}" == "foo:Print foo" ]]
}
|
import { EnvironmentInterface } from './environment-interface';
// Development (non-production) settings for the WEB build target.
export const environment: EnvironmentInterface = {
  production: false,
  environment: 'WEB',
  // Polling interval for REST refreshes, in milliseconds.
  restInterval: 10000
};
|
// Doxygen-generated navigation index for struct MPU_Type:
// each entry is [member name, documentation anchor, children].
var structMPU__Type =
[
    [ "CTRL", "structMPU__Type.html#a769178ef949f0d5d8f18ddbd9e4e926f", null ],
    [ "RASR", "structMPU__Type.html#a8f00c4a5e31b0a8d103ed3b0732c17a3", null ],
    [ "RASR_A1", "structMPU__Type.html#a1658326c6762637eeef8a79bb467445e", null ],
    [ "RASR_A2", "structMPU__Type.html#a37131c513d8a8d211b402e5dfda97205", null ],
    [ "RASR_A3", "structMPU__Type.html#a7d15172b163797736a6c6b4dcc0fa3dd", null ],
    [ "RBAR", "structMPU__Type.html#a990c609b26d990b8ba832b110adfd353", null ],
    [ "RBAR_A1", "structMPU__Type.html#af8b510a85b175edfd8dd8cc93e967066", null ],
    [ "RBAR_A2", "structMPU__Type.html#a80d534f0dfc080c841e1772c7a68e1a2", null ],
    [ "RBAR_A3", "structMPU__Type.html#a207f6e9c3af753367554cc06df300a55", null ],
    [ "RNR", "structMPU__Type.html#a2f7a117a12cb661c76edc4765453f05c", null ],
    [ "TYPE", "structMPU__Type.html#aba02af87f77577c725cf73879cabb609", null ]
];
package cn.icepear.dandelion.upm.biz.service.impl;
import cn.icepear.dandelion.common.core.constant.CommonConstants;
import cn.icepear.dandelion.common.core.utils.R;
import cn.icepear.dandelion.common.security.service.DandelionUser;
import cn.icepear.dandelion.common.security.utils.SecurityUtils;
import cn.icepear.dandelion.upm.api.domain.entity.SysRole;
import cn.icepear.dandelion.upm.api.domain.entity.SysRoleFunction;
import cn.icepear.dandelion.upm.api.domain.entity.SysRoleMenu;
import cn.icepear.dandelion.upm.api.domain.vo.RoleVO;
import cn.icepear.dandelion.upm.biz.mapper.SysRoleFunctionMapper;
import cn.icepear.dandelion.upm.biz.mapper.SysRoleMapper;
import cn.icepear.dandelion.upm.biz.mapper.SysRoleMenuMapper;
import cn.icepear.dandelion.upm.biz.service.SysRoleService;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Role / role-menu / role-function management service implementation.
 *
 * @author rim-wood
 * @date Created on 2019-04-18.
 */
@Service
public class SysRoleServiceImpl extends ServiceImpl<SysRoleMapper, SysRole> implements SysRoleService {

    @Autowired
    private SysRoleMenuMapper sysRoleMenuMapper;
    @Autowired
    private SysRoleMapper sysRoleMapper;
    @Autowired
    private SysRoleFunctionMapper sysRoleFunctionMapper;

    /**
     * Lists the roles assigned to a user.
     *
     * @param userId id of the user
     * @return the user's roles
     */
    @Override
    public List<SysRole> listRolesByUserId(Long userId) {
        return baseMapper.listRolesByUserId(userId);
    }

    /**
     * Deletes a role by id, removing its role-menu links first and evicting
     * the whole menu cache.
     *
     * @param id role id
     * @return whether the role row itself was removed
     */
    @Override
    @CacheEvict(value = "menu_details", allEntries = true)
    @Transactional(rollbackFor = Exception.class)
    public Boolean removeRoleById(Long id) {
        sysRoleMenuMapper.delete(Wrappers
                .<SysRoleMenu>update().lambda()
                .eq(SysRoleMenu::getRoleId, id));
        return this.removeById(id);
    }

    /**
     * Creates a role together with its menu and function links. If an
     * active role with the same key exists the call is rejected; a
     * soft-deleted one is revived via {@link #updateRoleVO}.
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public R<RoleVO> addRoleVO(RoleVO roleVo, Boolean isAdmin) {
        RoleVO getRoleVO = sysRoleMapper.getRoleVo(roleVo);
        DandelionUser user = SecurityUtils.getUser();
        roleVo.setDeptId(user.getGrandparentDeptId().toString());
        roleVo.setCreator(user.getRealName());
        roleVo.setCreateTime(LocalDateTime.now());
        if (getRoleVO == null) {
            // Save the role itself.
            sysRoleMapper.saveSysRole(roleVo);
            // Save menu/function links; null or empty lists are skipped
            // instead of throwing a NullPointerException.
            saveRoleMenus(roleVo);
            saveRoleFunctions(roleVo);
            return new R<>(CommonConstants.SUCCESS, "添加成功", roleVo);
        } else {
            if (getRoleVO.getDelFlag() == 0) {
                // Active duplicate: reject.
                return new R<>(CommonConstants.SUCCESS, "角色已存在,不允许重复添加", null);
            } else {
                // Soft-deleted duplicate: revive it by updating in place.
                roleVo.setRoleId(getRoleVO.getRoleId());
                return updateRoleVO(roleVo, isAdmin);
            }
        }
    }

    /**
     * Updates a role and, when lists are supplied, replaces its menu and
     * function links. System-default roles may only be modified by admins.
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    @CacheEvict(value = "menu_details", key = "'role-' + #roleVo.getRoleId() + '_menu'")
    public R<RoleVO> updateRoleVO(RoleVO roleVo, Boolean isAdmin) {
        SysRole sysRole = this.getById(roleVo.getRoleId());
        if (!isAdmin && sysRole.getSysDefault().equals(1)) {
            return new R<>(CommonConstants.FAIL, "权限不够,不可修改或删除系统默认角色", null);
        }
        DandelionUser user = SecurityUtils.getUser();
        roleVo.setUpdateTime(LocalDateTime.now());
        roleVo.setUpdator(user.getRealName());
        sysRoleMapper.updateRole(roleVo);
        // Replace menu links only when a non-empty list was supplied.
        if (roleVo.getMenuIdList() != null && !roleVo.getMenuIdList().isEmpty()) {
            sysRoleMenuMapper.deleteMenuByRoleId(roleVo.getRoleId());
            saveRoleMenus(roleVo);
        }
        // Replace function links only when a non-empty list was supplied.
        if (roleVo.getFunctionIdList() != null && !roleVo.getFunctionIdList().isEmpty()) {
            sysRoleFunctionMapper.deleteFunctionByRoleId(roleVo.getRoleId());
            saveRoleFunctions(roleVo);
        }
        return new R<>(CommonConstants.SUCCESS, "修改成功", roleVo);
    }

    /** Persists role-to-menu links; no-op when the menu list is null or empty. */
    private void saveRoleMenus(RoleVO roleVo) {
        if (roleVo.getMenuIdList() == null || roleVo.getMenuIdList().isEmpty()) {
            return;
        }
        List<SysRoleMenu> sysRoleMenus = roleVo.getMenuIdList().stream().map(menuId ->
                new SysRoleMenu(roleVo.getRoleId(), menuId)
        ).collect(Collectors.toList());
        sysRoleMenuMapper.saveSysRoleMenu(sysRoleMenus);
    }

    /** Persists role-to-function links; no-op when the list is null or empty. */
    private void saveRoleFunctions(RoleVO roleVo) {
        if (roleVo.getFunctionIdList() == null || roleVo.getFunctionIdList().isEmpty()) {
            return;
        }
        List<SysRoleFunction> sysRoleFunctions = roleVo.getFunctionIdList().stream().map(functionId ->
                new SysRoleFunction(roleVo.getRoleId(), functionId)
        ).collect(Collectors.toList());
        sysRoleFunctionMapper.saveSysRoleFunction(sysRoleFunctions);
    }

    @Override
    public RoleVO getSysRoleById(Long roleId) {
        return sysRoleMapper.getSysRoleById(roleId);
    }

    @Override
    public List<SysRole> getAllList(Long deptId) {
        return sysRoleMapper.getSysRoleByDeptId(deptId);
    }
}
|
<reponame>richardmarston/cim4j
package cim4j;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import cim4j.UnderexcitationLimiterDynamics;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
import cim4j.Boolean;
import cim4j.ProprietaryParameterDynamics;
/*
 Underexcitation limiter function block whose dynamic behaviour is described by
 user-defined (proprietary) parameters.
*/
public class UnderexcitationLimiterUserDefined extends UnderexcitationLimiterDynamics
{
	// Attribute storage, indexed by the ordinals of the enums below.
	private BaseClass[] UnderexcitationLimiterUserDefined_class_attributes;
	private BaseClass[] UnderexcitationLimiterUserDefined_primitive_attributes;
	// RDF identifier of this model instance.
	private java.lang.String rdfid;

	public void setRdfid(java.lang.String id) {
		rdfid = id;
	}

	// Factory turning an RDF string value into a typed primitive wrapper.
	private interface PrimitiveBuilder {
		public abstract BaseClass construct(java.lang.String value);
	};

	// One builder per primitive attribute; LAST_ENUM is a sentinel.
	private enum UnderexcitationLimiterUserDefined_primitive_builder implements PrimitiveBuilder {
		proprietary(){
			public BaseClass construct (java.lang.String value) {
				return new Boolean(value);
			}
		},
		LAST_ENUM() {
			public BaseClass construct (java.lang.String value) {
				return new cim4j.Integer("0");
			}
		};
	}

	// Names of class-typed attributes; LAST_ENUM is a sentinel.
	private enum UnderexcitationLimiterUserDefined_class_attributes_enum {
		proprietary,
		ProprietaryParameterDynamics,
		LAST_ENUM;
	}

	public UnderexcitationLimiterUserDefined() {
		UnderexcitationLimiterUserDefined_primitive_attributes = new BaseClass[UnderexcitationLimiterUserDefined_primitive_builder.values().length];
		UnderexcitationLimiterUserDefined_class_attributes = new BaseClass[UnderexcitationLimiterUserDefined_class_attributes_enum.values().length];
	}

	// Stores a class-typed attribute value by enum slot.
	public void updateAttributeInArray(UnderexcitationLimiterUserDefined_class_attributes_enum attrEnum, BaseClass value) {
		try {
			UnderexcitationLimiterUserDefined_class_attributes[attrEnum.ordinal()] = value;
		}
		catch (ArrayIndexOutOfBoundsException aoobe) {
			System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
		}
	}

	// Stores a primitive attribute value by enum slot.
	public void updateAttributeInArray(UnderexcitationLimiterUserDefined_primitive_builder attrEnum, BaseClass value) {
		try {
			UnderexcitationLimiterUserDefined_primitive_attributes[attrEnum.ordinal()] = value;
		}
		catch (ArrayIndexOutOfBoundsException aoobe) {
			System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
		}
	}

	// Sets a class-typed attribute by name, delegating unknown names upwards.
	public void setAttribute(java.lang.String attrName, BaseClass value) {
		try {
			UnderexcitationLimiterUserDefined_class_attributes_enum attrEnum = UnderexcitationLimiterUserDefined_class_attributes_enum.valueOf(attrName);
			updateAttributeInArray(attrEnum, value);
			System.out.println("Updated UnderexcitationLimiterUserDefined, setting " + attrName);
		}
		catch (IllegalArgumentException iae)
		{
			super.setAttribute(attrName, value);
		}
	}

	/* If the attribute is a String, it is a primitive and we will make it into a BaseClass */
	public void setAttribute(java.lang.String attrName, java.lang.String value) {
		try {
			UnderexcitationLimiterUserDefined_primitive_builder attrEnum = UnderexcitationLimiterUserDefined_primitive_builder.valueOf(attrName);
			updateAttributeInArray(attrEnum, attrEnum.construct(value));
			System.out.println("Updated UnderexcitationLimiterUserDefined, setting " + attrName + " to: " + value);
		}
		catch (IllegalArgumentException iae)
		{
			super.setAttribute(attrName, value);
		}
	}

	/**
	 * Renders this object. When topClass is true all locally stored
	 * attributes (plus superclass state) are listed; otherwise only a
	 * short RDFID reference is returned.
	 * Uses StringBuilder instead of String += inside the loops; the
	 * unused local `indent` was removed.
	 */
	public java.lang.String toString(boolean topClass) {
		StringBuilder result = new StringBuilder();
		if (topClass) {
			for (UnderexcitationLimiterUserDefined_primitive_builder attrEnum: UnderexcitationLimiterUserDefined_primitive_builder.values()) {
				BaseClass bc = UnderexcitationLimiterUserDefined_primitive_attributes[attrEnum.ordinal()];
				if (bc != null) {
					result.append("\tUnderexcitationLimiterUserDefined.").append(attrEnum.name()).append("(").append(bc.debugString()).append(")").append(" ").append(bc.toString(false)).append(System.lineSeparator());
				}
			}
			for (UnderexcitationLimiterUserDefined_class_attributes_enum attrEnum: UnderexcitationLimiterUserDefined_class_attributes_enum.values()) {
				BaseClass bc = UnderexcitationLimiterUserDefined_class_attributes[attrEnum.ordinal()];
				if (bc != null) {
					result.append("\tUnderexcitationLimiterUserDefined.").append(attrEnum.name()).append("(").append(bc.debugString()).append(")").append(" ").append(bc.toString(false)).append(System.lineSeparator());
				}
			}
			result.append(super.toString(true));
		}
		else {
			result.append("(UnderexcitationLimiterUserDefined) RDFID: ").append(rdfid);
		}
		return result.toString();
	}

	public final java.lang.String debugName = "UnderexcitationLimiterUserDefined";
	public java.lang.String debugString()
	{
		return debugName;
	}

	// Primitive-style value assignment is not meaningful for this class.
	public void setValue(java.lang.String s) {
		System.out.println(debugString() + " is not sure what to do with " + s);
	}

	public BaseClass construct() {
		return new UnderexcitationLimiterUserDefined();
	}
};
|
#!/usr/bin/env sh
# AUTOGENERATED FILE! DO NOT MODIFY
# >>>> PKGS: trash-cli
# Non-interactive apt install of the packages listed above.
sudo apt-get install -y \
    trash-cli
|
<filename>mitmproxy/net/timeout_helper.py
from time import time
from OpenSSL.SSL import WantWriteError, WantReadError
from mitmproxy.exceptions import TcpTimeout
class TimeoutHelper:
    """Retries non-blocking OpenSSL operations until they complete or time out."""

    @staticmethod
    def wrap_with_timeout(openssl_callable, timeout):
        """Call ``openssl_callable`` repeatedly, retrying on WantRead/WantWrite,
        and raise :class:`TcpTimeout` once ``timeout`` seconds have elapsed.

        NOTE(review): when ``timeout`` is None, the first WantRead/WantWrite
        raises TcpTimeout immediately (None short-circuits the ``or`` below) —
        confirm this is intended rather than meaning "no timeout".
        NOTE(review): retries spin in a tight loop with no sleep/select between
        attempts, so this busy-waits until the call succeeds or the deadline hits.
        """
        start = time()
        while True:
            try:
                return openssl_callable()
            except (WantReadError, WantWriteError):
                # Socket wasn't ready; retry until the deadline passes.
                if timeout is None or time() - start >= timeout:
                    raise TcpTimeout
|
#!/bin/bash
# Rebuild all binaries, re-bootstrap the network config, and start 4 nodes.
set -e

echo "Killing any running nodes"
# `|| true`: it is fine if no node processes were running.
killall node &> /dev/null || true

echo "Building bootstrap util"
go build ./cmd/bootstrap
echo "Building node"
go build ./cmd/node
echo "Building client"
go build ./cmd/client

echo "Removing old bootstrap and rebootstrapping"
rm -Rf bootstrap.d
mkdir -p bootstrap.d
./bootstrap

echo
echo "Starting nodes"
for ii in $(seq 0 3) ; do
    echo " starting node $ii"
    CONFIG="bootstrap.d/node${ii}/config/node-config.yaml"
    RUNDIR="bootstrap.d/node${ii}/run/"
    # Each node runs in the background, logging into its own run dir.
    ./node --nodeConfig="${CONFIG}" --runDir="${RUNDIR}" --eventLog &> "${RUNDIR}/node.log" &
done

echo
echo To inject some requests into the system run:
echo
# Fix: the config filename was previously printed twice, concatenated
# ("...client-config.yamlclient-config.yaml").
echo ./client --clientConfig bootstrap.d/client0/config/client-config.yaml
echo
|
<reponame>rsuite/rsuite-icons<filename>src/icons/legacy/Hourglass.tsx
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import HourglassSvg from '@rsuite/icon-font/lib/legacy/Hourglass';
// Legacy "hourglass" icon component wrapping the icon-font SVG source.
const Hourglass = createSvgIcon({
  as: HourglassSvg,
  ariaLabel: 'hourglass',
  category: 'legacy',
  displayName: 'Hourglass'
});
export default Hourglass;
|
<reponame>zhlibing/app_api
package com.zhuanghl.jfinal.interceptor;
import com.jfinal.aop.Interceptor;
import com.jfinal.core.ActionInvocation;
import com.jfinal.core.Controller;
import com.jfinal.upload.UploadFile;
import com.zhuanghl.jfinal.common.bean.BaseResponse;
import com.zhuanghl.jfinal.common.bean.Code;
import com.zhuanghl.jfinal.common.utils.FileUtils;
import com.zhuanghl.jfinal.common.utils.StringUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
 * Interceptor for file-upload controllers: upload forms are split into a
 * file-upload part and a plain-text part. Saves every received file, records
 * the resulting URLs (or the names of failed uploads) as request attributes,
 * validates the mandatory {@code userId} parameter, then lets the action run.
 *
 * @author zhuanghl
 */
public class PublishInterceptor implements Interceptor {

    @Override
    public void intercept(ActionInvocation ai) {
        Controller controller = ai.getController();
        try {
            List<UploadFile> fileList = controller.getFiles(); // files already received
            if (fileList != null && !fileList.isEmpty()) {
                List<String> urls = new ArrayList<>(); // URLs of successfully stored files
                List<String> failedFiles = new ArrayList<String>(); // form-field names of failed uploads
                for (UploadFile uploadFile : fileList) {
                    File file = uploadFile.getFile();
                    String urlPath = FileUtils.saveUploadFile(file);
                    if (StringUtils.isEmpty(urlPath)) {
                        failedFiles.add(uploadFile.getParameterName()); // mark as failed
                    } else {
                        // Store the relative path, used later in the response.
                        urls.add(urlPath + file.getName());
                    }
                }
                controller.setAttr("success_images_url", urls);
                if (failedFiles.size() > 0) {
                    controller.setAttr("failed_images_url", failedFiles);
                }
            } else {
                controller.setAttr("success_images_url", "");
            }
        } catch (Exception e) {
            // NOTE(review): failures are only printed to stderr; consider a logger.
            e.printStackTrace();
        }
        // NOTE(review): userId is validated only after files have been saved, so
        // uploads are persisted even when the request is then rejected — confirm
        // this ordering is intentional.
        String userId = controller.getPara("userId");
        if (StringUtils.isEmpty(userId)) {
            controller.renderJson(new BaseResponse(Code.ARGUMENT_ERROR, "userId can not be null"));
            return;
        }
        controller.setAttr("userId", userId);
        ai.invoke();
    }
}
} |
<gh_stars>1-10
package com.ssafy.sub.repo;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.springframework.data.jpa.repository.support.QuerydslRepositorySupport;
import org.springframework.stereotype.Repository;
import com.ssafy.sub.dto.ContestFeed;
import com.ssafy.sub.dto.QContestFeedLike;
import com.ssafy.sub.dto.QUser;
@Repository
public class ContestFeedQueryDsl extends QuerydslRepositorySupport {

    public ContestFeedQueryDsl() {
        super(ContestFeed.class);
    }

    /** Number of distinct likes on contest feed {@code fid} from female users (usex == 2). */
    public Long findLikeCountFemale(int fid) {
        return countLikesBySex(fid, 2);
    }

    /** Number of distinct likes on contest feed {@code fid} from male users (usex == 1). */
    public Long findLikeCountMale(int fid) {
        return countLikesBySex(fid, 1);
    }

    // NOTE(review): the age brackets below are pinned to fixed birth years
    // (e.g. "born after 2002-01-01" for teens), so the buckets drift as the
    // calendar advances; consider deriving the boundaries from the current date.

    /** Likes from users in their teens (born after 2002-01-01). */
    public Long findLikeAge10(int fid) throws ParseException {
        return countLikesByBirth(fid, parseDate("2002-01-01"), null);
    }

    /** Likes from users in their twenties (born between 1992-01-01 and 2001-12-31). */
    public Long findLikeAge20(int fid) throws ParseException {
        return countLikesByBirth(fid, parseDate("1992-01-01"), parseDate("2001-12-31"));
    }

    /** Likes from users in their thirties (born between 1982-01-01 and 1991-12-31). */
    public Long findLikeAge30(int fid) throws ParseException {
        return countLikesByBirth(fid, parseDate("1982-01-01"), parseDate("1991-12-31"));
    }

    /** Likes from users in their forties (born between 1972-01-01 and 1981-12-31). */
    public Long findLikeAge40(int fid) throws ParseException {
        return countLikesByBirth(fid, parseDate("1972-01-01"), parseDate("1981-12-31"));
    }

    /** Likes from users in their fifties or older (born before 1971-12-31). */
    public Long findLikeAge50(int fid) throws ParseException {
        return countLikesByBirth(fid, null, parseDate("1971-12-31"));
    }

    /** Parses a yyyy-MM-dd date. SimpleDateFormat is not thread-safe, so one is built per call. */
    private static Date parseDate(String yyyyMmDd) throws ParseException {
        return new SimpleDateFormat("yyyy-MM-dd").parse(yyyyMmDd);
    }

    /** Distinct like count on feed {@code fid} restricted to users with the given sex code. */
    private Long countLikesBySex(int fid, int sexCode) {
        QContestFeedLike like = QContestFeedLike.contestFeedLike;
        QUser user = QUser.user;
        return from(like)
                .leftJoin(user)
                .on(like.contestFeedLikeKey.uid.eq(user.id))
                .where(user.usex.eq(sexCode)
                        .and(like.contestFeedLikeKey.cfid.eq(fid)))
                .distinct()
                .fetchCount();
    }

    /**
     * Distinct like count on feed {@code fid} restricted by birth date.
     * Either bound may be null for an open-ended range; bounds are exclusive,
     * matching the original after()/before() semantics.
     * (The leftover debug println with {@code new Date(2002)} — epoch + 2002 ms —
     * has been removed.)
     */
    private Long countLikesByBirth(int fid, Date bornAfter, Date bornBefore) {
        QContestFeedLike like = QContestFeedLike.contestFeedLike;
        QUser user = QUser.user;
        if (bornAfter != null && bornBefore != null) {
            return from(like)
                    .leftJoin(user)
                    .on(like.contestFeedLikeKey.uid.eq(user.id))
                    .where(user.ubirth.after(bornAfter).and(user.ubirth.before(bornBefore))
                            .and(like.contestFeedLikeKey.cfid.eq(fid)))
                    .distinct()
                    .fetchCount();
        }
        if (bornAfter != null) {
            return from(like)
                    .leftJoin(user)
                    .on(like.contestFeedLikeKey.uid.eq(user.id))
                    .where(user.ubirth.after(bornAfter)
                            .and(like.contestFeedLikeKey.cfid.eq(fid)))
                    .distinct()
                    .fetchCount();
        }
        return from(like)
                .leftJoin(user)
                .on(like.contestFeedLikeKey.uid.eq(user.id))
                .where(user.ubirth.before(bornBefore)
                        .and(like.contestFeedLikeKey.cfid.eq(fid)))
                .distinct()
                .fetchCount();
    }
}
|
package com.katus.data;
import java.io.Serializable;
/**
* @author <NAME>
* @version 1.0, 2021-10-07
*/
/**
 * Contract for objects identified by a serializable id.
 *
 * @param <ID> type of the identifier
 */
public interface Recognizable<ID extends Serializable> {

    /** Returns this object's identifier. */
    ID id();

    /** Sets this object's identifier. */
    void setId(ID id);

    /** JavaBean-style alias for {@link #id()}. */
    default ID getId() {
        return id();
    }
}
|
<gh_stars>0
/*
* Copyright 2014-2020 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.development.types;
import ideal.library.elements.*;
import ideal.runtime.elements.*;
import ideal.runtime.logs.*;
import ideal.development.elements.*;
import ideal.development.flavors.*;
import ideal.development.kinds.*;
import ideal.development.annotations.dont_display;
import javax.annotation.Nullable;
/**
 * Shared base for principal (unflavored) types: tracks the type's flavor
 * profile, the most recent declaration pass executed for it, and the
 * declaration that introduced it.
 */
public abstract class base_principal_type extends base_type implements principal_type {

  // Profile describing which flavors this type supports; may remain null
  // until declaration analysis sets it.
  protected @Nullable flavor_profile the_flavor_profile;
  // Most recent declaration pass completed for this type.
  protected declaration_pass last_pass;
  @dont_display
  private @Nullable declaration the_declaration;

  protected base_principal_type(@Nullable flavor_profile the_flavor_profile,
      declaration_pass last_pass, @Nullable declaration the_declaration) {
    this.the_flavor_profile = the_flavor_profile;
    this.last_pass = last_pass;
    this.the_declaration = the_declaration;
  }

  // A principal type is its own principal.
  @Override
  public principal_type principal() {
    return this;
  }

  @Override
  public type_flavor get_flavor() {
    return flavors.nameonly_flavor;
  }

  @Override
  public boolean has_flavor_profile() {
    return the_flavor_profile != null;
  }

  // Returns the profile; panics when it has not been set yet.
  @Override
  public flavor_profile get_flavor_profile() {
    if (the_flavor_profile == null) {
      // TODO: signal error instead of panicing.
      utilities.panic("Unset profile in " + this);
    }
    assert the_flavor_profile != null;
    return the_flavor_profile;
  }

  // Maps |flavor| through this type's profile; procedure and reference kinds
  // lazily fall back to their default profile when none was set.
  @Override
  public type get_flavored(type_flavor flavor) {
    if (the_flavor_profile == null) {
      if (get_kind() == type_kinds.procedure_kind || get_kind() == type_kinds.reference_kind) {
        the_flavor_profile = default_flavor_profile();
      } else {
        utilities.panic("No profile for " + this);
      }
    }
    return do_get_flavored(this, the_flavor_profile.map(flavor));
  }

  // One-shot setter; panics if a flavor the new profile does not support has
  // already been materialized for this type.
  public void set_flavor_profile(flavor_profile the_flavor_profile) {
    assert this.the_flavor_profile == null;
    for (type_flavor flavor : flavors.all_flavors) {
      if (!the_flavor_profile.supports(flavor)) {
        if (((type_flavor_impl) flavor).types.contains_key(this)) {
          utilities.panic("Already used " + flavor + " of " + this);
        }
      }
    }
    this.the_flavor_profile = the_flavor_profile;
  }

  public declaration_pass get_pass() {
    return last_pass;
  }

  @Override
  public final @Nullable declaration get_declaration() {
    return the_declaration;
  }

  // One-shot setter: a type may be bound to a single declaration only.
  public void set_declaration(declaration the_declaration) {
    assert this.the_declaration == null : "Already declared " + this;
    assert the_declaration != null;
    this.the_declaration = the_declaration;
  }

  // Advances declaration processing up to |pass|, running intermediate
  // passes in order and skipping anything already done.
  public void process_declaration(declaration_pass pass) {
    if (pass.is_before(last_pass) || pass == last_pass) {
      return;
    }
    if (last_pass.is_before(declaration_pass.TYPES_AND_PROMOTIONS)) {
      do_declare(declaration_pass.TYPES_AND_PROMOTIONS);
    }
    if (pass == declaration_pass.METHODS_AND_VARIABLES &&
        last_pass.is_before(declaration_pass.METHODS_AND_VARIABLES)) {
      do_declare(declaration_pass.METHODS_AND_VARIABLES);
    }
  }

  // Runs exactly the next pass in sequence (asserted).
  protected final void do_declare(declaration_pass pass) {
    assert pass.ordinal() == last_pass.ordinal() + 1;
    last_pass = pass;
    do_declare_actual(pass);
  }

  protected void do_declare_actual(declaration_pass pass) {
    assert pass != declaration_pass.NONE;
    type_declaration_context the_context = get_context();
    assert the_context != null;
    the_context.declare_type(this, pass);
  }

  // Profile used when none was set explicitly (see get_flavored).
  public abstract flavor_profile default_flavor_profile();

  @Override
  public final string to_string() {
    return describe(type_format.FULL);
    // return new base_string(describe(type_format.FULL) + "@" + System.identityHashCode(this));
  }
}
|
from enum import Enum
class GraphPosition(Enum):
    """Where the graph is anchored within its drawing area."""
    Center = 1
    TopLeft = 2
    BottomRight = 3
class LabelMode(Enum):
    """Which side(s) of the graph labels are drawn on."""
    RightOnly = 1
    LeftOnly = 2
    Both = 3
class GraphDisplay:
    """Display configuration for a graph.

    Bundles the plotted entries with presentation options: label font size,
    graph anchor position (see GraphPosition) and label placement (see
    LabelMode). Accessor/mutator pairs are kept for API compatibility.
    """

    def __init__(self, entries, label_text_size, graph_position, label_mode):
        self.entries = entries
        self.label_text_size = label_text_size
        self.graph_position = graph_position
        self.label_mode = label_mode

    def get_entries(self):
        """Return the plotted entries."""
        return self.entries

    def set_entries(self, entries):
        """Replace the plotted entries."""
        self.entries = entries

    def get_label_text_size(self):
        """Return the label font size."""
        return self.label_text_size

    def set_label_text_size(self, label_text_size):
        """Set the label font size."""
        self.label_text_size = label_text_size

    def get_graph_position(self):
        """Return the graph anchor position."""
        return self.graph_position

    def set_graph_position(self, graph_position):
        """Set the graph anchor position."""
        self.graph_position = graph_position

    def get_label_mode(self):
        """Return the label placement mode."""
        return self.label_mode

    def set_label_mode(self, label_mode):
        """Set the label placement mode."""
        self.label_mode = label_mode
<gh_stars>0
"use strict";

// Auto-generated icon definition: SVG view box plus child nodes for the
// "night shelter" outline glyph, exported for the icon registry.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_night_shelter_outline = void 0;
var ic_night_shelter_outline = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "rect",
    "attribs": {
      "fill": "none",
      "height": "24",
      "width": "24"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M12,5.5l6,4.5v9H6v-9L12,5.5 M12,3L4,9v12h16V9L12,3L12,3z M15,12h-3.5v3.5H8V11H7v7h1v-1.5h8V18h1v-4 C17,12.9,16.1,12,15,12z M9.75,12.5c-0.69,0-1.25,0.56-1.25,1.25C8.5,14.44,9.06,15,9.75,15S11,14.44,11,13.75 C11,13.06,10.44,12.5,9.75,12.5z"
    },
    "children": []
  }]
};
exports.ic_night_shelter_outline = ic_night_shelter_outline;
#!/bin/bash
# Find every README* file under the current tree, print each path and its
# bare file name, report the total, then show the most recent commit.

test=$(find . -name "README*")
count=0
for i in $test; do
  echo "$i"
  # Bare file name (last path component).
  basename "$i"
  #echo " " >> "${i}"
  #$(git add $i)
  #$(git commit -m "Commit to readme $i")
  count=$((count+1))
  #$(git push)
done
echo "$count"
# Fixed: `echo $log | cut -d'commit' -f1` was invalid — cut requires a
# single-character -d delimiter, so that pipeline always errored out.
# Print the latest commit directly instead.
git log -1 --pretty=oneline
|
<gh_stars>0
/*
Copyright 2017 <NAME>
*/
#include <condition_variable>
#include <mutex>
namespace thr
{

/// Classic counting semaphore built on a mutex and a condition variable.
/// C is the counter type; acquire() blocks while the count is zero.
template<typename C>
class semaphore
{
public:
    /// Constructs a semaphore with an initial count of zero.
    semaphore()
        : mMutex()
        , mCondVar()
        , mCount(0)
    {
    }

    /// Constructs a semaphore with the given initial count.
    explicit semaphore(C count)
        : mMutex()
        , mCondVar()
        , mCount(count)
    {
    }

    /// Blocks until the count is positive, then decrements it.
    void acquire()
    {
        std::unique_lock<std::mutex> lock(mMutex);
        // Predicate form handles spurious wakeups, like the original loop.
        mCondVar.wait(lock, [this] { return mCount != 0; });
        --mCount;
    }

    /// Increments the count and wakes one waiter.
    void release()
    {
        std::lock_guard<std::mutex> lock(mMutex);
        ++mCount;
        mCondVar.notify_one();
    }

private:
    std::mutex mMutex;
    std::condition_variable mCondVar;
    C mCount;
};

} // namespace thr
|
<filename>Scala/src/main/scala/com/krailis/scala_99_problems/Arithmetic/P37.scala
package com.krailis.scala_99_problems.Arithmetic
import com.krailis.scala_99_problems.Arithmetic.P36.IntPrimeFactorsMultiplicity
import scala.annotation.tailrec
import scala.math.pow
object P37 {
  implicit class IntTotientAlternative(n: Int) {
    /**
     * Euler's totient computed from the prime-factor multiplicities:
     * phi = product over (p, m) of (p - 1) * p^(m - 1).
     */
    def totient2: Int =
      n.primeFactorsMultiplicity
        .foldLeft(1.0) { case (phi, (p, m)) => phi * (p - 1) * pow(p, m - 1) }
        .toInt
  }
}
|
<gh_stars>0
import { RepositoryModel } from './repository.model';
/** A user profile together with the repositories they own. */
export interface UserModel {
  /** Display name of the user. */
  name: string;
  /** URL of the user's avatar image. */
  avatarUrl: string;
  /** Numeric account id. */
  id: number;
  /** Repositories belonging to this user. */
  repositories: RepositoryModel[];
}
|
///////////////////////////////////////////////////////////////////////////////
/// \file proto_typeof.hpp
/// Type registrations so that proto1 expression templates can be used together
/// with the Boost.Typeof library.
//
// Copyright 2007 <NAME>. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_XPRESSIVE_PROTO_PROTO_TYPEOF_H
#define BOOST_XPRESSIVE_PROTO_PROTO_TYPEOF_H

// MS compatible compilers support #pragma once
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif

#include <boost/config.hpp>
#include <boost/typeof/typeof.hpp>
#include <boost/xpressive/proto/v1_/proto_fwd.hpp>
#include BOOST_TYPEOF_INCREMENT_REGISTRATION_GROUP()

// Register every proto1 operator tag with Boost.Typeof so that BOOST_TYPEOF
// can name proto1 expression-template types.
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::unary_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::binary_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::nary_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::noop_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::unary_plus_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::unary_minus_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::unary_star_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::complement_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::address_of_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::logical_not_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::pre_inc_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::pre_dec_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::post_inc_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::post_dec_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::left_shift_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::right_shift_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::multiply_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::divide_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::modulus_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::add_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::subtract_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::less_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::greater_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::less_equal_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::greater_equal_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::equal_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::not_equal_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::logical_or_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::logical_and_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::bitand_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::bitor_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::bitxor_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::comma_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::mem_ptr_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::left_shift_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::right_shift_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::multiply_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::divide_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::modulus_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::add_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::subtract_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::bitand_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::bitor_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::bitxor_assign_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::subscript_tag)
BOOST_TYPEOF_REGISTER_TYPE(boost::proto1::function_tag)
// The expression node templates need template (not type) registration.
BOOST_TYPEOF_REGISTER_TEMPLATE(boost::proto1::unary_op, (typename)(typename))
BOOST_TYPEOF_REGISTER_TEMPLATE(boost::proto1::binary_op, (typename)(typename)(typename))
#endif
|
/**
 * Computes the cost of a print job.
 *
 * @param {number} pages - number of pages to print
 * @param {string} paperSize - "A4" or "A3"; any other value adds no surcharge
 * @param {string} inkColor - "black" or "color"; any other value adds no surcharge
 * @returns {number} total cost: paper surcharge + ink surcharge + 0.10 per page
 */
function calculatePrintCost(pages, paperSize, inkColor) {
  let cost = 0;
  // Per-job surcharge for the paper size (strict equality instead of ==).
  if (paperSize === "A4") {
    cost += 0.15;
  } else if (paperSize === "A3") {
    cost += 0.30;
  }
  // Per-job surcharge for the ink type.
  if (inkColor === "black") {
    cost += 0.05;
  } else if (inkColor === "color") {
    cost += 0.25;
  }
  // Per-page cost.
  cost += 0.10 * pages;
  return cost;
}

let result = calculatePrintCost(100, "A4", "black");
// Fixed comment: the old one claimed 17.5, but 0.15 + 0.05 + 100 * 0.10 = 10.2.
console.log(result); // Outputs: 10.2
import org.apache.jmeter.samplers.AbstractSampler;
import org.apache.jorphan.gui.JLabeledTextField;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
import org.apache.jmeter.protocol.http.sampler.HTTPSampler;
import org.apache.jmeter.protocol.http.sampler.HTTPSampleResult;
import org.apache.jmeter.protocol.http.sampler.HTTPSamplerBase;
import org.apache.jmeter.testelement.TestStateListener;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.testelement.property.StringProperty;
import org.apache.jmeter.testelement.property.TestElementProperty;
import org.apache.jmeter.util.JMeterUtils;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
/**
 * Custom JMeter sampler that delegates an HTTP GET to the standard
 * HTTPSampler, using server/port/path values taken from Swing text fields.
 *
 * NOTE(review): this class mixes GUI components into the sampler itself,
 * whereas JMeter conventionally splits Sampler and GUI into separate classes.
 * It also references TestElement and GridBagConstraints without visible
 * imports, catches IOException around HTTPSamplerBase.sample() (which does
 * not declare it), and returns StringProperty where TestElementProperty is
 * declared — confirm this file actually compiles in its build.
 */
public class MyCustomSampler extends AbstractSampler implements TestStateListener {

    private static final Logger log = LoggingManager.getLoggerForClass();

    // Property keys under which the GUI field values are stored.
    private static final String SERVER_NAME = "MyCustomSampler.serverName";
    private static final String PORT = "MyCustomSampler.port";
    private static final String PATH = "MyCustomSampler.path";

    // GUI input fields (see NOTE above about GUI living inside the sampler).
    private JLabeledTextField serverNameField;
    private JLabeledTextField portField;
    private JLabeledTextField pathField;

    public MyCustomSampler() {
        setName("My Custom Sampler");
    }

    @Override
    public void addTestElement(TestElement el) {
        if (el instanceof MyCustomSampler) {
            configureTestElement(el);
        }
    }

    // Resets the sampler and blanks all GUI fields.
    @Override
    public void clear() {
        super.clear();
        serverNameField.setText("");
        portField.setText("");
        pathField.setText("");
    }

    @Override
    public void testStarted() {
        // Perform any necessary initialization when the test starts
    }

    @Override
    public void testEnded() {
        // Perform any necessary cleanup when the test ends
    }

    @Override
    public void testEnded(String host) {
        testEnded();
    }

    // Builds a fresh HTTPSampler per call from the current field values and
    // performs a GET; failures are converted into an error result.
    @Override
    public HTTPSampleResult sample(org.apache.jmeter.samplers.Entry e) {
        HTTPSamplerBase sampler = new HTTPSampler();
        sampler.setDomain(serverNameField.getText());
        // NOTE(review): parseInt throws NumberFormatException on a blank field.
        sampler.setPort(Integer.parseInt(portField.getText()));
        sampler.setPath(pathField.getText());
        sampler.setMethod(HTTPSamplerBase.GET);
        try {
            return sampler.sample(e);
        } catch (IOException ex) {
            log.error("Failed to sample", ex);
            return errorResult(ex, sampler);
        }
    }

    // Wraps an exception into a failed HTTP 500 sample result.
    private HTTPSampleResult errorResult(Exception ex, HTTPSamplerBase sampler) {
        HTTPSampleResult res = new HTTPSampleResult();
        res.setSampleLabel("My Custom Sampler");
        res.setResponseCode("500");
        res.setResponseMessage("Internal Server Error");
        res.setSuccessful(false);
        // NOTE(review): getMessage() may be null, which would NPE here.
        res.setResponseData(ex.getMessage().getBytes());
        res.setDataType(HTTPSampleResult.TEXT);
        res.setSamplerData(sampler.toString());
        return res;
    }

    @Override
    public String getStaticLabel() {
        return "My Custom Sampler";
    }

    @Override
    public String getLabelResource() {
        return this.getClass().getSimpleName();
    }

    // Copies the stored properties back into the GUI fields.
    @Override
    public void configure(TestElement el) {
        super.configure(el);
        serverNameField.setText(el.getPropertyAsString(SERVER_NAME));
        portField.setText(el.getPropertyAsString(PORT));
        pathField.setText(el.getPropertyAsString(PATH));
    }

    // Exposes the GUI field values as JMeter properties.
    @Override
    public TestElementProperty getProperty(String propName) {
        if (propName.equals(SERVER_NAME)) {
            return new StringProperty(SERVER_NAME, serverNameField.getText());
        } else if (propName.equals(PORT)) {
            return new StringProperty(PORT, portField.getText());
        } else if (propName.equals(PATH)) {
            return new StringProperty(PATH, pathField.getText());
        } else {
            return super.getProperty(propName);
        }
    }

    @Override
    public void add(JPanel panel, GridBagConstraints c) {
        super.add(panel, c);
        initGui(panel);
    }

    // Lays out the three labeled input fields in a flow panel.
    private void initGui(JPanel panel) {
        serverNameField = new JLabeledTextField("Server Name:");
        portField = new JLabeledTextField("Port:");
        pathField = new JLabeledTextField("Path:");
        JPanel paramPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
        paramPanel.add(serverNameField);
        paramPanel.add(portField);
        paramPanel.add(pathField);
        panel.add(paramPanel, BorderLayout.CENTER);
    }
}
<gh_stars>0
#!/usr/bin/env node
// Counts OSM features inside the given bounding box by running a tile-reduce
// job: each z12 tile is processed by map.js and the per-tile counts summed.
var tileReduce = require('tile-reduce')
var path = require('path')

var numFeatures = 0

tileReduce({
  // Bounding box of the area of interest.
  // NOTE(review): tile-reduce expects [west, south, east, north]; here the
  // second value (32.78) is larger than the fourth (29.82) — confirm order.
  bbox: [-117.31, 32.78, -114.06, 29.82],
  zoom: 12,
  map: path.join(__dirname, '/map.js'),
  sources: [
    {
      name: 'osm',
      mbtiles: path.join(__dirname, '../data/osm.mbtiles')
    }
  ]
})
.on('reduce', num => {
  // `num` is the per-tile count emitted by map.js.
  numFeatures += num
})
.on('end', () => console.log('count: %d', numFeatures))
|
<reponame>shammishailaj/ghd
package utils
import (
"fmt"
"github.com/couchbase/gocb"
)
// CouchbaseConfig holds the connection settings for a Couchbase cluster.
type CouchbaseConfig struct {
	HostName   string // Hostname of the Couchbase server
	Port       int64  // Port number of the Couchbase server
	UserName   string // Username part of the cluster credentials
	Password   string // Password for the aforementioned UserName
	Bucket     string // bucket to open
	BucketPass string // password of the bucket to open
}

// Values populates every field of the config in a single call.
func (c *CouchbaseConfig) Values(hostName string, port int64, userName, password, bucket, bucketPass string) {
	c.HostName = hostName
	c.Port = port
	c.UserName = userName
	c.Bucket = bucket
	c.Password = password
	c.BucketPass = bucketPass
}

// URL returns the HTTP endpoint of the cluster.
func (c *CouchbaseConfig) URL() string {
	return fmt.Sprintf("http://%s:%d", c.HostName, c.Port)
}

// String implements fmt.Stringer for debugging output.
func (c *CouchbaseConfig) String() string {
	return fmt.Sprintf("CouchbaseConfig = %#v", c)
}

// Connect dials the cluster and authenticates with the configured
// credentials. Fixed: the previous version ignored a connection failure and
// went on to call Authenticate on a nil cluster (nil-pointer panic); it now
// returns early on either error.
func (c *CouchbaseConfig) Connect() (*gocb.Cluster, error) {
	cluster, err := gocb.Connect(c.URL())
	if err != nil {
		fmt.Printf("ERROR CONNECTING TO CLUSTER: %s", err.Error())
		return nil, err
	}
	err = cluster.Authenticate(gocb.PasswordAuthenticator{
		Username: c.UserName,
		Password: c.Password,
	})
	if err != nil {
		fmt.Printf("ERROR Authentication BUCKET: %s", err.Error())
		return nil, err
	}
	return cluster, nil
}
|
class XMLPageGenerator:
    """Builds minimal XML <page> snippets for a wiki target."""

    def __init__(self):
        # Target wiki coordinates; `dry` presumably suppresses real writes —
        # none of these are read by generate_page itself.
        self.family = 'wikipedia'
        self.code = 'en'
        self.dry = True

    def generate_page(self, title, content):
        """Return a <page> element with tab-indented <title> and <content>.

        NOTE(review): title/content are interpolated verbatim — they are not
        XML-escaped; confirm callers never pass markup-significant characters.
        """
        parts = [
            "<page>",
            f"\t<title>{title}</title>",
            f"\t<content>{content}</content>",
            "</page>",
        ]
        return "\n".join(parts)
<reponame>lananh265/social-network<gh_stars>1-10
"use strict";

// Auto-generated icon definition: SVG view box plus path data for the
// "align bottom" glyph, exported for the icon registry.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.alignBottom = void 0;
var alignBottom = {
  "viewBox": "0 0 8 8",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M.09 0c-.06 0-.09.04-.09.09v4.81c0 .05.04.09.09.09h1.81c.05 0 .09-.04.09-.09v-4.81c0-.06-.04-.09-.09-.09h-1.81zm6 0c-.05 0-.09.04-.09.09v4.81c0 .05.04.09.09.09h1.81c.05 0 .09-.04.09-.09v-4.81c0-.06-.04-.09-.09-.09h-1.81zm-3 2c-.06 0-.09.04-.09.09v2.81c0 .05.04.09.09.09h1.81c.05 0 .09-.04.09-.09v-2.81c0-.06-.04-.09-.09-.09h-1.81zm-3.09 4v1h8v-1h-8z"
    }
  }]
};
exports.alignBottom = alignBottom;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.features.search.lucene;
import ed.biodare2.backend.features.search.ExperimentVisibility;
import ed.biodare2.backend.features.search.SortOption;
import static ed.biodare2.backend.features.search.lucene.Fields.*;
import static ed.biodare2.backend.features.search.lucene.LuceneConfiguration.configAnalyser;
import ed.biodare2.backend.web.rest.HandlingException;
import ed.biodare2.backend.web.rest.ListWrapper;
import java.util.Optional;
import javax.annotation.PreDestroy;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
import org.apache.lucene.queryparser.classic.ParseException;
/**
 * Lucene-backed search over experiment documents: combines a free-text query
 * with a visibility filter (documents owned by the caller OR public) and
 * optional sorting, returning pages of experiment ids.
 *
 * @author tzielins
 */
@Service
public class LuceneExperimentsSearcher implements AutoCloseable {

    final Logger log = LoggerFactory.getLogger(this.getClass());

    // Low-level index searcher; closed together with this bean.
    final LuceneSearcher searcher;
    // Analyzer built by LuceneConfiguration.configAnalyser(), used for query parsing.
    final Analyzer analyzer;

    final static int MAX_HITS = 10_000;

    @Autowired
    public LuceneExperimentsSearcher(LuceneSearcher searcher) {
        this.searcher = searcher;
        this.analyzer = configAnalyser();
    }

    @Override
    @PreDestroy
    public void close() throws Exception {
        searcher.close();
        log.info("Searcher closed");
    }

    // Lists every visible experiment, without any text filtering.
    public ListWrapper<Long> findAllVisible(ExperimentVisibility visibility,
            SortOption sorting, boolean asc, int pageIndex, int pageSize) {

        Query query = new MatchAllDocsQuery();
        return find(query, visibility, sorting, asc, pageIndex, pageSize);
    }

    // Full-text search restricted to what the caller may see.
    public ListWrapper<Long> findVisible(String queryString, ExperimentVisibility visibility,
            SortOption sorting, boolean asc, int pageIndex, int pageSize) {

        Query query = parseQuery(queryString);
        // log.info("\nWill searech for:\n{}\n\n",query.toString());
        return find(query, visibility, sorting, asc, pageIndex, pageSize);
    }

    // Common path: attach the visibility filter, resolve sorting, delegate.
    protected ListWrapper<Long> find(Query query, ExperimentVisibility visibility,
            SortOption sorting, boolean asc, int pageIndex, int pageSize) {

        query = addVisibilityFilter(query, visibility);
        Optional<Sort> sort = sortCriteria(sorting, asc);
        return searcher.search(query, sort, pageIndex, pageSize);
    }

    // Matches documents owned by the user OR marked public; each branch
    // degrades to match-none when that option is absent/disabled.
    protected Query visibilityFilter(ExperimentVisibility visibility) {

        Query personal;
        if (visibility.user.isPresent()) {
            Term ownerTerm = new Term(OWNER, visibility.user.get());
            personal = new TermQuery(ownerTerm);
        } else {
            personal = new MatchNoDocsQuery();
        }

        Query isPublic;
        if (visibility.showPublic) {
            isPublic = new TermQuery(new Term(IS_PUBLIC, ""+true));
        } else {
            isPublic = new MatchNoDocsQuery();
        }

        return new BooleanQuery.Builder()
                .add(personal, BooleanClause.Occur.SHOULD )
                .add(isPublic, BooleanClause.Occur.SHOULD )
                .build();
    }

    // Wraps the user query: it MUST match, and the visibility clause is a
    // FILTER (required but not scored).
    Query addVisibilityFilter(Query query, ExperimentVisibility visibility) {

        Query visiblityFilter = visibilityFilter(visibility);
        return new BooleanQuery.Builder()
                .add(query, BooleanClause.Occur.MUST )
                .add(visiblityFilter, BooleanClause.Occur.FILTER )
                .build();
    }

    // Maps a SortOption onto the corresponding Lucene sort field; RANK means
    // relevance ordering (empty Optional).
    protected Optional<Sort> sortCriteria(SortOption options, boolean asc) {
        switch(options) {
            case RANK: return Optional.empty();
            case ID: return Optional.of(new Sort(new SortField(ID_S, SortField.Type.LONG, !asc)));
            case NAME: return Optional.of(new Sort(new SortField(NAME_S, SortField.Type.STRING, !asc)));
            case FIRST_AUTHOR: return Optional.of(new Sort(new SortField(FIRST_AUTHOR_S, SortField.Type.STRING, !asc)));
            case UPLOAD_DATE: return Optional.of(new Sort(new SortField(UPLOADED_S, SortField.Type.LONG, !asc)));
            case MODIFICATION_DATE: return Optional.of(new Sort(new SortField(MODIFIED_S, SortField.Type.LONG, !asc)));
            case EXECUTION_DATE: return Optional.of(new Sort(new SortField(EXECUTED_S, SortField.Type.LONG, !asc)));
            default: throw new IllegalArgumentException("Unsuported sorting option "+options);
        }
    }

    // Parses the user string as an OR over the main text fields.
    Query parseQuery(String queryString) {

        String[] fields = {NAME, PURPOSE, AUTHORS, WHOLE_CONTENT};
        BooleanClause.Occur[] flags = new BooleanClause.Occur[fields.length];
        for (int i = 0; i< flags.length; i++) {
            flags[i] = BooleanClause.Occur.SHOULD;
        }

        try {
            return MultiFieldQueryParser.parse(queryString, fields, flags, analyzer);
        } catch (ParseException e) {
            throw new HandlingException("Could not parse query: "+queryString+"; "+e.getMessage(),e);
        }
    }
}
|
// Renders each marker as "Marker: <name>, Location: <lat>, <lng>",
// one marker per line.
function formatMapMarkers(markers) {
  const lines = [];
  for (const marker of markers) {
    lines.push(`Marker: ${marker.name}, Location: ${marker.latitude}, ${marker.longitude}`);
  }
  return lines.join('\n');
}
// Static demo map: shows a loading image and a preformatted dump of the
// hard-coded markers produced by formatMapMarkers.
// NOTE(review): `mapLoading` is referenced but not defined or imported in
// this chunk — confirm it is imported where this component is declared.
class MapComponent extends React.Component {
  render() {
    // Hard-coded demo markers.
    const markers = [
      { name: "Marker1", latitude: 40.7128, longitude: -74.0060 },
      { name: "Marker2", latitude: 34.0522, longitude: -118.2437 },
      { name: "Marker3", latitude: 41.8781, longitude: -87.6298 }
    ];

    const formattedMarkers = formatMapMarkers(markers);

    return (
      <div className="map-container">
        <div className="map-loading">
          <img src={mapLoading.default} width="100%" height="100%" />
          <div className="map-markers">
            <pre>{formattedMarkers}</pre>
          </div>
        </div>
      </div>
    );
  }
}
<filename>apps/react-scrappy/src/app/components/translations-list/index.tsx
import React, { useEffect, useState } from 'react';
import { useQuery } from '@apollo/client';
import { ITranslation } from '@ng-scrappy/models';
import TranslationCard from './translation-card';
import Error from './../error';
import { WORDS_LIST_QUERY } from '../../queries/translations.queries';
import { useScroll } from '../../hooks/use-scroll.hook';
import { useHistory } from 'react-router-dom';
/**
 * Infinite-scrolling list of translations for the language taken from the
 * current URL (/:language/...). Fetches pages of `limit` words via Apollo,
 * keeps a cursor to the last loaded word, and auto-clicks a hidden
 * "Load More" button when the user scrolls near the bottom.
 */
export function TranslationList() {
  const history = useHistory();
  const language = history.location.pathname.split('/')[1];
  const limit = 6;
  const scrollHeight = useScroll();

  const { loading, error, data, fetchMore, networkStatus } = useQuery(
    WORDS_LIST_QUERY,
    {
      variables: { language },
      notifyOnNetworkStatusChange: true,
    }
  );

  // Updates cursor for pagination.
  // Fixed: dictionary[0] is now guarded with optional chaining so an empty
  // result set no longer crashes with "cannot read property of undefined".
  const [pointer, setCursor] = useState(null);
  useEffect(() => {
    if (data?.dictionary?.[0]?.language === language) {
      const last = data.dictionary[data.dictionary.length - 1];
      const cursor = last.word ?? null;
      const cursorLang = last.language ?? null;
      setCursor({ cursor, cursorLang });
    } else {
      setCursor({ cursor: null, cursorLang: language });
    }
    return () => setCursor(null);
  }, [data, language]);

  // Trigger loader animation (networkStatus 3 indicates a fetchMore/refetch).
  const [isRefetching, setLoader] = useState(false);
  useEffect(() => {
    const isRefetching = networkStatus === 3 || loading;
    setLoader(isRefetching);
    return () => setLoader(false);
  }, [networkStatus, loading]);

  // Trigger infinite scroll
  useEffect(() => {
    const list = document.getElementById('list');
    const listHeight = list.clientHeight + list.offsetTop;
    const height = scrollHeight.y + window.screenY;

    // HACKY: 850 is the approx height of the list with one request-batch
    const isInRange = listHeight - height < 850;
    const isInitialLoad = height === 0;

    // FIX: When user quickly scrolls down, triggering a load, then up, leads to crash
    setTimeout(() => {
      const loadMoreButton = document.querySelector('#buttonLoadMore');
      // Fixed: the button must exist on the initial-load path too, otherwise
      // `.click()` dereferenced null before the list had rendered.
      if ((isInitialLoad || isInRange) && loadMoreButton) {
        (loadMoreButton as any).click();
      }
    }, 200);
  }, [scrollHeight]);

  if (error) return <Error />;

  return (
    <div id="list">
      {data && pointer?.cursorLang === data.dictionary[0]?.language
        ? data.dictionary.map((trl: ITranslation) => (
            <TranslationCard key={trl.word} trl={trl} />
          ))
        : [...Array(5)].map((x, i) => <TranslationCard key={i} trl={null} />)}

      <div className="row d-flex justify-content-center mb-3">
        {isRefetching ? (
          <Loader />
        ) : (
          <div
            className="btn btn-warning d-none"
            id="buttonLoadMore"
            onClick={() =>
              fetchMore({
                variables: {
                  cursor: pointer?.cursor,
                  language,
                  limit,
                },
              })
            }
          >
            Load More
          </div>
        )}
      </div>
    </div>
  );
}
const Loader = () => {
return (
<div>
{[...Array(5)].map((x, i) => (
<div
key={i}
className="m-2 spinner-grow spinner-grow-sm text-dark"
role="status"
>
<span className="sr-only">Loading...</span>
</div>
))}
</div>
);
};
|
// Twitter data-archive export: the accounts this user has blocked
// (account id plus an intent URL for the blocked profile).
window.YTD.block.part0 = [
  {
    "blocking" : {
      "accountId" : "1642661971",
      "userLink" : "https://twitter.com/intent/user?user_id=1642661971"
    }
  }
]
<reponame>AsToNlele/drift-frontend<gh_stars>1-10
import React from 'react';
import PropTypes from 'prop-types';
import { Switch } from '@patternfly/react-core';
/**
 * Toggle for enabling/disabling notifications on a baseline.
 *
 * Renders a PatternFly <Switch>; flipping it invokes
 * `handleNotificationToggle` with the full `baselineData` object.
 * On/off text labels are shown only when `hasLabel` is truthy.
 */
const NotificationsSwitch = (props) => {
    const { baselineData, classname, isChecked, handleNotificationToggle, hasLabel } = props;

    // Labels are optional; passing null suppresses them entirely.
    const enabledLabel = hasLabel ? 'Notifications are enabled' : null;
    const disabledLabel = hasLabel ? 'Notifications are disabled' : null;

    return (
        <Switch
            aria-label='notifications-toggle'
            className={ classname }
            isChecked={ isChecked }
            isReversed
            label={ enabledLabel }
            labelOff={ disabledLabel }
            onChange={ () => handleNotificationToggle(baselineData) }
        />
    );
};

NotificationsSwitch.propTypes = {
    baselineData: PropTypes.object,
    classname: PropTypes.string,
    handleNotificationToggle: PropTypes.func,
    hasLabel: PropTypes.bool,
    isChecked: PropTypes.bool
};

export default NotificationsSwitch;
|
<filename>viper-remote/cache.go
package remote
import "fmt"
// agolloKey derives the cache key for an (appID, endpoint) pair.
// Keys have the form "<appID>-<endpoint>".
// Note: fmt.Sprint inserts no separators between adjacent string operands,
// so this is a plain concatenation.
func agolloKey(appID, endpoint string) string {
	return fmt.Sprint(appID, "-", endpoint)
}
|
package com.serchinastico.mechrunner.schedule.ui.activity;
/*
* Copyright (C) 2015 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import com.melnykov.fab.FloatingActionButton;
import com.serchinastico.mechrunner.R;
import com.serchinastico.mechrunner.schedule.domain.model.Schedule;
import com.serchinastico.mechrunner.schedule.domain.model.Step;
import com.serchinastico.mechrunner.schedule.ui.action.OpenCreateStep;
import com.serchinastico.mechrunner.schedule.ui.action.OpenEditScheduleDetails;
import com.serchinastico.mechrunner.schedule.ui.action.ViewsList;
import com.serchinastico.mechrunner.schedule.ui.presenter.EditSchedulePresenter;
import com.serchinastico.mechrunner.schedule.ui.view.DividerDecoration;
import com.serchinastico.mechrunner.schedule.ui.view.steplist.StepListAdapter;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
/**
 * Screen for editing an existing {@link Schedule}: shows the schedule's steps
 * in a {@link RecyclerView}, lets the user append new steps via a floating
 * action button, and opens a details screen (name / repetitions count) from
 * the toolbar menu.
 *
 * <p>The {@link Schedule} to edit is passed in through the
 * {@link #SCHEDULE_EXTRA} intent extra. Views are bound with ButterKnife and
 * all model mutations are routed through {@link EditSchedulePresenter}; this
 * activity acts as its passive {@code View}.
 */
public class EditScheduleActivity extends ActionBarActivity implements EditSchedulePresenter.View {

    /** Intent extra key under which callers must put the Schedule to edit. */
    public static final String SCHEDULE_EXTRA = "schedule";

    // startActivityForResult request codes.
    private static final int EDIT_SCHEDULE = 1;  // edit name / repetitions details
    private static final int GET_NEW_STEP = 2;   // create a new step
    // Sentinel returned by getIntExtra when the repetitions extra is absent.
    private static final int DEFAULT_REPETITIONS_COUNT = -1;

    // ButterKnife-bound views; binding happens in onCreate via ButterKnife.inject.
    @InjectView(R.id.steps_list)
    RecyclerView stepsList;
    @InjectView(R.id.add_step_button)
    FloatingActionButton addStepButton;
    @InjectView(R.id.create_schedule_toolbar)
    Toolbar toolbar;

    private EditSchedulePresenter presenter;
    private RecyclerView.Adapter stepsListAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_create_schedule);
        ButterKnife.inject(this);
        presenter = new EditSchedulePresenter(this);
        presenter.setView(this);
        presenter.create();
        // NOTE(review): loadSchedule() returns null if the launching intent
        // lacks SCHEDULE_EXTRA, which would NPE in configureViews — callers
        // are presumably required to always supply the extra. TODO confirm.
        Schedule schedule = loadSchedule();
        presenter.setSchedule(schedule);
        configureViews(schedule);
    }

    /**
     * Wires up the steps list (layout manager, adapter, divider decoration),
     * attaches the FAB to the list so it hides on scroll, and installs the
     * toolbar as the action bar with the schedule's name as title.
     */
    private void configureViews(Schedule schedule) {
        RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(this);
        stepsList.setLayoutManager(layoutManager);
        stepsListAdapter = new StepListAdapter(schedule);
        stepsList.setAdapter(stepsListAdapter);
        stepsList.addItemDecoration(new DividerDecoration());
        addStepButton.attachToRecyclerView(stepsList);
        // Title must be set before setSupportActionBar for it to take effect.
        toolbar.setTitle(schedule.getName());
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }

    /** FAB tap: open the "create step" screen, expecting a result. */
    @OnClick(R.id.add_step_button)
    void onAddStepButtonClick() {
        OpenCreateStep action = new OpenCreateStep(this, getCommonViews());
        action.execute(GET_NEW_STEP);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Dispatch on which child screen is returning.
        switch (requestCode) {
            case EDIT_SCHEDULE:
                handleEditScheduleResult(resultCode, data);
                break;
            case GET_NEW_STEP:
                handleGetNewStepResult(resultCode, data);
                break;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.menu_create_schedule, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.action_settings:
                // "Settings" here opens the schedule details (name/repetitions).
                OpenEditScheduleDetails action = new OpenEditScheduleDetails(this, getCommonViews());
                action.execute(EDIT_SCHEDULE);
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Presenter callback: the step list changed; redraw it. */
    @Override
    public void updateSteps() {
        stepsListAdapter.notifyDataSetChanged();
    }

    /** Presenter callback: the schedule was renamed; reflect it in the toolbar. */
    @Override
    public void setTitle(final String title) {
        toolbar.setTitle(title);
    }

    /** Applies the edited name/repetitions returned by the details screen. */
    private void handleEditScheduleResult(int resultCode, Intent data) {
        if (resultCode == RESULT_OK) {
            String name = data.getStringExtra(
                    EditScheduleDetailsActivity.SCHEDULE_NAME_EXTRA);
            int repetitionsCount = data.getIntExtra(
                    EditScheduleDetailsActivity.SCHEDULE_REPETITIONS_COUNT_EXTRA,
                    DEFAULT_REPETITIONS_COUNT);
            presenter.setScheduleDetails(name, repetitionsCount);
        }
    }

    /** Appends the step created by the "create step" screen, if any. */
    private void handleGetNewStepResult(int resultCode, Intent data) {
        if (resultCode == RESULT_OK) {
            Step newStep = (Step) data.getSerializableExtra(CreateStepActivity.NEW_STEP_EXTRA);
            presenter.addStep(newStep);
        }
    }

    /** Reads the Schedule to edit from the launching intent (may be null). */
    private Schedule loadSchedule() {
        return (Schedule) getIntent().getSerializableExtra(SCHEDULE_EXTRA);
    }

    /**
     * Views shared with the navigation actions (used for transitions /
     * shared-element handling by the Open* action objects).
     */
    private ViewsList getCommonViews() {
        ViewsList commonViews = new ViewsList();
        commonViews.add(addStepButton);
        commonViews.add(toolbar);
        return commonViews;
    }
}
|
<filename>vit_keras/layers.py
# pylint: disable=arguments-differ,missing-function-docstring,missing-class-docstring,unexpected-keyword-arg,no-value-for-parameter
import tensorflow as tf
import tensorflow_addons as tfa
class ClassToken(tf.keras.layers.Layer):
    """Prepend a learnable classification token to a sequence input."""

    def build(self, input_shape):
        # The token's width matches the input's trailing (feature) axis.
        self.hidden_size = input_shape[-1]
        self.cls = tf.Variable(
            name="cls",
            initial_value=tf.zeros_initializer()(
                shape=(1, 1, self.hidden_size), dtype="float32"
            ),
            trainable=True,
        )

    def call(self, inputs):
        # Tile the single stored token across the batch, then cast so
        # mixed-precision inputs keep a consistent dtype after the concat.
        num_rows = tf.shape(inputs)[0]
        token = tf.broadcast_to(self.cls, [num_rows, 1, self.hidden_size])
        return tf.concat([tf.cast(token, dtype=inputs.dtype), inputs], 1)
class AddPositionEmbs(tf.keras.layers.Layer):
    """Adds (optionally learned) positional embeddings to the inputs."""

    def build(self, input_shape):
        # Positional embeddings are only defined for rank-3 inputs
        # (batch, sequence, features).
        assert (
            len(input_shape) == 3
        ), f"Number of dimensions should be 3, got {len(input_shape)}"
        init = tf.random_normal_initializer(stddev=0.06)
        self.pe = tf.Variable(
            name="pos_embedding",
            initial_value=init(shape=(1, input_shape[1], input_shape[2])),
            dtype="float32",
            trainable=True,
        )

    def call(self, inputs):
        # Cast so mixed-precision inputs keep their dtype after the add.
        embeddings = tf.cast(self.pe, dtype=inputs.dtype)
        return inputs + embeddings
class MultiHeadSelfAttention(tf.keras.layers.Layer):
    """Multi-head scaled dot-product self-attention.

    Splits the hidden dimension across ``num_heads`` heads, attends within
    the input sequence, and projects back to the original hidden size.

    ``call`` returns a tuple ``(output, weights)`` where ``output`` has the
    same shape as the input and ``weights`` are the per-head softmax
    attention maps.
    """

    def __init__(self, *args, num_heads, **kwargs):
        super().__init__(*args, **kwargs)
        self.num_heads = num_heads

    def build(self, input_shape):
        hidden_size = input_shape[-1]
        num_heads = self.num_heads
        # Each head gets an equal hidden_size // num_heads slice.
        if hidden_size % num_heads != 0:
            raise ValueError(
                f"embedding dimension = {hidden_size} should be divisible by number of heads = {num_heads}"
            )
        self.hidden_size = hidden_size
        self.projection_dim = hidden_size // num_heads
        self.query_dense = tf.keras.layers.Dense(hidden_size, name="query")
        self.key_dense = tf.keras.layers.Dense(hidden_size, name="key")
        self.value_dense = tf.keras.layers.Dense(hidden_size, name="value")
        self.combine_heads = tf.keras.layers.Dense(hidden_size, name="out")

    # pylint: disable=no-self-use
    def attention(self, query, key, value):
        # Scaled dot-product attention: softmax(Q K^T / sqrt(d_k)) V.
        score = tf.matmul(query, key, transpose_b=True)
        dim_key = tf.cast(tf.shape(key)[-1], score.dtype)
        scaled_score = score / tf.math.sqrt(dim_key)
        weights = tf.nn.softmax(scaled_score, axis=-1)
        output = tf.matmul(weights, value)
        return output, weights

    def separate_heads(self, x, batch_size):
        # (batch, seq, hidden) -> (batch, num_heads, seq, projection_dim).
        x = tf.reshape(x, (batch_size, -1, self.num_heads, self.projection_dim))
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, inputs):
        batch_size = tf.shape(inputs)[0]
        query = self.query_dense(inputs)
        key = self.key_dense(inputs)
        value = self.value_dense(inputs)
        query = self.separate_heads(query, batch_size)
        key = self.separate_heads(key, batch_size)
        value = self.separate_heads(value, batch_size)
        attention, weights = self.attention(query, key, value)
        # Merge heads back: (batch, heads, seq, proj) -> (batch, seq, hidden).
        attention = tf.transpose(attention, perm=[0, 2, 1, 3])
        concat_attention = tf.reshape(attention, (batch_size, -1, self.hidden_size))
        output = self.combine_heads(concat_attention)
        return output, weights

    def get_config(self):
        # FIX: without this override the layer cannot round-trip through
        # Keras serialization — ``num_heads`` is a required __init__ kwarg,
        # so from_config() on the base-class config would raise a TypeError.
        config = super().get_config()
        config.update({"num_heads": self.num_heads})
        return config
class TransformerBlock(tf.keras.layers.Layer):
    """Implements a pre-norm Transformer encoder block.

    Structure: LayerNorm -> multi-head self-attention -> dropout -> residual,
    then LayerNorm -> MLP (Dense, GELU, dropout, Dense, dropout) -> residual.

    ``call`` returns ``(output, attention_weights)``.
    """

    def __init__(self, *args, num_heads, mlp_dim, dropout, **kwargs):
        super().__init__(*args, **kwargs)
        self.num_heads = num_heads
        self.mlp_dim = mlp_dim
        self.dropout = dropout

    def build(self, input_shape):
        self.att = MultiHeadSelfAttention(
            num_heads=self.num_heads,
            name="MultiHeadDotProductAttention_1",
        )
        # GELU moved into tf.keras.activations in newer TF; fall back to
        # tensorflow_addons on older versions. Both use the exact (erf) form.
        self.mlpblock = tf.keras.Sequential(
            [
                tf.keras.layers.Dense(
                    self.mlp_dim,
                    activation="linear",
                    name=f"{self.name}/Dense_0",
                ),
                tf.keras.layers.Lambda(
                    lambda x: tf.keras.activations.gelu(x, approximate=False)
                )
                if hasattr(tf.keras.activations, "gelu")
                else tf.keras.layers.Lambda(
                    lambda x: tfa.activations.gelu(x, approximate=False)
                ),
                tf.keras.layers.Dropout(self.dropout),
                tf.keras.layers.Dense(input_shape[-1], name=f"{self.name}/Dense_1"),
                tf.keras.layers.Dropout(self.dropout),
            ],
            name="MlpBlock_3",
        )
        self.layernorm1 = tf.keras.layers.LayerNormalization(
            epsilon=1e-6, name="LayerNorm_0"
        )
        self.layernorm2 = tf.keras.layers.LayerNormalization(
            epsilon=1e-6, name="LayerNorm_2"
        )
        self.dropout_layer = tf.keras.layers.Dropout(self.dropout)

    def call(self, inputs, training):
        x = self.layernorm1(inputs)
        x, weights = self.att(x)
        x = self.dropout_layer(x, training=training)
        x = x + inputs  # residual around attention
        y = self.layernorm2(x)
        y = self.mlpblock(y)
        return x + y, weights  # residual around MLP

    def get_config(self):
        # FIX: merge the base-layer config instead of replacing it — the
        # original override silently dropped standard keys (name, dtype,
        # trainable, ...), breaking faithful reconstruction via from_config.
        config = super().get_config()
        config.update(
            {
                "num_heads": self.num_heads,
                "mlp_dim": self.mlp_dim,
                "dropout": self.dropout,
            }
        )
        return config
|
<reponame>Abhiraj441/zerocord
module.exports = async bot => {
console.log(`${bot.user.username} is online`)
bot.user.setActivity("c?help • " + bot.guilds.size + " servers" , {type: "PLAYING"})
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.