text stringlengths 1 1.05M |
|---|
<reponame>groupon/nakala
/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.nakala.core;
import opennlp.tools.namefind.NameFinderME;
import opennlp.tools.namefind.TokenNameFinderModel;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.Span;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
/**
* @author <EMAIL>
*/
public final class NameFinderWrapper {
    // Bit flags; combine with | to choose which NER models getInstance() loads.
    public static final int DATE = 1;
    public static final int LOCATION = 2;
    public static final int MONEY = 4;
    public static final int ORGANIZATION = 8;
    public static final int PERCENTAGE = 16;
    public static final int PERSON = 32;

    private static final Logger logger = Logger.getLogger(NameFinderWrapper.class);

    // Classpath locations of the pre-trained OpenNLP English NER models.
    private static String dateModelFile = "/nakala/opennlp_models/en-ner-date.bin";
    private static String locationModelFile = "/nakala/opennlp_models/en-ner-location.bin";
    private static String moneyModelFile = "/nakala/opennlp_models/en-ner-money.bin";
    private static String organizationModelFile = "/nakala/opennlp_models/en-ner-organization.bin";
    private static String percentageModelFile = "/nakala/opennlp_models/en-ner-percentage.bin";
    private static String personModelFile = "/nakala/opennlp_models/en-ner-person.bin";

    // Loaded name finders, keyed by the type flag they were loaded for.
    private Map<Integer, NameFinderME> ner;

    private NameFinderWrapper() {
    }

    /**
     * Creates a wrapper with the NER models selected by the bit mask.
     *
     * @param types bitwise OR of the type constants (e.g. {@code PERSON | LOCATION})
     * @return a wrapper holding one {@link NameFinderME} per requested type
     * @throws InvalidFormatException if a model file is corrupt
     * @throws IOException            if a model resource is missing or unreadable
     */
    public static NameFinderWrapper getInstance(int types) throws InvalidFormatException, IOException {
        NameFinderWrapper n = new NameFinderWrapper();
        n.ner = new HashMap<Integer, NameFinderME>();
        if ((types & DATE) != 0) {
            n.ner.put(DATE, nerFromFile(n.getClass(), dateModelFile));
        }
        if ((types & LOCATION) != 0) {
            n.ner.put(LOCATION, nerFromFile(n.getClass(), locationModelFile));
        }
        if ((types & MONEY) != 0) {
            n.ner.put(MONEY, nerFromFile(n.getClass(), moneyModelFile));
        }
        if ((types & ORGANIZATION) != 0) {
            n.ner.put(ORGANIZATION, nerFromFile(n.getClass(), organizationModelFile));
        }
        if ((types & PERCENTAGE) != 0) {
            n.ner.put(PERCENTAGE, nerFromFile(n.getClass(), percentageModelFile));
        }
        if ((types & PERSON) != 0) {
            n.ner.put(PERSON, nerFromFile(n.getClass(), personModelFile));
        }
        return n;
    }

    /**
     * Loads a {@link TokenNameFinderModel} from a classpath resource.
     * Fix: the original never closed the stream (resource leak) and would
     * throw an undeclared NullPointerException when the resource was absent.
     */
    @SuppressWarnings("rawtypes")
    private static NameFinderME nerFromFile(Class c, String modelFile) throws InvalidFormatException, IOException {
        InputStream in = c.getResourceAsStream(modelFile);
        if (in == null) {
            throw new IOException("NER model resource not found: " + modelFile);
        }
        try {
            TokenNameFinderModel model = new TokenNameFinderModel(in);
            return new NameFinderME(model);
        } finally {
            in.close();
        }
    }

    /**
     * Finds entity spans of the given type in the token sequence.
     *
     * @return the spans, or {@code null} if the model for {@code type} was not loaded
     */
    public Span[] find(String[] tokens, int type) {
        // Explicit null check instead of the original catch(NullPointerException),
        // which could also mask an NPE thrown inside NameFinderME.find().
        NameFinderME finder = ner.get(type);
        if (finder == null) {
            logger.warn("NER model for type " + type + " not loaded.");
            return null;
        }
        return finder.find(tokens);
    }

    /** Small demo: tags PERSON and LOCATION entities in a fixed sentence. */
    public static void main(String[] args) {
        String[] tokens = {"John", "Jameson", ",", "who", "was", "from",
                "Ireland", ",", "was", "a", "great", "bewer"};
        try {
            NameFinderWrapper nfw = NameFinderWrapper.getInstance(PERSON | LOCATION);
            Span[] spans = nfw.find(tokens, PERSON);
            for (String token : tokens) {
                System.out.println(token);
            }
            System.out.println("Person:");
            for (Span span : spans) {
                System.out.println(span);
            }
            spans = nfw.find(tokens, LOCATION);
            System.out.println("\nLocation:");
            for (Span span : spans) {
                System.out.println(span);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from os import path

# Directory containing this file; used to locate test fixtures.
HERE = path.dirname(path.abspath(__file__))

CHECK_NAME = 'cassandra_nodetool'
# Names of the Cassandra containers spun up for the integration tests.
CASSANDRA_CONTAINER_NAME = 'dd-test-cassandra'
CASSANDRA_CONTAINER_NAME_2 = 'dd-test-cassandra2'

# Check instance config: runs nodetool inside the first test container.
CONFIG_INSTANCE = {
    'nodetool': 'docker exec {} nodetool'.format(CASSANDRA_CONTAINER_NAME),
    'keyspaces': ['system', 'test'],
    'username': 'controlRole',
    'password': '<PASSWORD>',
    'tags': ['foo', 'bar']
}

# JMX port (string on purpose: consumed as-is in config/commands).
PORT = "7199"
|
def convert_currency(amount, source_currency, target_currency):
    """Convert an amount between USD and one of EUR/GBP/JPY.

    Rates are hard-coded and expressed as units of the foreign currency
    per 1 USD. Currency codes are matched case-insensitively.

    Returns the converted amount, or the string "Currency not supported"
    when neither side of the conversion is USD (or the other side is not
    one of the known currencies).
    """
    rates_per_usd = {
        "EUR": 1.18,
        "GBP": 0.72,
        "JPY": 110.25,
    }
    src = source_currency.upper()
    dst = target_currency.upper()
    if src == "USD" and dst in rates_per_usd:
        return amount * rates_per_usd[dst]
    if dst == "USD" and src in rates_per_usd:
        return amount / rates_per_usd[src]
    return "Currency not supported"
#!/usr/bin/env bash
# Trigger an OpenShift binary (source) build of the 'hello-quarkus' build config,
# uploading the current directory as build input and streaming the build log.
oc start-build hello-quarkus --from-dir=. --follow
<gh_stars>0
$(document).ready(function(){
    // PhoneGap Device event
    // Defer all SDK/UI wiring until the device APIs are available.
    document.addEventListener("deviceready", onDeviceReady, false);
});
// Loads the Facebook JS SDK, initializes it, and wires up the login/logout/post UI.
function onDeviceReady(){
    $.ajaxSetup({ cache: true });
    $.getScript('//connect.facebook.net/en_US/sdk.js', function(){
        FB.init({
            appId: '213414532382126',
            version: 'v2.5' // or v2.0, v2.1, v2.2, v2.3
        });
        $('#loginbutton,#feedbutton').removeAttr('disabled');
        // Seed the stored login state from the current FB session.
        FB.getLoginStatus(updateStatusCallback);
    });
    // Login Button handler
    $('#fb_login').click(function(){
        login();
    });
    $('#fb_logout').click(function(){
        logout();
    });
    // Post form: intercept submit and publish the textarea content.
    $('#fb_form').click(function(e){
        e.preventDefault();
        var post = $('#post').val();
        submitPost(post);
    });
}
// Persists the FB login state ("1" only when fully connected; 'not_authorized'
// and every other status count as logged out), then refreshes the UI.
function updateStatusCallback(response){
    var loggedIn = response.status === 'connected' ? "1" : "0";
    localStorage.setItem("is_logged_in", loggedIn);
    refresh();
}
// Opens the FB login dialog; on success stores the flag and refreshes the UI.
// NOTE(review): 'publish_actions' has been deprecated by Facebook — confirm the
// requested scopes are still valid for this app.
function login(){
    FB.login(function(response) {
        if (response.status === 'connected') {
            localStorage.setItem("is_logged_in", "1");
            refresh();
        }
    }, {scope: 'email, public_profile, user_posts, publish_actions, user_photos'});
}
// Toggles the UI between logged-in and logged-out states based on the stored flag.
function refresh(){
    if (localStorage.is_logged_in == "1") {
        getPicture();
        getPosts();
        $('#fb_login').hide();
        $('#fb_logout').show();
        $('#fb_form').show();
        $('.posts').show();
    } else {
        removePicture();
        $('#fb_logout').hide();
        $('#fb_login').show();
        $('#fb_form').hide();
        $('.posts').hide();
    }
}
// Fetches the logged-in user's profile picture and shows it in #pic.
function getPicture(){
    FB.api('/me/picture', function(response){
        $('#pic').attr('src', response.data.url);
    });
}
// Logs out of Facebook, clears the stored flag, and resets the UI.
function logout(){
    FB.logout(function(response){
        localStorage.setItem("is_logged_in", "0");
        refresh();
    });
}
// Restores the placeholder logo in place of the profile picture.
function removePicture(){
    $('#pic').attr('src', 'img/logo.png');
}
// Loads the user's feed and appends each mobile status update to #fb_posts.
function getPosts(){
    FB.api('/me/feed', function(response){
        if (!response || response.error) {
            console.log('Error occurred');
        } else {
            console.log(response.data);
            // 'var i': the original loop leaked `i` as an implicit global.
            for (var i = 0; i < response.data.length; i++) {
                if (response.data[i].status_type == 'mobile_status_update') {
                    // Fix broken markup: the opening <div> was missing its
                    // closing '>', and the "Posted" paragraph was missing the
                    // opening '<' of its <p> tag.
                    var post_html =
                        '<div class="post">' +
                        '<p>' + response.data[i].message + '</p>' +
                        '<p><em>Posted: ' + response.data[i].created_time + '</em></p>' +
                        '</div>';
                    $('#fb_posts').append(post_html);
                }
            }
        }
    });
}
// Publishes `post` to the user's feed and prepends it to the local post list.
function submitPost(post) {
    FB.api('me/feed', 'post', {message: post}, function(response){
        if (!response || response.error) {
            console.log('Error occurred');
        } else {
            console.log(response.data);
            var post_html = '<div class="post">' +
                '<p>' + post + '</p>' +
                '</div>';
            $('#fb_posts').prepend(post_html);
        }
    });
}
/*
* File: Initialize.h
* Author: <NAME>
*
* Created on 22. April 2019, 13:44
*/
#ifndef INITIALIZE_H
#define INITIALIZE_H
//******************************************************************************
// Functions
//******************************************************************************
// One-time hardware initialization — presumably peripheral/pin setup; see
// the matching Initialize.c for details.
void Hardware_Init(void);
// Oscillator configuration — TODO confirm against the implementation.
void INIT_OSC (void);
//******************************************************************************
// Global variables
//******************************************************************************
#endif //INITIALIZE_H
//
// Target_KRLogin.h
// KRLogin
//
// Created by LX on 2017/12/15.
// Copyright © 2017年 Ace. All rights reserved.
//
#import <Foundation/Foundation.h>
// UIKit is required: the interface below returns a UIViewController.
#import <UIKit/UIKit.h>
// Mediator-style "target" exposing the KRLogin module's actions by name.
@interface Target_KRLogin : NSObject
// Returns the login view controller built from the given params dictionary.
// NOTE(review): UIViewController needs UIKit — confirm the header imports it.
- (UIViewController *)Action_LoginViewController:(NSDictionary *)params;
@end
|
#! /bin/bash
# Create the Python 2.7 virtualenv on first run and install its requirements.
if [ ! -d "virtualenv_py2" ]; then
    echo "Creating virtual environment virtualenv_py2"
    virtualenv --python=/usr/bin/python2.7 virtualenv_py2
    # NOTE(review): this 'activate' only affects this script's process — it is
    # presumably here just so the following 'pip install' targets the venv.
    source virtualenv_py2/bin/activate
    pip install -r requirements_py2.txt
fi
# Enter the python virtual enviro on the current shell
echo "Entering virtual environment virtualenv_py2"
# Start an interactive bash whose rcfile activates the virtualenv.
bash --rcfile <(echo '. virtualenv_py2/bin/activate')
# use echo $BASHPID to check the bash prompt process id
|
#!/bin/sh
# CocoaPods "Copy Pods Resources" build phase script.
set -e
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# List file that accumulates plain resources for the final rsync; truncate it.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Map TARGETED_DEVICE_FAMILY onto ibtool/actool --target-device flags.
case "${TARGETED_DEVICE_FAMILY}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Dispatches a single pod resource to the right tool by extension:
# storyboards/xibs are compiled with ibtool, Core Data models with momc/mapc,
# frameworks are rsynced, xcassets are queued for one actool run, and
# everything else is appended to the plain-copy list.
install_resource()
{
  # Resolve relative paths against PODS_ROOT.
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  # Each arm echoes the command first (for the build log), then runs it.
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Queued and compiled in one actool invocation at the end of the script.
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Register the FirebaseUI / GoogleSignIn / TwitterKit resource bundles for the
# active build configuration (the Debug and Release lists are identical here).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseFacebookAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseGoogleAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebasePhoneAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseTwitterAuthUI.bundle"
  install_resource "${PODS_ROOT}/GoogleSignIn/Resources/GoogleSignIn.bundle"
  install_resource "${PODS_ROOT}/TwitterKit/iOS/TwitterKit.framework/TwitterKitResources.bundle"
  install_resource "${PODS_ROOT}/TwitterKit/iOS/TwitterKit.framework/TwitterShareExtensionUIResources.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseFacebookAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseGoogleAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebasePhoneAuthUI.bundle"
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/FirebaseUI/FirebaseTwitterAuthUI.bundle"
  install_resource "${PODS_ROOT}/GoogleSignIn/Resources/GoogleSignIn.bundle"
  install_resource "${PODS_ROOT}/TwitterKit/iOS/TwitterKit.framework/TwitterKitResources.bundle"
  install_resource "${PODS_ROOT}/TwitterKit/iOS/TwitterKit.framework/TwitterShareExtensionUIResources.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Copy the queued plain resources into the product (and into INSTALL_DIR for
# 'install' builds), then delete the list file.
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
# Compile all queued .xcassets (plus any found in the workspace) in one actool run.
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # Fix: the '*' must be OUTSIDE the quotes for [[ ]] to glob-match. The
    # original '"${PODS_ROOT}*"' compared against the literal string
    # "<pods-root>*", so pod-owned xcassets were never excluded.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
# wget arguments: fetch every .tar.xz tarball from the KDE Applications 18.04.3
# release directory (-A is wget's accept-suffix filter).
WGET_ARGS=( https://download.kde.org/stable/applications/18.04.3/ -A '*.tar.xz' )
|
from datetime import datetime
def get_day_of_week(dates):
    """Map ISO 'YYYY-MM-DD' date strings to abbreviated weekday names.

    :param dates: iterable of 'YYYY-MM-DD' strings
    :return: list of weekday abbreviations (e.g. 'Mon', 'Sat'), one per input
    """
    def weekday_abbrev(date_str):
        year, month, day = (int(part) for part in date_str.split('-'))
        return datetime(year, month, day).strftime('%a')

    return [weekday_abbrev(d) for d in dates]
# Test the function
input_dates = ["2022-01-15", "2022-01-16", "2022-01-17"]
output_days = get_day_of_week(input_dates)
print(output_days) # Output: ['Sat', 'Sun', 'Mon'] |
const path = require('path');
module.exports.Generator = class Generator {
    /** Name shown in the generator selection list. */
    getName() {
        // this is the name your generator will appear in the list under
        return 'Serverless application';
    }

    /** Interactive prompts; answers feed the template engine. */
    async getQuestions() {
        const projectCode = path.basename(path.dirname(process.cwd()));
        // see inquirer docs to get more information on the format of questions
        // https://www.npmjs.com/package/inquirer#questions
        return [
            {
                message: 'Application code',
                name: 'application_code',
            },
            {
                message: 'Add contact form?',
                name: 'use_contact_form',
                type: 'confirm',
                default: false,
            },
            {
                message: 'Project name',
                name: 'project_name',
            },
            {
                message: 'Function name',
                name: 'function_name',
            },
            {
                message: 'Project domain',
                name: 'project_domain',
                when: (answers) => answers.use_contact_form,
            },
            {
                message: 'GitHub account name',
                name: 'github_account_name',
                default: 'gannochenko',
            },
            {
                message: 'GitHub repository name',
                name: 'github_repository_name',
                default: () => {
                    return projectCode;
                },
            },
            {
                message: 'Author name',
                name: 'author_name',
                default: '<NAME>',
            },
            {
                message: 'Author email',
                name: 'author_email',
                default: '<EMAIL>',
            },
        ];
    }

    /** Derives kebab/terraform-safe variants of the codes before generation. */
    async refineAnswers(answers) {
        // parent project code
        answers.project_code = path.basename(path.dirname(process.cwd()));
        answers.project_code_kebab = this.util.textConverter.toKebab(
            answers.project_code,
        );
        answers.project_code_tf = answers.project_code_kebab.replace(/[^a-zA-Z0-9_]/g, '-');
        // here it is possible to alter some answers before the generation starts
        answers.application_code_kebab = this.util.textConverter.toKebab(
            answers.application_code,
        );
        answers.application_code_tf = answers.application_code_kebab.replace(/[^a-zA-Z0-9_]/g, '-');
        answers.use_function = !!answers.function_name;
        return answers;
    }

    /** Runtime deps; falsy entries (form disabled) are expected to be filtered out downstream. */
    async getDependencies(answers) {
        const { use_contact_form } = answers;
        return {
            destination: '[application_code_kebab]/',
            packages: ['cors', !!use_contact_form && 'axios', !!use_contact_form && 'pug', 'aws-sdk'],
        };
    }

    /** Dev/build-time deps for the generated application. */
    async getDevDependencies(answers) {
        const { use_contact_form } = answers;
        return {
            destination: '[application_code_kebab]/',
            packages: [
                '@types/cors',
                '@types/ejs',
                '@types/express',
                'dotenv-cli',
                'dotenv-webpack',
                'express',
                'ts-loader',
                'ts-node',
                'typescript',
                'webpack',
                'webpack-cli',
                'webpack-node-externals',
                !!use_contact_form && '@types/pug',
                'serverless',
                'serverless-offline',
                'serverless-webpack',
            ],
        };
    }

    /** Post-generation: make scripts executable and move the CD workflow into .github/workflows. */
    async onAfterExecution() {
        const applicationCode = this.answers.application_code_kebab;
        const applicationFolder = path.join(
            this.context.destinationPath,
            applicationCode,
        );
        await this.util.execa('chmod', ['-R', '+x', './script'], {
            cwd: applicationFolder,
            stdio: ['inherit', 'inherit', 'inherit'],
        });
        // Fix: mv needs SEPARATE source and destination arguments. The original
        // path.join() collapsed both into a single path, so mv received one
        // operand and failed.
        await this.util.execa('mv', [
            path.join(applicationFolder, `cd.${applicationCode}.yml`),
            path.join(applicationFolder, '../../.github/workflows'),
        ], {
            cwd: applicationFolder,
            stdio: ['inherit', 'inherit', 'inherit'],
        });
    }
};
|
'use strict'
/**
* adonis-websocket
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
const Macroable = require('macroable')
/**
* An instance of this class is passed to all websocket
* handlers and middleware.
*
* @binding Adonis/Src/WsContext
* @alias WsContext
* @group Ws
*
* @class WsContext
* @constructor
*
* @example
* ```js
* const WsContext = use('WsContext')
*
* WsContext.getter('view', function () {
* return new View()
* }, true)
*
* // The last option `true` means the getter is singleton.
* ```
*/
class WsContext extends Macroable {
  /**
   * @param {Object} req - the websocket upgrade request
   */
  constructor (req) {
    super()

    /**
     * Websocket req object
     *
     * @attribute req
     *
     * @type {Object}
     */
    this.req = req

    // Invoke every registered onReady callback with the fresh context.
    // Non-function entries are skipped defensively.
    this.constructor._readyFns
      .filter((fn) => typeof (fn) === 'function')
      .forEach((fn) => fn(this))
  }

  /**
   * Hydrate the context constructor
   * (resets macros/getters via Macroable and clears ready callbacks)
   *
   * @method hydrate
   *
   * @return {void}
   */
  static hydrate () {
    super.hydrate()
    this._readyFns = []
  }

  /**
   * Define onReady callbacks to be executed
   * once the request context is instantiated
   *
   * @method onReady
   *
   * @param {Function} fn
   *
   * @chainable
   */
  static onReady (fn) {
    this._readyFns.push(fn)
    return this
  }
}
/**
* Defining _macros and _getters property
* for Macroable class
*
* @type {Object}
*/
WsContext._macros = {}
WsContext._getters = {}
WsContext._readyFns = []
module.exports = WsContext
|
<reponame>jenineellis/CWCAdmin
import React from "react";
import { Nav, Navbar } from 'react-bootstrap';
// import "./style.css";
// Top navigation bar; logging out simply clears the global 'User' state.
function NavBar(props) {
  const handleLogout = () => props.updateGlobalState('User', null);

  // NOTE(review): only "Dashboard" navigates via href; the other items set an
  // eventKey only — presumably routing is handled elsewhere; confirm.
  return (
    <div className="nav">
      <Navbar fixed='top'>
        <Nav variant="pills" defaultActiveKey="/" as="ul">
          <Nav.Item as="li">
            <Nav.Link href="/dashboard">Dashboard</Nav.Link>
          </Nav.Item>
          <Nav.Item as="li">
            <Nav.Link eventKey="/inventory">Orders</Nav.Link>
          </Nav.Item>
          <Nav.Item as="li">
            <Nav.Link eventKey="/users">Users</Nav.Link>
          </Nav.Item>
          <Nav.Item as="li">
            <Nav.Link eventKey="/producers">Producers</Nav.Link>
          </Nav.Item>
          <Nav.Item as="li">
            <Nav.Link onClick={handleLogout}>Logout</Nav.Link>
          </Nav.Item>
        </Nav>
      </Navbar>
    </div>
  );
}
export default NavBar;
// const handleShow = () => props.updateGlobalState('showModal', true);
// return (
// <div className="nav-admin">
// <Navbar fixed='top'>
// <Nav variant="pills" defaultActiveKey="/admin" as="ul">
// <Nav.Item as="li">
// <Nav.Link href="/admin">Dashboard</Nav.Link>
// </Nav.Item>
// <Nav.Item as="li">
// <Nav.Link eventKey="/admin/inventory">Orders</Nav.Link>
// </Nav.Item>
// <Nav.Item as="li">
// <Nav.Link eventKey="/admin/users">Users</Nav.Link>
// </Nav.Item>
// <Nav.Item as="li">
// <Nav.Link eventKey="/admin/producers">Producers</Nav.Link>
// </Nav.Item>
// <Nav.Item as="li">
// <Nav.Link onClick={handleShow}>Login</Nav.Link>
// </Nav.Item>
// </Nav>
// </Navbar>
// </div>
// );
// } |
. "$(dirname "$0")"/common.sh
# Optional script to simplify the initial setup: run this script to pub-get all packages and generate code.
# If you don't want all packages, you can also run the commands manually for each package when you actually need it.
# log colors setup
INFO='\033[1;34m'
WARN='\033[1;31m'
NC='\033[0m' # no color
# Runs 'pub get' (and optionally code generation) for one package directory.
#   $1: tool to use ('dart' or 'flutter'); skipped with a warning if missing
#   $2: package directory
#   $3: optional 'generate' to also run build_runner
function initialize() {
  local tool=$1
  local dir=$2
  local action=${3:-}
  if [[ ! -x "$(command -v "$tool")" ]]; then
    echo -e "${WARN}Command '$tool' not found. Skipping setup of directory '$dir'${NC}"
    return
  fi
  echo -e "${INFO}Setting up directory '$dir'${NC}"
  (
    # Subshell so the cd does not leak into the caller.
    cd "$dir" || exit 1
    $tool pub get
    if [[ "$action" == "generate" ]]; then
      if [[ "$(basename "$tool")" == "flutter" ]]; then
        # Flutter ~2.0 fails: The pubspec.lock file has changed since the .dart_tool/package_config.json file was generated, please run "pub get" again.
        # So we do exactly as suggested... Looks like something to do with path dependency_overrides. Try to remove the workaround with the next stable release.
        local generateCmd="$tool pub run build_runner build"
        $generateCmd || ($tool pub get && $generateCmd)
      else
        $tool run build_runner build
      fi
    fi
  )
}

initialize dart generator
initialize dart benchmark generate
initialize dart objectbox generate
initialize flutter objectbox/example/flutter/objectbox_demo generate
initialize flutter objectbox/example/flutter/objectbox_demo_sync generate
|
# redis service setup (Synology package service definition)
CFG_FILE="${SYNOPKG_PKGVAR}/redis.conf"
SERVICE_COMMAND="${SYNOPKG_PKGDEST}/bin/redis-server ${CFG_FILE}"
# Run in background and let the service manager track the PID.
SVC_BACKGROUND=y
SVC_WRITE_PID=y

# Post-install hook: size redis' maxmemory from the host's physical RAM.
service_postinst ()
{
    # Use 15% of total physical memory with maximum of 64MB
    MEMORY=`awk '/MemTotal/{memory=$2/1024*0.15; if (memory > 64) memory=64; printf "%0.f", memory}' /proc/meminfo`
    # Set the maximum memory to use in configuration file
    sed -i -e "s/@maxmemory@/${MEMORY}mb/g" ${CFG_FILE}
}
|
#include <iostream>
#include <GL/glew.h>
// Simple game state holder: score and health, both clamped at zero.
class Game {
private:
    int score;
    int health;
    // Shared vertex storage — presumably filled by rendering code elsewhere;
    // zero-initialized at program start.
    static GLfloat g_vertex_buffer_data[100000];

public:
    Game() : score(0), health(100) {}

    // Adds points (may be negative); score never drops below 0.
    void updateScore(int points) {
        score += points;
        if (score < 0) {
            score = 0; // Ensure score doesn't go below 0
        }
    }

    // Subtracts damage (negative damage heals); health never drops below 0.
    void updateHealth(int damage) {
        health -= damage;
        if (health < 0) {
            health = 0; // Ensure health doesn't go below 0
        }
    }

    // const-qualified accessors (fix: original getters were non-const, so they
    // could not be called through a const Game&).
    int getScore() const {
        return score;
    }

    int getHealth() const {
        return health;
    }
};

// Define the static member array outside the class
GLfloat Game::g_vertex_buffer_data[100000];
// Demo: exercise the score/health updates and print the results.
int main() {
    Game game;
    game.updateScore(50);
    game.updateHealth(30); // 100 - 30 = 70
    std::cout << "Score: " << game.getScore() << std::endl; // Output: Score: 50
    std::cout << "Health: " << game.getHealth() << std::endl; // Output: Health: 70
    return 0;
}
<reponame>minuk8932/Algorithm_BaekJoon
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
 *
 * @author minchoba
 * Baekjoon Online Judge #16360: Go Latin
 *
 * @see https://www.acmicpc.net/problem/16360/
 *
 */
public class Boj16360 {
    // Suffix rewrite rules: "<english-suffix> <latin-replacement>" pairs.
    private static final String[] FORMAT = {"a as", "i ios", "y ios", "l les",
            "n anes", "ne anes", "o os", "r res", "t tas", "u us", "v ves", "w was"};
    // Appended verbatim when no rule matches.
    private static final String DEFAULT = "us";
    private static final String NEW_LINE = "\n";

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());
        String[] str = new String[N];

        for(int i = 0; i < N; i++) {
            str[i] = br.readLine();
        }

        System.out.println(process(N, str));
    }

    // Applies the first matching suffix rule to each word; falls back to DEFAULT.
    private static StringBuilder process(int n, String[] arr) {
        StringBuilder sb = new StringBuilder();

        for(String str: arr) {
            boolean flag = false;

            for(String f: FORMAT) {
                StringTokenizer st = new StringTokenizer(f);
                String post = st.nextToken();
                String change = st.nextToken();

                int leng = post.length();
                int loop = leng;
                int strLeng = str.length();
                int diff = 0;

                // Compare the rule suffix against the word's tail, right to left.
                while(loop-- > 0) {
                    if(post.charAt(leng - diff - 1) != str.charAt(strLeng - diff - 1)) break; // suffix differs partway through
                    diff++;
                }

                if(diff == leng) { // full suffix match: cut it off and append the replacement
                    flag = true;
                    sb.append(str.substring(0, strLeng - leng)).append(change).append(NEW_LINE);
                    break;
                }
            }

            if(!flag) sb.append(str).append(DEFAULT).append(NEW_LINE); // no rule matched this word
        }
        return sb;
    }
}
|
<gh_stars>1-10
// Registers the debug-log HTTP endpoints on the given server instance.
function DebugLogger(server) {
    const { responseModifierMiddleware, requestBodyJSONMiddleware } = require('../../utils/middlewares');
    const { createHandlerAppendToLog, createHandlerReadFromLog } = require('./controllers');

    // Handlers are bound to this server at registration time.
    const appendToLog = createHandlerAppendToLog(server);
    const readFromLog = createHandlerReadFromLog(server);

    // Shared middleware for every /log route.
    server.use(`/log/*`, responseModifierMiddleware);
    server.use(`/log/*`, requestBodyJSONMiddleware);

    // Append to / read from the log of a given anchor.
    server.post(`/log/add/:anchorID/:logLevel`, appendToLog);
    server.get(`/log/get/:anchorID`, readFromLog);
}
module.exports = DebugLogger;
|
<reponame>YS-L/pgbm
#ifndef EVAL_H_
#define EVAL_H_
#include <vector>
class DataMatrix;
// Abstract interface for evaluation metrics over model predictions.
class Metric {
 public:
  virtual ~Metric() { };
  // Returns the metric value of `predictions` evaluated against `data`.
  virtual double Evaluate(const std::vector<double>& predictions,
                          const DataMatrix& data) const = 0;
  // Human-readable metric name.
  virtual const char* Name() const = 0;
};

// Classification accuracy metric (implementation in the .cpp file).
class Accuracy: public Metric {
 public:
  virtual double Evaluate(const std::vector<double>& predictions,
                          const DataMatrix& data) const;
  virtual const char* Name() const {
    return "Accuracy";
  }
};
#endif
|
-- Overwrite seeded user emails with placeholders, one row per id.
UPDATE USER_BASE set email='<EMAIL>' where id=1;
UPDATE USER_BASE set email='<EMAIL>' where id=2;
UPDATE USER_BASE set email='<EMAIL>' where id=3;
UPDATE USER_BASE set email='<EMAIL>' where id=4;
UPDATE USER_BASE set email='<EMAIL>' where id=5;
UPDATE USER_BASE set email='<EMAIL>' where id=6;
UPDATE USER_BASE set email='<EMAIL>' where id=7;
-- No WHERE clause: sets the same test mobile number on EVERY row.
UPDATE USER_BASE set mobile='18012345678';
-- Clear the attribute and schema tables entirely.
delete from USER_ATTR;
delete from USER_SCHEMA;
|
<reponame>bate/c3nav
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2016-12-24 18:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django; applies on top of migration 0032.
    dependencies = [
        ('mapdata', '0032_auto_20161223_2225'),
    ]

    operations = [
        # New boolean flag on LocationGroup.
        migrations.AddField(
            model_name='locationgroup',
            name='can_describe',
            field=models.BooleanField(default=True, verbose_name='can be used to describe a position'),
        ),
        # Redefinition of the existing compiled_room field.
        migrations.AlterField(
            model_name='locationgroup',
            name='compiled_room',
            field=models.BooleanField(default=False, verbose_name='is a compiled room'),
        ),
    ]
|
package cn.crabapples.spring.test;
import java.io.*;
/**
* TODO
*
* @author Mr.He
* 2020/3/8 9:53
* e-mail <EMAIL>
* qq 294046317
* pc-name 29404
*/
public class Utils {
    /**
     * Serializes {@code obj} to {@code d:/<f>} (NOTE(review): hard-coded
     * Windows drive — confirm this is intended). IOExceptions are logged
     * and swallowed, matching the original contract.
     */
    public static void saveObj(Object obj, String f) {
        File file = new File("d:/" + f);
        // try-with-resources: the original leaked the stream when
        // writeObject() threw before close().
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(file))) {
            oos.writeObject(obj);
            oos.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Deserializes an object from {@code d:/<f>}.
     *
     * @return the deserialized object, or {@code null} on any I/O or
     *         class-resolution failure (logged, matching the original)
     */
    public static Object readObj(String f) {
        File file = new File("d:/" + f);
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file))) {
            return ois.readObject();
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
import SwiftUI
import MapKit
struct ContentView: View {
    // Region shown by the map: centered on (0, 0) with a ~0.1° span.
    @State private var region = MKCoordinateRegion(
        center: CLLocationCoordinate2D(latitude: 0, longitude: 0),
        span: MKCoordinateSpan(latitudeDelta: 0.1, longitudeDelta: 0.1)
    )

    var body: some View {
        // Fix: `MapKit` is the framework name, not a view type. The SwiftUI
        // map view is `Map`, and its binding parameter is `coordinateRegion:`.
        Map(coordinateRegion: $region, showsUserLocation: true)
    }
}
def find_words(s, words):
    """Return True iff every entry of `words` occurs as a whitespace-separated
    token of `s`.

    Matching is exact: case-sensitive, and punctuation stays attached to
    tokens (so "sentence" does not match the token "sentence.").
    An empty `words` yields True.

    :param s: text to search
    :param words: iterable of words that must all be present
    :return: bool
    """
    # Equivalent to the original "remove each found word from a set and check
    # it is empty" loop, expressed as a single subset test.
    return set(words).issubset(s.split())
# example
find_words("This is a sample text and sentence.", ["this", "sentence", "text", "sample"]) # returns False: matching is case-sensitive ("this" != "This") and punctuation is kept ("sentence" != "sentence.")
import os, pygame
from pygame import mixer
pygame.font.init()
mixer.init()
GameTitle = "Western Defender"
# Game and View State
SCREEN = pygame.Rect(0,0, 800, int(800 * 0.8))
ViewScreen = pygame.display.set_mode((SCREEN.width, SCREEN.height))
GameRunning = True
PlayerTypes = {
"player": "player"
}
EnemyTypes = {
"alien1": "alien1",
}
# Color constants
BackgroundColor = (220, 220, 144)
GroundColor = (220, 220, 144)
WHITE = (255,255,255)
BLACK = (0,0,0)
RED = (255,0,0)
YELLOW = (255,255,0)
# Fonts
TitleFont = pygame.font.SysFont('comicsans', 40)
MediumFont = pygame.font.SysFont('comicsans', 30)
SmallFont = pygame.font.SysFont('comicsans', 20)
#music
pygame.mixer.music.load(os.path.join('assets', 'music', 'track1.ogg')),
pygame.mixer.music.queue(os.path.join('assets', 'music', 'track2.ogg'))
pygame.mixer.music.set_volume(0.5)
pygame.mixer.music.play(-1, 0.0, 5000)
# Images
StartImg = pygame.image.load(os.path.join('assets', 'sprites', 'button', 'single_player.png'))
VictoryImg = pygame.image.load(os.path.join('assets', 'sprites', 'button', 'victory.png'))
Level1Img = pygame.image.load(os.path.join('assets', 'sprites', 'button', 'level1.png'))
Level2Img = pygame.image.load(os.path.join('assets', 'sprites', 'button', 'level2.png'))
Level3Img = pygame.image.load(os.path.join('assets', 'sprites', 'button', 'level3.png'))
Level4Img = pygame.image.load(os.path.join('assets', 'sprites', 'button', 'level4.png'))
BulletImg = pygame.image.load(os.path.join('assets', 'sprites', 'icons', 'bullet.png')).convert_alpha()
FlameImg = pygame.image.load(os.path.join('assets', 'sprites', 'icons', 'flame.png')).convert_alpha()
HudChrome = pygame.image.load(os.path.join('assets', 'sprites', 'ui', 'hudChrome.png'))
ShotgunImg = pygame.image.load(os.path.join('assets', 'sprites', 'icons', 'shotgun.png'))
RocketImg = pygame.image.load(os.path.join('assets', 'sprites', 'icons', 'rocket.png'))
FlamethrowerImg = pygame.image.load(os.path.join('assets', 'sprites', 'icons', 'flamethrower.png'))
#background
mountain_img = pygame.image.load(os.path.join('assets', 'background', 'mountain.png')).convert_alpha()
pine1_img = pygame.image.load(os.path.join('assets', 'background', 'pine1.png')).convert_alpha()
pine2_img = pygame.image.load(os.path.join('assets', 'background', 'pine2.png')).convert_alpha()
sky_img = pygame.image.load(os.path.join('assets', 'background', 'sky_cloud.png')).convert_alpha()
#cutscenes
CutScene1_img = pygame.image.load(os.path.join('assets', 'sprites', 'cutscenes', '01.png'))
CutScene2_img = pygame.image.load(os.path.join('assets', 'sprites', 'cutscenes', '02.png'))
CutScene3_img = pygame.image.load(os.path.join('assets', 'sprites', 'cutscenes', '03.png'))
# Time-machine build stages, indexed by construction progress.
TimeMachineList = [
    pygame.image.load(os.path.join('assets', 'sprites', 'timemachine', 'timemachine_unfinished.png')).convert_alpha(),
    pygame.image.load(os.path.join('assets', 'sprites', 'timemachine', 'timemachine_capsule_red.png')).convert_alpha(),
    pygame.image.load(os.path.join('assets', 'sprites', 'timemachine', 'timemachine_capsule_green.png')).convert_alpha(),
    pygame.image.load(os.path.join('assets', 'sprites', 'timemachine', 'timemachine_capsule_blue.png')).convert_alpha(),
    pygame.image.load(os.path.join('assets', 'sprites', 'timemachine', 'timemachine_complete_0.png')).convert_alpha(),
    pygame.image.load(os.path.join('assets', 'sprites', 'timemachine', 'timemachine_complete_1.png')).convert_alpha(),
]
# level tiles
ROWS = 16
TileSize = SCREEN.height // ROWS
# Number of distinct tile images: assets/tiles/0.png .. 28.png.
TileTypes = 29
TileList = []
for x in range(TileTypes):
    img = pygame.image.load(os.path.join('assets', 'tiles', f'{x}.png'))
    img = pygame.transform.scale(img, (TileSize, TileSize))
    TileList.append(img)
# Per character type / weapon: a list of animations, each a list of frames.
# Populated below by getCharacterSprites().
PlayerSprites = {
    "player": {
        "shotgun": [],
        "flamethrower": [],
        "rocket": [],
    }
}
EnemySprites = {
    "alien1": {
        "laser": [],
    },
}
# Physics constants
Clock = pygame.time.Clock()
FPS = 60
GRAVITY = 0.75
CharacterScale = 2
# Distance (in pixels) from the screen edge at which the view starts scrolling.
ScrollThreashold = TileSize*3
def getCharacterSprites(characterTypes, animationTypes, weaponTpes, spriteList):
    """Load every animation frame for each character type and weapon.

    For each (character, weapon, animation) the frames 0.png..N-1.png are
    loaded from assets/sprites/characters/<char>/<weapon>/<animation>/,
    scaled by CharacterScale, and appended as one frame list per animation
    to spriteList[char][weapon].
    """
    for char_type in characterTypes:
        for weapon in weaponTpes:
            for animation in animationTypes:
                frame_dir = os.path.join('assets', 'sprites', 'characters', char_type, weapon, animation)
                frames = []
                # One image file per frame; files are named by frame index.
                for frame_index in range(len(os.listdir(frame_dir))):
                    frame = pygame.image.load(os.path.join(frame_dir, f'{frame_index}.png')).convert_alpha()
                    scaled = pygame.transform.scale(
                        frame,
                        (int(frame.get_width() * CharacterScale), int(frame.get_height() * CharacterScale)),
                    )
                    frames.append(scaled)
                spriteList[char_type][weapon].append(frames)
# Animations are appended per weapon in this order, so the list index of an
# animation acts as its animation id.
animation_types = ['Idle', 'Run', 'Jump', 'Death', 'Fly', 'Shoot']
weapon_types = ["shotgun", "flamethrower", "rocket"]
getCharacterSprites(PlayerTypes, animation_types, weapon_types, PlayerSprites)
# Enemies only carry a single "laser" weapon.
getCharacterSprites(EnemyTypes, animation_types, ["laser"], EnemySprites)
|
#
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#title install-tools.sh
#description This script will setup the Cloud9 IDE with the prerequisite packages and code for the workshop.
#author @chrkas
#contributors @buzzsurfr @dalbhanj @cloudymind
#date 2020-04-15
#version 0.2
#==============================================================================
# Install jq
sudo yum -y -q install jq
# Update awscli
pip install --user --upgrade awscli
# Install bash-completion
sudo yum install bash-completion -y -q
# Install kubectl
# NOTE: the original command repeated the curl invocation
# ("curl -o kubectl curl -LO <url>"), which made curl also try to fetch the
# literal URL "curl". Download the release binary exactly once.
curl -o kubectl https://storage.googleapis.com/kubernetes-release/release/`curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt`/bin/linux/amd64/kubectl
chmod +x kubectl && sudo mv kubectl /usr/local/bin/
echo "source <(kubectl completion bash)" >> ~/.bashrc
# Install Heptio Authenticator (same duplicated-curl fix as above)
curl -o aws-iam-authenticator https://amazon-eks.s3.us-west-2.amazonaws.com/1.15.10/2020-02-22/bin/linux/amd64/aws-iam-authenticator
chmod +x ./aws-iam-authenticator && sudo mv aws-iam-authenticator /usr/local/bin/
# Configure AWS CLI: derive the region by trimming the trailing AZ letter.
availability_zone=$(curl http://169.254.169.254/latest/meta-data/placement/availability-zone)
export AWS_DEFAULT_REGION=${availability_zone%?}
# Install eksctl
curl --silent --location "https://github.com/weaveworks/eksctl/releases/download/latest_release/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp
sudo mv /tmp/eksctl /usr/local/bin
# Install helm
curl "https://raw.githubusercontent.com/kubernetes/helm/master/scripts/get" > get_helm.sh
chmod +x get_helm.sh
./get_helm.sh
# Persist lab variables
echo "AWS_DEFAULT_REGION=$AWS_DEFAULT_REGION" >> ~/.bash_profile
echo "export AWS_REGION=${AWS_DEFAULT_REGION}" >> ~/.bash_profile
aws configure set default.region ${AWS_DEFAULT_REGION}
aws configure get default.region
# Create SSH key (no passphrase; used by the lab for node access)
ssh-keygen -t rsa -N "" -f ~/.ssh/id_rsa
if [ ! -d "reinvent2018-dev303-code/" ]; then
    # Download lab Repository
    git clone https://github.com/vitorpe/reinvent2018-dev303-code
fi
// (C) 2007-2020 GoodData Corporation
// Barrel file: re-export the KPI content component and all public types.
import KpiContent from "./KpiContent";
export { KpiContent };
export * from "./types";
|
#pragma once
#include "../helpers.h"
#include "inputmanager.h"
#include "../text/textformatter.h"
#include "../rendering/renderviewport.h"
#include "../rendering/textmetrics.h"
#include "../rendering/textselectionrender.h"
#include "../text/incrementalformattedtext.h"
#include "textoperations.h"
#include <string>
#include <memory>
#include <glm/vec2.hpp>
class Font;
struct RenderStyle;
class TextRender;
class Text;
/**
 * The input state
 */
struct InputState {
    glm::vec2 viewPosition = glm::vec2();   // scroll offset of the view
    std::int64_t caretLineIndex = 0;        // line the caret is on
    std::int64_t caretCharIndex = 0;        // character index within that line
    TextSelection selection;                // current (committed) selection
    bool showSelection = false;             // whether the selection is rendered
    /**
     * Returns the draw position of the input state
     * @param renderStyle The render style
     */
    glm::vec2 getDrawPosition(const RenderStyle& renderStyle) const;
};
using KeyboardCommandFunction = std::function<void ()>;
/**
 * The key modifiers (bit flags; values may be combined with bitwise OR)
 */
enum class KeyModifier : std::uint64_t {
    None = 0,
    Control = 1 << 0,
    Shift = 1 << 1,
    Alt = 1 << 2,
};
/**
 * Represents a keyboard command
 */
struct KeyboardCommand {
    int key;                          // key code (presumably a GLFW key constant — confirm)
    KeyModifier keyModifiers;         // modifiers required for the command to fire
    KeyboardCommandFunction command;  // action executed when the key chord matches
};
/**
 * Defines how the formatting is performed
 */
enum class PerformFormattingType : std::uint32_t {
    Full,
    Partial,
    Incremental
};
/**
 * Defines how character entries are handled
 */
enum class CharacterInputType {
    Native, // Uses the native GLFW method
    Custom // Uses a custom implementation
};
// Callback type used for character triggers (see TextView::mCharTriggers).
using TriggerFunction = std::function<void ()>;
/**
 * Represents a text view
 */
class TextView {
private:
    GLFWwindow* mWindow;
    CharacterInputType mCharacterInputType = CharacterInputType::Native;
    Font& mFont;
    const RenderStyle& mRenderStyle;
    TextMetrics mTextMetrics;
    const RenderViewPort& mViewPort;
    TextOperations mTextOperations;
    InputManager mInputManager;
    InputState mInputState;
    std::vector<KeyboardCommand> mKeyboardCommands;
    std::unordered_map<Char, TriggerFunction> mCharTriggers;
    bool mTriggered = false;
    const float mScrollSpeed = 4.0f;   // scroll-wheel speed multiplier
    Text& mText;
    bool mDrawCaret = false;           // toggled to blink the caret
    TimePoint mLastCaretUpdate;        // time of the last caret blink toggle
    TextSelectionRender mTextSelectionRender;
    bool mSelectionStarted = false;    // true while a mouse selection is in progress
    TextSelection mPotentialSelection; // selection being built before it is committed
    /**
     * Returns the current line the caret is at
     */
    const FormattedLine& currentLine() const;
    /**
     * Returns the current line number
     */
    std::size_t currentLineNumber() const;
    /**
     * Returns the length of the current line
     */
    std::size_t currentLineLength() const;
    /**
     * Returns the width of the current line
     */
    float currentLineWidth() const;
    /**
     * Returns the number of lines
     */
    std::size_t numLines();
    /**
     * Moves the caret in the x position by the given amount
     * @param diff The amount to move
     */
    void moveCaretX(std::int64_t diff);
    /**
     * Sets the caret in the x position to the given value
     * @param position The new value
     */
    void setCaretX(std::int64_t position);
    /**
     * Moves the caret in the y position by the given amount
     * @param diff The amount to move
     */
    void moveCaretY(std::int64_t diff);
    /**
     * Clamps the view position y
     * @param caretScreenPositionY The y position of the caret on the screen
     */
    void clampViewPositionY(float caretScreenPositionY);
    /**
     * Updates the input
     * @param windowState The window state
     */
    void updateInput(const WindowState& windowState);
    /**
     * Inserts the given character
     * @param character The character
     * @param moveCaret Indicates if the caret is moved
     */
    void insertCharacter(Char character, bool moveCaret = true);
    /**
     * The action for insertion
     * @param character The character to insert
     * @param moveCaret Indicates if the caret is moved
     */
    void insertAction(Char character, bool moveCaret = true);
    /**
     * Inserts a new line
     */
    void insertLine();
    /**
     * Pastes from the clipboard
     */
    void paste();
    /**
     * Deletes the current line
     * @param mode How to delete the line
     */
    void deleteLine(Text::DeleteLineMode mode);
    /**
     * Deletes the current selection
     */
    void deleteSelection();
    /**
     * Deletes the given character on the current line
     * @param charIndex The char index
     */
    void deleteCharacter(std::int64_t charIndex);
    /**
     * The action for the backspace button
     */
    void backspaceAction();
    /**
     * The action for the delete button
     */
    void deleteAction();
    /**
     * Replaces the current selection with the given character
     * @param character The character
     * @param moveCaret Indicates if the caret is moved
     */
    void replaceSelection(Char character, bool moveCaret = true);
    /**
     * Updates the editing
     * @param windowState The window state
     */
    void updateEditing(const WindowState& windowState);
    /**
     * Moves the view by the given amount in the y direction
     * @param diff The diff
     */
    void moveViewY(float diff);
    /**
     * Updates the view movement
     * @param windowState The window state
     */
    void updateViewMovement(const WindowState& windowState);
    /**
     * Returns the position of the mouse in the text
     */
    std::pair<std::int64_t, std::int64_t> getMouseTextPosition();
    /**
     * Updates text selection
     * @param windowState The window state
     */
    void updateTextSelection(const WindowState& windowState);
    /**
     * Updates the mouse movement
     * @param windowState The window state
     */
    void updateMouseMovement(const WindowState& windowState);
    /**
     * Returns the view port for the text part
     */
    RenderViewPort getTextViewPort() const;
public:
    /**
     * Creates a new text view
     * @param window The window
     * @param font The font
     * @param rules The formatting rules
     * @param viewPort The view port
     * @param renderStyle The render style
     * @param text The text
     */
    TextView(GLFWwindow* window,
             Font& font,
             std::unique_ptr<FormatterRules> rules,
             const RenderViewPort& viewPort,
             const RenderStyle& renderStyle,
             Text& text);
    /**
     * Returns the text
     */
    Text& text();
    /**
     * Updates the text view
     * @param windowState The window state
     */
    void update(const WindowState& windowState);
    /**
     * Renders the current view
     * @param windowState The window state
     * @param textRender The text render
     */
    void render(const WindowState& windowState, TextRender& textRender);
};
package controllers;
import model.Task;
import play.*;
import play.data.Form;
import play.mvc.Controller;
import play.mvc.Result;
import play.mvc.Results;
import views.html.*;
/**
 * HTTP entry points for the task-list application.
 */
public class Application extends Controller {
    /** Renders the task list page together with an empty creation form. */
    public Result tasks() {
        Form<Task> taskForm = Form.form(Task.class) ;
        return ok( views.html.index.render(Task.all(),taskForm ));
    }
    /** Creates a new task. Not implemented yet. */
    public Result newTask() {
        return Results.TODO;
    }
    /** Deletes the task with the given id. Not implemented yet. */
    public Result deleteTask(long id) {
        return Results.TODO;
    }
    /** Root URL: redirect to the task list. */
    public Result index() {
        return redirect(routes.Application.tasks());
    }
}
|
<reponame>rinaldasl/front-end-quiz<filename>app/actions/action-item.js<gh_stars>0
import itemsService from 'services/items';
export const PREFIX = 'actionItem';
export const ACTION_SEND_REQ = `${PREFIX}.ACTION_SEND_REQ`;
export const ACTION_SEND_RES = `${PREFIX}.ACTION_SEND_RES`;
export const DESTROY = `${PREFIX}.DESTROY`;
// Thunk: mark the request as in flight, fetch the item by id, then publish
// the payload once the service resolves. Failures are logged to the console.
export function getItemById(id) {
  return function (dispatch) {
    dispatch({ type: ACTION_SEND_REQ });
    itemsService
      .fetchItemById(id)
      .then(function (data) {
        dispatch({ type: ACTION_SEND_RES, payload: { data } });
      })
      .catch(function (err) {
        console.error(err);
      });
  };
}
// Thunk: emit the DESTROY action so reducers can reset this slice of state.
export function destroy() {
  return function (dispatch) {
    dispatch({ type: DESTROY });
  };
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alipay.sofa.ark.common.util;
/**
* A helper class, which buffers one line of input. It provides for simple line editing, e.g.
* insertion, deletion, backspace, left and right movement
*
* @author qilong.zql
* @since 0.4.0
*/
public class SimpleByteBuffer {
    /** Initial capacity; the backing array doubles whenever it fills up. */
    private final static int BUFFER_CHUNK = 20;
    private byte[] buffer;
    // Cursor position within [0, size]; insert/backspace/delete operate here.
    private int pos = 0;
    // Number of valid bytes currently stored.
    private int size = 0;
    public SimpleByteBuffer() {
        buffer = new byte[BUFFER_CHUNK];
    }
    /** Doubles the backing array, preserving the current contents. */
    private void resize() {
        final byte[] next = new byte[buffer.length << 1];
        System.arraycopy(buffer, 0, next, 0, buffer.length);
        buffer = next;
    }
    /** Appends a byte at the end of the line without moving the cursor. */
    public void add(byte b) {
        if (size >= buffer.length) {
            resize();
        }
        buffer[size++] = b;
    }
    /** Inserts a byte at the cursor, shifting the tail right, and advances the cursor. */
    public void insert(byte b) {
        if (size >= buffer.length) {
            resize();
        }
        final int gap = size - pos;
        if (gap > 0) {
            System.arraycopy(buffer, pos, buffer, pos + 1, gap);
        }
        buffer[pos++] = b;
        size++;
    }
    /** Moves the cursor one byte right; returns that byte, or -1 at end of line. */
    public byte goRight() {
        if (pos < size) {
            return buffer[pos++];
        }
        return -1;
    }
    /** Moves the cursor one byte left; returns false when already at the start. */
    public boolean goLeft() {
        if (pos > 0) {
            pos--;
            return true;
        }
        return false;
    }
    /** Removes the byte before the cursor, shifting the tail left. */
    public void backSpace() {
        if (pos > 0) {
            System.arraycopy(buffer, pos, buffer, pos - 1, size - pos);
            pos--;
            size--;
        }
    }
    /** Removes the byte at the cursor, shifting the tail left. */
    public void delete() {
        if (pos < size) {
            // Only size - pos - 1 bytes follow the deleted byte. The previous
            // length of size - pos copied one byte too many and read past the
            // last valid byte — an ArrayIndexOutOfBoundsException when the
            // buffer was exactly full.
            System.arraycopy(buffer, pos + 1, buffer, pos, size - pos - 1);
            size--;
        }
    }
    /** Returns a copy of the valid bytes. */
    public byte[] getBuffer() {
        byte[] data = new byte[size];
        System.arraycopy(buffer, 0, data, 0, size);
        return data;
    }
    /** Returns a copy of the valid bytes, then resets the line to empty. */
    public byte[] getAndClearBuffer() {
        byte[] data = new byte[size];
        System.arraycopy(buffer, 0, data, 0, size);
        size = 0;
        pos = 0;
        return data;
    }
    public int getPos() {
        return pos;
    }
    public int getSize() {
        return size;
    }
    /** Number of bytes between the cursor and the end of the line. */
    public int getGap() {
        return size - pos;
    }
}
'use strict';
exports.__esModule = true;
var _create = require('../../utils/create');
var _create2 = _interopRequireDefault(_create);
var _field = require('../../field');
var _field2 = _interopRequireDefault(_field);
var _email = require('../../utils/validate/email');
var _email2 = _interopRequireDefault(_email);
var _number = require('../../utils/validate/number');
var _number2 = _interopRequireDefault(_number);
var _SkuImgUploader = require('./SkuImgUploader');
var _SkuImgUploader2 = _interopRequireDefault(_SkuImgUploader);
// Normalize a require()d module for ESM interop: ES modules are returned
// as-is, CommonJS exports are wrapped so the value sits under `.default`.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
// Compiled (build output) Vue component: renders one cell per SKU "message"
// (buyer-supplied order field) — an image uploader for type 'image', a
// regular input field for everything else — and validates the entered values.
exports.default = (0, _create2.default)({
  render: function render() {
    var _vm = this;var _h = _vm.$createElement;var _c = _vm._self._c || _h;return _c('cell-group', { staticClass: "van-sku-messages" }, [_vm._l(_vm.messages, function (message, index) {
      return [message.type === 'image' ? _c('cell', { key: _vm.goodsId + "-" + index, staticClass: "van-sku-messages__image-cell", attrs: { "label": _vm.$t('onePic'), "required": message.required == '1', "title": message.name } }, [_c('sku-img-uploader', { attrs: { "upload-img": _vm.messageConfig.uploadImg, "max-size": _vm.messageConfig.uploadMaxSize }, model: { value: _vm.messageValues[index].value, callback: function callback($$v) {
        _vm.$set(_vm.messageValues[index], "value", $$v);
      }, expression: "messageValues[index].value" } })], 1) : _c('field', { key: _vm.goodsId + "-" + index, attrs: { "required": message.required == '1', "label": message.name, "placeholder": _vm.getPlaceholder(message), "type": _vm.getType(message) }, model: { value: _vm.messageValues[index].value, callback: function callback($$v) {
        _vm.$set(_vm.messageValues[index], "value", $$v);
      }, expression: "messageValues[index].value" } })];
    })], 2);
  },
  name: 'sku-messages',
  components: {
    SkuImgUploader: _SkuImgUploader2.default,
    Field: _field2.default
  },
  props: {
    messages: Array,
    messageConfig: Object,
    goodsId: [Number, String]
  },
  data: function data() {
    return {
      messageValues: this.resetMessageValues(this.messages)
    };
  },
  watch: {
    // Re-seed the value models whenever the message definitions change.
    messages: function messages(val) {
      this.messageValues = this.resetMessageValues(val);
    }
  },
  computed: {
    messagePlaceholderMap: function messagePlaceholderMap() {
      return this.messageConfig.placeholderMap || {};
    }
  },
  methods: {
    // One empty value model per message definition.
    resetMessageValues: function resetMessageValues(messages) {
      return (messages || []).map(function () {
        return { value: '' };
      });
    },
    // Maps a message definition to the input type of the rendered field.
    getType: function getType(message) {
      if (+message.multiple === 1) {
        return 'textarea';
      }
      if (message.type === 'id_no') {
        return 'text';
      }
      return message.datetime > 0 ? 'datetime-local' : message.type;
    },
    // Collects values keyed as message_<index>, normalizing datetime values.
    getMessages: function getMessages() {
      var _this = this;
      var messages = {};
      this.messageValues.forEach(function (item, index) {
        var value = item.value;
        if (_this.messages[index].datetime > 0) {
          // "2020-01-01T12:00" -> "2020-01-01 12:00"
          value = value.replace(/T/g, ' ');
        }
        messages['message_' + index] = value;
      });
      return messages;
    },
    // Collects values keyed by the message's display name (cart format).
    getCartMessages: function getCartMessages() {
      var _this2 = this;
      var messages = {};
      this.messageValues.forEach(function (item, index) {
        var value = item.value;
        var message = _this2.messages[index];
        if (message.datetime > 0) {
          value = value.replace(/T/g, ' ');
        }
        messages[message.name] = value;
      });
      return messages;
    },
    getPlaceholder: function getPlaceholder(message) {
      var type = +message.multiple === 1 ? 'textarea' : message.type;
      return this.messagePlaceholderMap[type] || this.$t('placeholder.' + type);
    },
    // Returns an error string for the first invalid message; undefined when all pass.
    validateMessages: function validateMessages() {
      var values = this.messageValues;
      for (var i = 0; i < values.length; i++) {
        var value = values[i].value;
        var message = this.messages[i];
        if (value === '') {
          // validation of required fields
          if (message.required == '1') {
            // eslint-disable-line
            var textType = message.type === 'image' ? 'upload' : 'fill';
            return this.$t(textType) + message.name;
          }
        } else {
          if (message.type === 'tel' && !(0, _number2.default)(value)) {
            return this.$t('number');
          }
          if (message.type === 'email' && !(0, _email2.default)(value)) {
            return this.$t('email');
          }
          if (message.type === 'id_no' && (value.length < 15 || value.length > 18)) {
            return this.$t('id_no');
          }
        }
        if (value.length > 200) {
          return message.name + ' ' + this.$t('overlimit');
        }
      }
    }
  }
});
#!/bin/bash
# Install script for Linux distributions
# This is a basic installer that merely copies the include files and
# libraries to the system-wide directories.
# NOTE(review): writes to /etc and /usr/local — presumably must be run as
# root; confirm and consider prefixing with sudo or a euid check.
# Copy the udev rules file and reload all rules
cp ./60-opalkelly.rules /etc/udev/rules.d
/sbin/udevadm control --reload-rules
/sbin/udevadm trigger
# Copy the API libraries and include files
cp ./API/libokFrontPanel.so /usr/local/lib/
cp ./API/okFrontPanelDLL.h /usr/local/include/
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Business.Organization;
import Business.Role.HospitalAdminRole;
import Business.Role.Role;
import java.util.ArrayList;
/**
*
* @author Joy
*/
/**
 * Organization for hospital administrators; supports only the
 * {@link HospitalAdminRole}.
 */
public class HospitalAdminOrganization extends Organization{
    public HospitalAdminOrganization() {
        super(Type.HospitalAdmin.getValue());
    }
    @Override
    public ArrayList<Role> getSupportedRole() {
        // diamond operator instead of the raw ArrayList type (avoids an
        // unchecked-conversion warning)
        ArrayList<Role> roles = new ArrayList<>();
        roles.add(new HospitalAdminRole());
        return roles;
    }
}
|
import * as React from 'react'
import type { BaseProvider, WebSocketProvider } from '@ethersproject/providers'
import {
WagmiClient,
ClientConfig as WagmiClientConfig,
createClient as createWagmiClient,
} from '@wagmi/core'
import { QueryClient, QueryClientProvider } from 'react-query'
import { Persister, persistQueryClient } from 'react-query/persistQueryClient'
import { createWebStoragePersister } from 'react-query/createWebStoragePersister'
import { deserialize, serialize } from './utils'
/**
 * A wagmi client extended with the react-query QueryClient used for caching.
 */
export type DecoratedWagmiClient<
  TProvider extends BaseProvider = BaseProvider,
  TWebSocketProvider extends WebSocketProvider = WebSocketProvider,
> = WagmiClient<TProvider, TWebSocketProvider> & { queryClient: QueryClient }
// React context carrying the decorated client; undefined outside a Provider.
export const Context = React.createContext<
  DecoratedWagmiClient<BaseProvider, WebSocketProvider> | undefined
>(undefined)
/**
 * Configuration for createClient: the wagmi client config plus an optional
 * react-query client and cache persister.
 */
export type ClientConfig<
  TProvider extends BaseProvider = BaseProvider,
  TWebSocketProvider extends WebSocketProvider = WebSocketProvider,
> = WagmiClientConfig<TProvider, TWebSocketProvider> & {
  queryClient?: QueryClient
  persister?: Persister
}
/**
 * Creates a wagmi client decorated with a react-query QueryClient.
 * When a persister is available (browser localStorage by default) the query
 * cache is persisted across reloads.
 */
export function createClient<
  TProvider extends BaseProvider,
  TWebSocketProvider extends WebSocketProvider,
>({
  queryClient = new QueryClient({
    defaultOptions: {
      queries: {
        // react-query's cacheTime is in milliseconds; the previous value of
        // 60 * 60 * 24 was only ~86 seconds, not the intended 24 hours.
        cacheTime: 1_000 * 60 * 60 * 24, // 24 hours
        networkMode: 'offlineFirst',
        refetchOnWindowFocus: false,
        retry: 0,
      },
      mutations: {
        networkMode: 'offlineFirst',
      },
    },
  }),
  persister = typeof window !== 'undefined'
    ? createWebStoragePersister({
        key: 'wagmi.cache',
        storage: window.localStorage,
        serialize,
        deserialize,
      })
    : undefined,
  ...config
}: ClientConfig<TProvider, TWebSocketProvider> = {}) {
  const client = createWagmiClient<TProvider, TWebSocketProvider>(config)
  // Persist the cache; queries that opted out with cacheTime 0 are skipped.
  if (persister)
    persistQueryClient({
      queryClient,
      persister,
      dehydrateOptions: {
        shouldDehydrateQuery: (query) => query.cacheTime !== 0,
      },
    })
  return Object.assign(client, { queryClient })
}
/**
 * Props accepted by the wagmi context Provider.
 */
export type ProviderProps<
  TProvider extends BaseProvider = BaseProvider,
  TWebSocketProvider extends WebSocketProvider = WebSocketProvider,
> = {
  /** React-decorated WagmiClient instance */
  client?: DecoratedWagmiClient<TProvider, TWebSocketProvider>
}
/**
 * Makes the decorated wagmi client (and its QueryClient) available to the
 * React tree via context.
 */
export function Provider<
  TProvider extends BaseProvider,
  TWebSocketProvider extends WebSocketProvider,
>({
  children,
  client = createClient<TProvider, TWebSocketProvider>(),
}: React.PropsWithChildren<ProviderProps<TProvider, TWebSocketProvider>>) {
  // Attempt to connect on mount
  React.useEffect(() => {
    ;(async () => {
      if (!client.config.autoConnect) return
      await client.autoConnect()
    })()
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])
  return (
    <Context.Provider value={client as unknown as DecoratedWagmiClient}>
      <QueryClientProvider client={client.queryClient}>
        {children}
      </QueryClientProvider>
    </Context.Provider>
  )
}
/**
 * Returns the decorated wagmi client from context.
 * @throws Error when called outside of a wagmi Provider
 */
export function useClient<
  TProvider extends BaseProvider,
  TWebSocketProvider extends WebSocketProvider = WebSocketProvider,
>() {
  const client = React.useContext(Context) as unknown as DecoratedWagmiClient<
    TProvider,
    TWebSocketProvider
  >
  // `new Error` (instead of the bare `Error(...)` call) is the idiomatic form.
  if (!client) throw new Error('Must be used within WagmiProvider')
  return client
}
|
import { retryAsyncDecorator, retryDecorator } from "./decorators";
import sinon = require("sinon");
import { isTooManyTries } from "./tooManyTries";
const should = require("chai").should();
// Tests for retryDecorator / retryAsyncDecorator: success paths, error
// propagation after maxTry attempts, and the optional `until` predicate.
describe("Retry decorator", function () {
  it("async decorator should return the valid result", async function () {
    const param = "Question";
    const answer = 42;
    const callback = sinon.stub();
    callback
      .withArgs(param)
      .onFirstCall()
      .resolves(answer);
    const decorated = retryAsyncDecorator(callback);
    (await decorated(param)).should.be.equals(answer);
    callback.should.have.been.callCount(1);
  });
  it("async decorator should throw an exception", async function () {
    const param = "Question";
    const errorMsg = "BOOM";
    const error = new Error(errorMsg);
    const maxTry = 2;
    const callback = sinon.stub();
    callback.rejects(error);
    const decorated = retryAsyncDecorator(callback, { maxTry, delay: 50 });
    try {
      await decorated(param);
      throw new Error("Expected error not thrown");
    } catch (error) {
      (error as Error).message.should.be.equals(errorMsg);
      callback.should.have.been.callCount(maxTry);
    }
  });
  it("decorator should return the valid result", async function () {
    const param = "Question";
    const answer = 42;
    const callback = sinon.stub();
    callback
      .withArgs(param)
      .onFirstCall()
      .returns(answer);
    const decorated = retryDecorator(callback);
    (await decorated(param)).should.be.equals(answer);
    callback.should.have.been.callCount(1);
  });
  it("decorator should throw an exception", async function () {
    const param = "Question";
    const errorMsg = "BOOM";
    const callback = sinon.stub();
    const error = new Error(errorMsg);
    const maxTry = 2;
    callback.throws(error);
    const decorated = retryDecorator(callback, { maxTry, delay: 50 });
    try {
      await decorated(param);
      throw new Error("Expected error not thrown");
    } catch (error) {
      (error as Error).message.should.be.equals(errorMsg);
      callback.should.have.been.callCount(maxTry);
    }
  });
  // NOTE: the describe callback was previously declared `async`, but mocha
  // requires describe callbacks to be synchronous (only `it` bodies may be
  // async); the returned promise is ignored and mocha warns about it.
  describe("Decorator should use 'until' callback", function () {
    it("should return a valid result", async function () {
      const param = "Question";
      const answer = 42;
      const callback = sinon.stub();
      callback
        .withArgs(param)
        .onFirstCall()
        .returns(answer);
      const until = sinon.stub();
      until.withArgs(answer).returns(true);
      const decorated = retryDecorator(callback, { until });
      (await decorated(param)).should.be.equals(answer);
      callback.should.have.been.callCount(1);
      until.should.have.been.callCount(1);
    });
    it("should return a valid result when until returs true", async function () {
      const param = "Question";
      const answer = 42;
      const callback = sinon.stub();
      callback
        .withArgs(param)
        .returns(answer);
      const until = sinon.stub();
      until.onCall(0).returns(false);
      until.onCall(1).returns(true);
      const decorated = retryDecorator(callback, { delay: 5, until });
      (await decorated(param)).should.be.equals(answer);
      callback.should.have.been.callCount(2);
      until.should.have.been.callCount(2);
    });
    it("should throw an error when callback fails", async function () {
      const param = "Question";
      const errorMsg = "BOOM";
      const callback = sinon.stub();
      const error = new Error(errorMsg);
      const maxTry = 2;
      const until = sinon.stub();
      callback.throws(error);
      const decorated = retryDecorator(callback, { maxTry, delay: 50, until });
      try {
        await decorated(param);
        throw new Error("Expected error not thrown");
      } catch (error) {
        (error as Error).message.should.be.equals(errorMsg);
        callback.should.have.been.callCount(maxTry);
        until.should.have.been.callCount(0);
      }
    });
    it("should throw a TooManyTries when 'until' always return false", async function () {
      const maxTry = 3;
      const param = "Question";
      const answer = 42;
      const callback = sinon.stub();
      callback
        .withArgs(param)
        .returns(answer);
      const until = sinon.stub();
      until.withArgs(answer).returns(false);
      const decorated = retryDecorator(
        callback,
        { maxTry: 3, delay: 10, until },
      );
      try {
        await decorated(param);
        throw new Error("Expected error not thrown");
      } catch (error) {
        isTooManyTries(error).should.be.true;
        callback.should.have.been.callCount(maxTry);
        until.should.have.been.callCount(maxTry);
      }
    });
  });
});
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Securities display-slot admission (access-control) VO
*
* @author auto create
* @since 1.0, 2021-09-18 09:37:29
*/
public class StockPositionVO extends AlipayObject {
	private static final long serialVersionUID = 4661946857249875111L;
	/**
	 * True means the user is admitted to this display slot and the
	 * corresponding content may be shown; false means it must not be shown.
	 */
	@ApiField("is_show")
	private Boolean isShow;
	/**
	 * Mini-program display-slot ID, assigned and managed by the Ant side.
	 */
	@ApiField("position_code")
	private String positionCode;
	public Boolean getIsShow() {
		return this.isShow;
	}
	public void setIsShow(Boolean isShow) {
		this.isShow = isShow;
	}
	public String getPositionCode() {
		return this.positionCode;
	}
	public void setPositionCode(String positionCode) {
		this.positionCode = positionCode;
	}
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.code = void 0;
var code = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M168,392c-6.143,0-12.285-2.344-16.971-7.029l-112-112c-9.373-9.373-9.373-24.569,0-33.941l112-112\r\n\t\tc9.373-9.372,24.568-9.372,33.941,0c9.371,9.372,9.371,24.568,0,33.941L89.941,256l95.029,95.029\r\n\t\tc9.371,9.373,9.371,24.568,0,33.941C180.283,389.656,174.143,392,168,392z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M168,392c-6.143,0-12.285-2.344-16.971-7.029l-112-112c-9.373-9.373-9.373-24.569,0-33.941l112-112\r\n\t\tc9.373-9.372,24.568-9.372,33.941,0c9.371,9.372,9.371,24.568,0,33.941L89.941,256l95.029,95.029\r\n\t\tc9.371,9.373,9.371,24.568,0,33.941C180.283,389.656,174.143,392,168,392z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M344,392c6.143,0,12.285-2.344,16.971-7.029l112-112c9.373-9.373,9.373-24.569,0-33.941l-112-112\r\n\t\tc-9.373-9.372-24.568-9.372-33.941,0c-9.371,9.372-9.371,24.568,0,33.941L422.059,256l-95.029,95.029\r\n\t\tc-9.371,9.373-9.371,24.568,0,33.941C331.717,389.656,337.857,392,344,392z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M344,392c6.143,0,12.285-2.344,16.971-7.029l112-112c9.373-9.373,9.373-24.569,0-33.941l-112-112\r\n\t\tc-9.373-9.372-24.568-9.372-33.941,0c-9.371,9.372-9.371,24.568,0,33.941L422.059,256l-95.029,95.029\r\n\t\tc-9.371,9.373-9.371,24.568,0,33.941C331.717,389.656,337.857,392,344,392z"
},
"children": []
}]
}]
}]
};
exports.code = code; |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//using System.Linq;
package com.quantconnect.lean.data.universeselection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.quantconnect.lean.securities.Security;
/**
* Defines the additions and subtractions to the algorithm's security subscriptions
*/
public class SecurityChanges {
    /**
     * Gets an instance that represents no changes have been made
     */
    public static final SecurityChanges None = new SecurityChanges( new ArrayList<Security>(), new ArrayList<Security>() );

    private final Set<Security> addedSecurities;
    private final Set<Security> removedSecurities;

    /**
     * Gets the securities that were added by universe selection, sorted by symbol value
     */
    public ImmutableList<Security> getAddedSecurities() {
        return sortedBySymbol( addedSecurities );
    }

    /**
     * Gets the securities that were removed by universe selection. This list may
     * include symbols that were removed, but are still receiving data due to
     * existing holdings or open orders
     */
    public ImmutableList<Security> getRemovedSecurities() {
        return sortedBySymbol( removedSecurities );
    }

    /**
     * Initializes a new instance of the <see cref="SecurityChanges"/> class
     * @param addedSecurities Added symbols list
     * @param removedSecurities Removed symbols list
     */
    public SecurityChanges( final Iterable<Security> addedSecurities, final Iterable<Security> removedSecurities ) {
        this.addedSecurities = new HashSet<>();
        this.removedSecurities = new HashSet<>();
        Iterables.addAll( this.addedSecurities, addedSecurities );
        Iterables.addAll( this.removedSecurities, removedSecurities );
    }

    /**
     * Returns a new instance of <see cref="SecurityChanges"/> with the specified securities marked as added
     * @param securities The added securities
     * @return A new security changes instance with the specified securities marked as added
     */
    public static SecurityChanges added( final Security... securities ) {
        if( securities == null || securities.length == 0 )
            return None;
        return new SecurityChanges( Arrays.asList( securities ), new ArrayList<Security>() );
    }

    /**
     * Returns a new instance of <see cref="SecurityChanges"/> with the specified securities marked as removed
     * @param securities The removed securities
     * @return A new security changes instance with the specified securities marked as removed
     */
    public static SecurityChanges removed( final Security... securities ) {
        if( securities == null || securities.length == 0 )
            return None;
        return new SecurityChanges( new ArrayList<Security>(), Arrays.asList( securities ) );
    }

    /**
     * Combines the results of two <see cref="SecurityChanges"/>
     * @param left The left side of the operand
     * @param right The right side of the operand
     * @return Adds the additions together and removes any removals found in the additions, that is, additions take precedence
     */
    public static SecurityChanges merge( final SecurityChanges left, final SecurityChanges right ) {
        // common case is adding something to nothing, shortcut these to prevent unnecessary work
        if( left == None ) return right;
        if( right == None ) return left;
        final Set<Security> additions = Sets.union( left.addedSecurities, right.addedSecurities );
        // a security that is both added and removed counts as added only
        final List<Security> removals = Sets.union( left.removedSecurities, right.removedSecurities ).stream()
                .filter( x -> !additions.contains( x ) )
                .collect( Collectors.toList() );
        return new SecurityChanges( additions, removals );
    }

    /**
     * Returns a String that represents the current object.
     * @return A String that represents the current object.
     */
    @Override
    public String toString() {
        if( addedSecurities.isEmpty() && removedSecurities.isEmpty() )
            return "SecurityChanges: None";
        final String added = addedSecurities.isEmpty() ? "" :
                " Added: " + getAddedSecurities().stream().map( x -> x.getSymbol().getId().toString() ).collect( Collectors.joining( "," ) );
        final String removed = removedSecurities.isEmpty() ? "" :
                " Removed: " + getRemovedSecurities().stream().map( x -> x.getSymbol().getId().toString() ).collect( Collectors.joining( "," ) );
        return "SecurityChanges: " + added + removed;
    }

    /**
     * Shared helper: snapshot the given set as an immutable list sorted by symbol value.
     * (Previously this sort/copy logic was duplicated in both getters.)
     */
    private static ImmutableList<Security> sortedBySymbol( final Set<Security> securities ) {
        return ImmutableList.copyOf( securities.stream()
                .sorted( Comparator.comparing( x -> x.getSymbol().getValue() ) )
                .collect( Collectors.toList() ) );
    }
}
|
public class Equipment
{
    // NOTE(review): both properties below return hard-coded placeholder
    // members of project-declared enums; per the inline comments they are
    // meant to be replaced with real per-instance values.
    public EquipmentClassEnum EquipmentClassEnum
    {
        get
        {
            return EquipmentClass.EquipmentClassEnum.ClassA; // Replace with the appropriate enum value
        }
    }

    public EquipmentGroupEnum EquipmentGroupEnum
    {
        get
        {
            return EquipmentGroup.EquipmentGroupEnum.Group1; // Replace with the appropriate enum value
        }
    }
}
#!/bin/bash
# Print every line of ../FileName.txt that contains "Ubuntu".
# Fix: grep can read the file directly -- no need to pipe through cat.
grep "Ubuntu" ../FileName.txt
|
<reponame>romzc/faq-accordion-card<gh_stars>0
/** Get all elementos with accordion class name <div> */
const togleParagraph = document.getElementsByClassName('accordion');
/**
 * Collapse every accordion item in the collection: clear the heading's
 * icon/arrow state classes and hide the body panel.
 */
const closeAll = (list) => {
  for (const item of list) {
    const [heading, body] = item.children;
    heading.children[0].classList.remove('focus');
    heading.children[1].classList.remove('close');
    body.classList.remove('active');
  }
};
/**
 * Wire each accordion item's heading with a click handler that first closes
 * every panel and then toggles the clicked one open (exclusive accordion).
 */
const accordion = (list) => {
  for (const item of list) {
    item.children[0].addEventListener('click', () => {
      /** icon arrow down rotates 180 degrees */
      const icon = item.children[0].children[0];
      const arrow = item.children[0].children[1];
      const panel = item.children[1];
      closeAll(list);
      icon.classList.toggle('focus');
      arrow.classList.toggle('close');
      /** makes the p tag visible */
      panel.classList.toggle('active');
    });
  }
};
// Activate click handling for every accordion item found on the page.
accordion(togleParagraph);
# Define the model architecture: a feed-forward classifier with two hidden
# layers and dropout regularisation.
# NOTE(review): assumes `tf` (tensorflow), `input_dim`, `num_classes`,
# `X_train` and `y_train` are defined earlier in the file -- confirm.
model = tf.keras.Sequential([
    tf.keras.layers.InputLayer(input_shape=(input_dim,)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),  # drop 20% of activations during training only
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(num_classes, activation='softmax'),
])

# Compile the model using an optimizer and a loss function.
# sparse_* variant: labels are integer class ids, not one-hot vectors.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

# Train the model
model.fit(X_train, y_train, epochs=10)
#!/bin/bash
# LinuxGSM alert_slack.sh module
# Author: Daniel Gibbs
# Contributors: http://linuxgsm.com/contrib
# Website: https://linuxgsm.com
# Description: Sends Slack alert.

# Name of this module with symlinks resolved (presumably used by the
# fn_script_log_* helpers -- confirm in the logging module).
functionselfname="$(basename "$(readlink -f "${BASH_SOURCE[0]}")")"

# jq is required below to compact the JSON payload.
# NOTE(review): this branch logs a fatal error but does not exit, so the
# curl further down still runs without jq -- confirm whether the calling
# framework aborts on fn_script_log_fatal.
if ! command -v jq > /dev/null; then
	fn_print_fail_nl "Sending Slack alert: jq is missing."
	fn_script_log_fatal "Sending Slack alert: jq is missing."
fi

# Slack Block Kit payload. Variables such as ${alertemoji}, ${alertsubject},
# ${alertbody}, ${gamename}, ${alertip}, ${port}, ${servername},
# ${alerturl} and ${slackwebhook} are expected to be set by the caller.
json=$(cat <<EOF
{
	"attachments": [
		{
			"color": "#36a64f",
			"blocks": [
				{
					"type": "section",
					"text": {
						"type": "mrkdwn",
						"text": "*LinuxGSM Alert*"
					}
				},
				{
					"type": "section",
					"text": {
						"type": "mrkdwn",
						"text": "*${alertemoji} ${alertsubject}* \n ${alertbody}"
					}
				},
				{
					"type": "divider"
				},
				{
					"type": "section",
					"fields": [
						{
							"type": "mrkdwn",
							"text": "*Game:* \n ${gamename}"
						},
						{
							"type": "mrkdwn",
							"text": "*Server IP:* \n ${alertip}:${port}"
						},
						{
							"type": "mrkdwn",
							"text": "*Server Name:* \n ${servername}"
						}
					]
				},
				{
					"type": "section",
					"text": {
						"type": "mrkdwn",
						"text": "Hostname: ${HOSTNAME} / More info: ${alerturl}"
					}
				}
			]
		}
	]
}
EOF
)

fn_print_dots "Sending Slack alert"
# Compact the payload with jq and POST it; Slack's webhook replies "ok" on success.
slacksend=$(curl --connect-timeout 10 -sSL -H "Content-Type: application/json" -X POST -d "$(echo -n "$json" | jq -c .)" "${slackwebhook}")
if [ "${slacksend}" == "ok" ]; then
	fn_print_ok_nl "Sending Slack alert"
	fn_script_log_pass "Sending Slack alert"
else
	fn_print_fail_nl "Sending Slack alert: ${slacksend}"
	fn_script_log_fatal "Sending Slack alert: ${slacksend}"
fi
|
#! /bin/sh
# Launch training on the 3dchairs dataset.
# $1: experiment name (required).
# Fix: "$1" is now quoted so an experiment name containing spaces or glob
# characters is passed to --name as a single argument.
python main.py --dataset 3dchairs --num_workers 4 --batch_size 64 \
    --output_save True --viz_on True \
    --viz_ll_iter 1000 --viz_la_iter 5000 \
    --viz_ra_iter 10000 --viz_ta_iter 10000 \
    --ckpt_save_iter 10000 --max_iter 1e6 \
    --lr_VAE 1e-4 --beta1_VAE 0.9 --beta2_VAE 0.999 \
    --lr_D 1e-5 --beta1_D 0.5 --beta2_D 0.9 \
    --lr_r 1e-1 --beta1_r 0.5 --beta2_r 0.9 \
    --name "$1" --z_dim 10 --gamma 3.2 --ckpt_load last
|
#!/bin/sh
# Container entrypoint: start fcgiwrap (for cgit), sshd, then nginx in the
# foreground so the container stays alive.
CGIT_VARS='$CGIT_TITLE:$CGIT_DESC:$CGIT_VROOT:$CGIT_SECTION_FROM_STARTPATH:$CGIT_MAX_REPO_COUNT'
# Number of fcgi workers
if [ -z "$FCGI_CHILDREN" ]; then
	FCGI_CHILDREN=$(nproc)
fi
/usr/bin/spawn-fcgi -F "$FCGI_CHILDREN" -M 666 -s /run/fcgiwrap.sock /usr/bin/fcgiwrap
/usr/sbin/sshd
# Fix: -p makes this idempotent, so a container restart with an existing
# /run/nginx directory no longer errors out here.
mkdir -p /run/nginx
/usr/sbin/nginx -g "daemon off;"
|
import json
def parseServerResponse(response):
    """Extract the server's "message" field from a decoded JSON response.

    Args:
        response: dict decoded from the server's JSON reply.

    Returns:
        The value of ``response["message"]`` if present, otherwise the
        fallback string ``"No message received"``.
    """
    # dict.get with a default replaces the manual `in`-check/else branch.
    return response.get("message", "No message received")
# Test cases: one response with a message, one without.
samples = (
    {"message": "It's running!"},   # -> It's running!
    {"status": "OK"},               # -> No message received
)
for sample in samples:
    print(parseServerResponse(sample))
package parser
import (
"log"
"testing"
)
// TestParse exercises Fetch against markdown-style "[lib]:url" license links.
// NOTE(review): Fetch is defined elsewhere in this package; from these cases
// it appears to return (matched, libraryName, licenseText) and may resolve
// the license text from the URL -- confirm (a network dependency would make
// this an integration test rather than a unit test).
func TestParse(t *testing.T) {
	if match, lib, license := Fetch("[SwiftyJSON]:https://github.com/SwiftyJSON/SwiftyJSON/blob/master/LICENSE"); match {
		log.Println(lib)
		log.Println(license)
	} else {
		t.Errorf("error")
	}
	// A version-pinned (non-master) blob URL should still resolve a license.
	if match, lib, license := Fetch("[OAuth2]:https://github.com/p2/OAuth2/blob/2.0.0/LICENSE.txt"); match && license != "Not Found" {
		log.Println(lib)
		log.Println(license)
	} else {
		t.Errorf("error")
	}
	if match, lib, license := Fetch("[SQLite.swift]:https://github.com/stephencelis/SQLite.swift/blob/master/LICENSE.txt"); match {
		log.Println(lib)
		log.Println(license)
	} else {
		t.Errorf("error")
	}
	if match, lib, license := Fetch("[ios-license-generator]:https://github.com/mono0926/ios-license-generator/blob/master/LICENSE"); match {
		log.Println(lib)
		log.Println(license)
	} else {
		t.Errorf("error")
	}
	// Inputs that are not "[name]:url" pairs must not match.
	if match, lib, license := Fetch("hoge"); match {
		t.Errorf("error: (lib: %s, license: %s)", lib, license)
	}
	if match, lib, license := Fetch("[SwiftyJSON]:hoge"); match {
		t.Errorf("error: (lib: %s, license: %s)", lib, license)
	}
}
|
def gcd(a, b):
    """Greatest common divisor of a and b via the Euclidean algorithm.

    Iterative form of the classic recursion: each step maps
    (a, b) -> (b % a, a) until a reaches zero, then b holds the answer.
    """
    while a != 0:
        a, b = b % a, a
    return b
# Driver program: demonstrate gcd on a fixed example pair.
a = 10
b = 15
result = gcd(a, b)
print("The gcd of", a, "and", b, "is", result)
package main

import (
	"Backbone/cli"
	// "ANET-chain/core"
)

// main constructs the command-line interface and hands control to it.
// NOTE(review): CLI.Run's behaviour (presumably parsing os.Args and
// dispatching commands) is defined in the Backbone/cli package -- confirm.
func main() {
	c := cli.CLI{}
	c.Run()
}
|
<reponame>Vanluren/react-native-heroicons<filename>src/icons/Refresh.tsx
import * as React from "react";
import Svg, { Path, SvgProps } from "react-native-svg";

interface Props extends SvgProps {
  /** Rendered width and height of the icon; defaults to 24. */
  size?: number;
}

/** Heroicons "refresh" glyph as a scalable react-native SVG component. */
const Refresh = ({ size = 24, ...props }: Props) => (
  <Svg
    viewBox="0 0 20 20"
    fill="currentColor"
    width={size}
    height={size}
    {...props}
  >
    <Path
      fillRule="evenodd"
      d="M4 2a1 1 0 011 1v2.101a7.002 7.002 0 0111.601 2.566 1 1 0 11-1.885.666A5.002 5.002 0 005.999 7H9a1 1 0 010 2H4a1 1 0 01-1-1V3a1 1 0 011-1zm.008 9.057a1 1 0 011.276.61A5.002 5.002 0 0014.001 13H11a1 1 0 110-2h5a1 1 0 011 1v5a1 1 0 11-2 0v-2.101a7.002 7.002 0 01-11.601-2.566 1 1 0 01.61-1.276z"
      clipRule="evenodd"
    />
  </Svg>
);

export default Refresh;
|
#!/bin/bash
# Simple wrapper for (docked) message consumption from kafka
# Fix: the -o/--offset and --isolation options were parsed and documented
# but never forwarded to the console consumer; they are now passed through
# as consumer properties (auto.offset.reset / isolation.level).

YELLOW=$(tput setaf 3)
RESET=$(tput sgr0)

IMAGE='wurstmeister/kafka:latest'
BROKERS="${KAFKA_BROKERS:-localhost:9092}"
OFFSET="${KAFKA_OFFSET:-earliest}"
ISOLATION_LEVEL="${KAFKA_ISOLATION_LEVEL:-read_committed}"
FROM_BEGINNING_FLAG=''
# NOTE(review): $1 seeds TOPIC here, but the option parser below rejects bare
# positional arguments, so only the env-var/default paths are reachable.
TOPIC="${1:-${KAFKA_TOPIC:-$(whoami)-test}}"

usage() {
    echo 'Usage: consumer [OPTIONS]'
    echo ''
    echo 'OPTIONS:'
    echo ''
    echo '-b, --brokers,'
    echo '--bootstrap-server The kafka broker list to initially use to identify the cluster.'
    echo '                   You can also use the $KAFKA_BROKERS var.'
    echo ''
    echo '--from-beginning   Pass the "from beginning" flag through to the console consumer'
    echo ''
    echo '-g, --group,       The consumer group ID to use to consume'
    echo '--consumer-group'
    echo ''
    echo '--isolation        read_committed or read_uncommitted. Defaults to the former.'
    echo '                   See kafka docs. You can also use the $ISOLATION_LEVEL var.'
    echo ''
    echo '-o, --offset       The kafka offset to use if none exists for the consumer group.'
    echo '                   Defaults to earliest'
    echo ''
    echo '-t, --topic TOPIC  The kafka topic from which to read; you can also use the'
    echo '                   $KAFKA_TOPIC env var. If neither is set, defaults'
    echo "                   to $(whoami)-test"
}

# Parse args
while [[ "$1" != '' ]]; do
    case "$1" in
        -b|--brokers|--bootstrap-server)
            shift
            BROKERS="$1"
            shift
            ;;
        --from-beginning)
            shift
            FROM_BEGINNING_FLAG='--from-beginning'
            ;;
        -g|--group|--consumer-group)
            shift
            GROUP="$1"
            shift
            ;;
        --isolation)
            shift
            ISOLATION_LEVEL="$1"
            shift
            ;;
        -o|--offset)
            shift
            OFFSET="$1"
            shift
            ;;
        -t|--topic)
            shift
            TOPIC="$1"
            shift
            ;;
        --help)
            shift
            usage
            exit 0
            ;;
        -*)
            echo "Unrecognised flag $1" >&2
            exit 1
            ;;
        *)
            echo "Unexpected argument $1" >&2
            exit 2
            ;;
    esac
done

echo "${YELLOW}Reading from $TOPIC at $BROKERS...$RESET"
# $FROM_BEGINNING_FLAG is intentionally unquoted: when empty it must expand
# to nothing rather than an empty argument.
docker run --rm --entrypoint kafka-console-consumer.sh --net host $IMAGE \
    --topic "$TOPIC" \
    --bootstrap-server "$BROKERS" \
    --group "${GROUP:-"$(whoami)-docked-console-consumer-group"}" \
    --consumer-property "isolation.level=${ISOLATION_LEVEL}" \
    --consumer-property "auto.offset.reset=${OFFSET}" \
    $FROM_BEGINNING_FLAG \
    --skip-message-on-error
|
// Minimal async/await demo.
async function test() {
}

async function main() {
  const p = test();
  await p;
  // Fix: p.finally() was called with no callback -- a no-op that also
  // created a floating derived promise. Chain a real (empty) handler and
  // await it so nothing escapes unhandled.
  await p.finally(() => {});
}

// Fix: surface any rejection instead of producing an unhandled rejection.
main().catch(console.error);
|
/*
Copyright (c) 2015 <NAME>
This code uses a class extracted and adapted from the OpenCV library
for use with the Cinder C++ library, http://libcinder.org and all the
relevant portion of code is tied to OpenCV license that can be found
in Triangulation.cpp file
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#pragma once

// Fix: include the standard headers this interface actually uses instead of
// relying on the cinder headers pulling them in transitively.
#include <cstdint>
#include <vector>

#include "cinder/Rect.h"
#include "cinder/Vector.h"

namespace Delaunay {
	//! triangulates the list of points and returns a list of triangle indices
	std::vector<uint32_t> getTriangleIndices( const ci::Rectf &rect, const std::vector<ci::vec2> &points = std::vector<ci::vec2>() );
} // namespace Delaunay
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jerl.bcm.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;
import jerl.bcm.inj.Injection;
import jerl.bcm.inj.InjectionMethod;
import jerl.bcm.inj.impl.MethodCallCrash;
import jerl.bcm.inj.impl.MethodCallGC;
import jerl.bcm.inj.impl.MethodCallOut;
import jerl.bcm.inj.impl.MethodEntryCrash;
import jerl.bcm.inj.impl.MethodEntryOut;
import jerl.bcm.inj.impl.MethodExceptionHandlerPrintStackTrace;
import jerl.bcm.inj.impl.MethodExitCrash;
import jerl.bcm.inj.impl.MethodExitOut;
import jerl.bcm.inj.impl.MethodOffsetGC;
public class InjectionUtil {
public static final String STRING_TYPE = "java.lang.String";
public static final String INT_TYPE = "int";
public static final String BOOLEAN_TYPE = "boolean";
public static void deconstructClassToStream(ClassInjContainer cic, PrintStream out) {
out.println("\\begin{class}[" + cic.getClassName() + "]");
Enumeration<InjectionMethod> en = cic.methodInjections();
while (en.hasMoreElements()) {
Injection inj = en.nextElement();
List<String> l = deconstructInjection(inj);
if (l.size() < 2) {
// invalid
continue;
}
out.println("\t" + l.get(0));
for (int i = 1; i < l.size() - 1; i++) {
out.println("\t\t" + l.get(i));
}
out.println("\t" + l.get(l.size() - 1));
}
out.println("\\end{class}");
}
public static List<String> deconstructInjection(Injection inj) {
Vector<String> ret = new Vector<String>();
Class<?> c = inj.getClass();
List<String> l = inj.getInstanceData();
Constructor<?> constructor = getDefaultConstructor(c);
int numArgs = constructor.getParameterTypes().length;
String argTypes = createTypeString(constructor);
ret.add("\\begin{injection}[" + numArgs + "]");
ret.add("ClassName=" + c.getName());
ret.add("ArgTypes=" + argTypes);
for (int i = 0; i < l.size(); i++) {
ret.add("ArgValue[" + i + "]=" + l.get(i));
}
ret.add("\\end{injection}");
return ret;
}
public static Constructor<?> getDefaultConstructor(Class<?> c) {
Constructor<?>[] constList = c.getConstructors();
if (constList.length != 1) {
throw new IllegalArgumentException("Unable to find default constructor of class=" + c.getName());
}
return constList[0];
}
public static String createTypeString(Constructor<?> constructor) {
StringBuffer sb = new StringBuffer();
Class<?>[] classList = constructor.getParameterTypes();
for (int i = 0; i < classList.length; i++) {
String name = classList[i].getName();
// if (name.equals(STRING_TYPE) || name.equals(INT_TYPE) || name.equals(BOOLEAN_TYPE)) {
sb.append(name);
if (i < classList.length - 1) {
sb.append(",");
}
// } else {
// throw new IllegalArgumentException("Invalid argument type in constructor");
// }
}
return sb.toString();
}
public static byte[] writeTest() {
ByteArrayOutputStream baos = new ByteArrayOutputStream(1024);
PrintStream ps = new PrintStream(baos);
ClassInjContainer cic = new ClassInjContainer("com/sun/Test");
cic.addMethodInjection(new MethodEntryOut("a(III[I)V", "desc"));
cic.addMethodInjection(new MethodOffsetGC("a(III[I)V", 30));
cic.addMethodInjection(new MethodExceptionHandlerPrintStackTrace("a(III[I)V"));
cic.addMethodInjection(new MethodCallCrash("a(III[I)V", "java/io/DataInputStream.readInt()I", true));
cic.addMethodInjection(new MethodCallOut("a(Lv1;)V", "java/io/DataInputStream.readInt()I", true, "message"));
cic.addMethodInjection(new MethodExitCrash("a(III[I)V"));
cic.addMethodInjection(new MethodExitOut("a(III[I)V", "message"));
cic.addMethodInjection(new MethodEntryCrash("c(I)I"));
ClassInjContainer cic2 = new ClassInjContainer("com/sun/Test2");
cic2.addMethodInjection(new MethodEntryOut("e(III[I)V", "desc2"));
cic2.addMethodInjection(new MethodEntryOut("e(III[I)V", "desc2"));
cic2.addMethodInjection(new MethodEntryOut("e(III[I)V", "desc2"));
cic2.addMethodInjection(new MethodEntryOut("f(III[I)V", "desc2"));
cic2.addMethodInjection(new MethodEntryOut("b(III[I)V", "desc2"));
cic2.addMethodInjection(new MethodEntryOut("d(III[I)V", "desc2"));
cic2.addMethodInjection(new MethodOffsetGC("k(I)I", 10));
cic2.addMethodInjection(new MethodExceptionHandlerPrintStackTrace("a(III[I)V"));
cic2.addMethodInjection(new MethodCallCrash("f(III[I)V", "java/lang/System.currentTimeMillis()J", true));
cic2.addMethodInjection(new MethodCallGC("f(III[I)V", "java/lang/System.currentTimeMillis()J", true));
cic2.addMethodInjection(new MethodCallOut("a(Lv1;)V", "java/lang/System.currentTimeMillis()J", true, "message2"));
cic2.addMethodInjection(new MethodExitCrash("b(III[I)V"));
cic2.addMethodInjection(new MethodExitOut("d(III[I)V", "message2"));
cic2.addMethodInjection(new MethodEntryCrash("k(I)I"));
ClassInjContainer cic3 = new ClassInjContainer("b");
cic3.addMethodInjection(new MethodEntryOut("a(III)Ljavax/microedition/lcdui/Font;", "desc2"));
cic3.addMethodInjection(new MethodOffsetGC("a(Ljava/lang/String;)Ljava/io/InputStream;", 10));
cic3.addMethodInjection(new MethodCallOut("a(Ljava/lang/String;)Ljavax/microedition/lcdui/Image;", "javax/microedition/lcdui/Image.createImage(Ljava/lang/String;)Ljavax/microedition/lcdui/Image;", true, "message"));
deconstructClassToStream(cic, System.out);
deconstructClassToStream(cic2, System.out);
deconstructClassToStream(cic3, System.out);
deconstructClassToStream(cic, ps);
deconstructClassToStream(cic2, ps);
deconstructClassToStream(cic3, ps);
ps.flush();
byte[] bBuf = baos.toByteArray();
return bBuf;
}
public static void main(String[] args) {
// simulate construction of Injections and writing them to a stream.
byte[] bBuf = writeTest();
System.out.println("*****************************");
// load injections and also print them, just to verify
try {
InjectionBuilder ib = new InjectionBuilder(new ByteArrayInputStream(bBuf));
Enumeration<String> enumClassNames = ib.getClassNamesForInjections();
while (enumClassNames.hasMoreElements()) {
String className = enumClassNames.nextElement();
ClassInjContainer cic = ib.getClassInjContainer(className);
deconstructClassToStream(cic, System.out);
// when applying injections we just need this:
cic.methodInjectionsToArray();
// to be passed in to InjectionEngine.preformInjection()
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
def bin_permutations(width=4):
    """Return all binary strings of the given bit width, in ascending order.

    Generalized from the original hard-coded 4-bit version; the default
    preserves the original behaviour.

    Args:
        width: number of bits per string (default 4).

    Returns:
        list[str]: the 2**width zero-padded binary strings, e.g. for
        width=2: ["00", "01", "10", "11"].
    """
    return ["{:0{}b}".format(i, width) for i in range(2 ** width)]
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.rendering.assets.atlas;
import org.terasology.gestalt.assets.AssetData;
import org.terasology.engine.rendering.assets.texture.subtexture.SubtextureData;
import org.terasology.gestalt.naming.Name;
import java.util.Map;
/**
 * Asset data backing a texture atlas: named sub-texture regions keyed by
 * {@link Name}. The map reference is stored as provided by the caller.
 */
public class AtlasData implements AssetData {

    private final Map<Name, SubtextureData> subtextures;

    /**
     * @param subtextureMap mapping from region name to its sub-texture definition
     */
    public AtlasData(Map<Name, SubtextureData> subtextureMap) {
        this.subtextures = subtextureMap;
    }

    /** @return the sub-texture regions held by this atlas */
    public Map<Name, SubtextureData> getSubtextures() {
        return subtextures;
    }
}
|
#!/bin/sh
# Create a new Terraform workspace, holding the state lock while doing so.
SELF=$(realpath "${0}")
# shellcheck source=./config/terraform-defaults.sh
. "$(dirname "${SELF}")/config/terraform-defaults.sh"
# Fix: quote "$@" so workspace names or flags containing whitespace are
# forwarded intact (the previous unquoted ${@} required suppressing SC2068).
terraform \
	workspace \
	new \
	-lock=true \
	-lock-timeout="${TERRAFORM_LOCK_TIMEOUT}" \
	"$@"
|
#!/usr/bin/env bash
# Travis CI driver: configure, build and test with CMake for the language
# selected via $LANGUAGE (cpp | python | dotnet | java).
set -x   # echo each command
set -e   # abort on first failure
#################
##  CONFIGURE  ##
#################
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
    # Prefer the pinned CMake over the distro default
    export PATH=/opt/cmake-3.16.2/bin:$PATH
fi
cmake --version
if [[ "$LANGUAGE" == "cpp" ]]; then
    LDFLAGS=-v cmake -S. -Bbuild
elif [[ "$LANGUAGE" == "python" ]]; then
    python --version
    cmake -S. -Bbuild -DBUILD_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3.7
elif [[ "$LANGUAGE" == "dotnet" ]]; then
    if [ "${TRAVIS_OS_NAME}" == osx ];then
        # Installer changes path but won't be picked up in current terminal session
        # Need to explicitly add location
        export PATH=/usr/local/share/dotnet:"${PATH}"
    fi
    dotnet --info
    cmake -S. -Bbuild -DBUILD_DOTNET=ON
elif [[ "$LANGUAGE" == "java" ]]; then
    java -version
    cmake -S. -Bbuild -DBUILD_JAVA=ON
fi
#############
##  BUILD  ##
#############
cmake --build build --target all -- VERBOSE=1
############
##  TEST  ##
############
cmake --build build --target test
# vim: set tw=0 ts=2 sw=2 expandtab:
|
// NOTE(review): this file appears to be compiled output (tslib helpers and
// the trailing sourceMappingURL) -- prefer editing the TypeScript source.
import { __assign, __extends } from "tslib";
import { get } from '@antv/util';
import GroupComponent from '../abstract/group-component';
import Theme from '../util/theme';
import { pointsToBBox } from '../util/util';
import { renderTag } from '../util/graphic';
// Annotation component that shades the region spanned by a series of data
// points and renders a text tag centered above it.
var DataRegionAnnotation = /** @class */ (function (_super) {
    __extends(DataRegionAnnotation, _super);
    function DataRegionAnnotation() {
        return _super !== null && _super.apply(this, arguments) || this;
    }
    /**
     * The default configuration
     * @returns {object} the default configuration
     */
    DataRegionAnnotation.prototype.getDefaultCfg = function () {
        var cfg = _super.prototype.getDefaultCfg.call(this);
        return __assign(__assign({}, cfg), { name: 'annotation', type: 'dataRegion', locationType: 'points', points: [], lineLength: 0, region: {}, text: {}, defaultCfg: {
                region: {
                    style: {
                        lineWidth: 0,
                        fill: Theme.regionColor,
                        opacity: 0.4,
                    },
                },
                text: {
                    content: '',
                    style: {
                        textAlign: 'center',
                        textBaseline: 'bottom',
                        fontSize: 12,
                        fill: Theme.textColor,
                        fontFamily: Theme.fontFamily,
                    },
                },
            } });
    };
    DataRegionAnnotation.prototype.renderInner = function (group) {
        var regionStyle = get(this.get('region'), 'style', {});
        // NOTE(review): textStyle is computed but never used below; renderTag
        // presumably reads the style from this.get('text') -- confirm.
        var textStyle = get(this.get('text'), 'style', {});
        var lineLength = this.get('lineLength') || 0;
        var points = this.get('points');
        if (!points.length) {
            return;
        }
        // Bounding box of all points; the region outline starts and ends
        // lineLength above its top edge.
        var bbox = pointsToBBox(points);
        // render region
        var path = [];
        path.push(['M', points[0].x, bbox.minY - lineLength]);
        points.forEach(function (point) {
            path.push(['L', point.x, point.y]);
        });
        path.push(['L', points[points.length - 1].x, points[points.length - 1].y - lineLength]);
        this.addShape(group, {
            type: 'path',
            id: this.getElementId('region'),
            name: 'annotation-region',
            attrs: __assign({ path: path }, regionStyle),
        });
        // render text (horizontally centered over the region, above the line)
        var textCfg = __assign({ id: this.getElementId('text'), name: 'annotation-text', x: (bbox.minX + bbox.maxX) / 2, y: bbox.minY - lineLength }, this.get('text'));
        renderTag(group, textCfg);
    };
    return DataRegionAnnotation;
}(GroupComponent));
export default DataRegionAnnotation;
//# sourceMappingURL=data-region.js.map
package view

import (
	"fmt"
	"reflect"
)

// FrontController adapts a content View into a routable ViewWithURL.
type FrontController func(content View) ViewWithURL

// ContentFunc validates contentFunc via reflection and wraps it in a
// DynamicView that is then passed through the front controller.
//
// contentFunc must have the signature
//
//	func(ctx *Context, data T) (View, error)
//
// where T must match the dynamic type of ctx.Data at request time.
//
// Fix: the panic messages previously referred to a non-existent
// "FrontController.ForContent" method; they now name ContentFunc.
func (self FrontController) ContentFunc(contentFunc interface{}) ViewWithURL {
	v := reflect.ValueOf(contentFunc)
	t := v.Type()
	if t.Kind() != reflect.Func {
		panic(fmt.Errorf("FrontController.ContentFunc: contentFunc must be a function, got %s", t))
	}
	if t.NumIn() != 2 {
		panic(fmt.Errorf("FrontController.ContentFunc: contentFunc must have two arguments, got %d", t.NumIn()))
	}
	if t.In(0) != reflect.TypeOf((*Context)(nil)) {
		panic(fmt.Errorf("FrontController.ContentFunc: contentFunc's first argument must be of type *Context, got %s", t.In(0)))
	}
	if t.NumOut() != 2 {
		panic(fmt.Errorf("FrontController.ContentFunc: contentFunc must have two results, got %d", t.NumOut()))
	}
	if t.Out(0) != reflect.TypeOf((*View)(nil)).Elem() {
		panic(fmt.Errorf("FrontController.ContentFunc: contentFunc's first result must be of type View, got %s", t.Out(0)))
	}
	if t.Out(1) != reflect.TypeOf((*error)(nil)).Elem() {
		panic(fmt.Errorf("FrontController.ContentFunc: contentFunc's second result must be of type error, got %s", t.Out(1)))
	}
	content := DynamicView(
		func(ctx *Context) (View, error) {
			// The data argument's type can only be checked per-request.
			if reflect.TypeOf(ctx.Data) != t.In(1) {
				panic(fmt.Errorf("FrontController: Context.Data must be of type %s, got %T", t.In(1), ctx.Data))
			}
			args := []reflect.Value{reflect.ValueOf(ctx), reflect.ValueOf(ctx.Data)}
			results := v.Call(args)
			view, _ := results[0].Interface().(View)
			err, _ := results[1].Interface().(error)
			return view, err
		},
	)
	return self(content)
}
|
#!/bin/bash
#
# File: llnms-list-networks.bash
# Author: Marvin Smith
# Date: 12/8/2013
#
# Purpose: List networks registered in LLNMS
#
#-------------------------------------#
#- Warning Function -#
#- -#
#- $1 - Error Message -#
#- $2 - Line Number (Optional). -#
#- $3 - File Name (Optional).        -#
#-------------------------------------#
warning(){
    # Emit a warning message on stdout.
    #   $1 - message (required)
    #   $2 - line number (optional)
    #   $3 - file name (optional; defaults to this script's name)
    case $# in
        1) echo "warning: $1" ;;
        2) echo "warning: $1 Line: $2, File: `basename $0`" ;;
        *) echo "warning: $1 Line: $2, File: $3" ;;
    esac
}
#-------------------------------------#
#- Error Function -#
#- -#
#- $1 - Error Message -#
#- $2 - Line Number (Optional). -#
#- $3 - File Name (Optional).        -#
#-------------------------------------#
error(){
    # Emit an error message on stdout (mirrors warning()).
    #   $1 - message (required)
    #   $2 - line number (optional)
    #   $3 - file name (optional; defaults to this script's name)
    case $# in
        1) echo "error: $1" ;;
        2) echo "error: $1 Line: $2, File: `basename $0`" ;;
        *) echo "error: $1 Line: $2, File: $3" ;;
    esac
}
#-------------------------------------#
# Version Function #
#-------------------------------------#
version(){
    # Print the script name and the LLNMS version; the LLNMS_* variables
    # are sourced from $LLNMS_HOME/config/llnms-info before this is called.
    echo "`basename $0` Information"
    echo ''
    echo " LLNMS Version ${LLNMS_MAJOR}.${LLNMS_MINOR}.${LLNMS_SUBMINOR}"
}
#-------------------------------------#
# Usage Instructions #
#-------------------------------------#
usage(){
    # Print command-line usage instructions for this script.
    echo "$0: [options]"
    echo ''
    echo ' options:'
    echo ' -h, --help : Print Usage Instructions'
    echo ' -v, --version : Print Program Version Information'
    echo ''
    echo ' Formatting'
    echo ' -l, --list : Print in a List format'
    echo ' -p, --pretty : Print in a human-readable format (DEFAULT)'
    echo ' -x, --xml : Print in a XML format'
    echo ''
    echo ' --name-only : Print only network names'
    echo ' --file-only : Print only filenames'
}
#-------------------------------------#
# Main Function -#
#-------------------------------------#
# Source llnms home
if [ "$LLNMS_HOME" = "" ]; then
    LLNMS_HOME="/var/tmp/llnms"
fi

# Import the version info
. $LLNMS_HOME/config/llnms-info

# Set the output format
OUTPUT_FORMAT="LIST"
NAME_ONLY=0
FILE_ONLY=0

# parse command-line options
for OPTION in $@; do
    case $OPTION in
        # Print Usage Instructions
        "-h" | "--help" )
            usage
            exit 1
            ;;
        # Print Version Information
        "-v" | "--version" )
            version
            exit 1
            ;;
        # Set format to pretty
        "-p" | "--pretty" )
            OUTPUT_FORMAT="PRETTY"
            ;;
        # Set format to list
        '-l' | '--list' )
            OUTPUT_FORMAT='LIST'
            ;;
        # Set format to xml
        "-x" | "--xml" )
            OUTPUT_FORMAT="XML"
            ;;
        # Print only names
        '--name-only' )
            NAME_ONLY=1
            ;;
        # Print only files
        '--file-only' )
            FILE_ONLY=1
            ;;
        # Print Error
        *)
            error "Unknown option $OPTION"
            ;;
    esac
done

# Start printing xml info if output type is xml
# NOTE(review): OUTPUT is initialised here but never appended to or printed,
# so the XML format currently produces no output -- confirm intent.
if [ "$OUTPUT_FORMAT" = "XML" ]; then
    OUTPUT="<llnms-list-network-output>\n"
fi

# Iterate through each network file, printing information about each file
NETWORK_FILES=`ls $LLNMS_HOME/networks/*.llnms-network.xml 2> /dev/null`
for NETWORK_FILE in $NETWORK_FILES; do

    # Print the name
    # NOTE(review): values are used as printf FORMAT strings; a name
    # containing % or \ would be misrendered.
    NETWORK_NAME="`$LLNMS_HOME/bin/llnms-print-network-info -n -f $NETWORK_FILE`"
    if [ "$OUTPUT_FORMAT" = 'LIST' -a "$NAME_ONLY" = '1' ]; then
        printf "$NETWORK_NAME"
    elif [ "$OUTPUT_FORMAT" = 'LIST' -a "$FILE_ONLY" = '0' -a "$NAME_ONLY" = '0' ]; then
        printf "$NETWORK_NAME "
    fi

    # Print the address start
    ADDRESS_START="`$LLNMS_HOME/bin/llnms-print-network-info -s -f $NETWORK_FILE`"
    if [ "$OUTPUT_FORMAT" = 'LIST' -a "$FILE_ONLY" = '0' -a "$NAME_ONLY" = '0' ]; then
        printf "$ADDRESS_START "
    fi

    # Print the address end
    ADDRESS_END="`$LLNMS_HOME/bin/llnms-print-network-info -e -f $NETWORK_FILE`"
    if [ "$OUTPUT_FORMAT" = 'LIST' -a "$FILE_ONLY" = '0' -a "$NAME_ONLY" = '0' ]; then
        printf "$ADDRESS_END "
    fi

    # Print the filename
    if [ "$OUTPUT_FORMAT" = 'LIST' -a "$NAME_ONLY" = '0' ]; then
        printf "$NETWORK_FILE "
    fi
    if [ "$FILE_ONLY" = '1' ]; then
        printf "$NETWORK_FILE"
    fi

    # Print a new line
    printf "\n"
done
|
<reponame>jrfaller/maracas
package main.unused.classTypeChanged;

/**
 * Empty fixture class for the Maracas test corpus.
 * NOTE(review): the "classTypeChanged" package and "C2A" suffix suggest this
 * type's declared kind changes between library versions to exercise
 * class-type-change detection -- confirm against the paired fixture version.
 */
public class ClassTypeChangedC2A {
}
|
<filename>software/webapp/src/test/java/brooklyn/entity/webapp/ControlledDynamicWebAppClusterIntegrationTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.webapp;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import java.net.URL;
import java.util.List;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import brooklyn.entity.BrooklynAppLiveTestSupport;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.Attributes;
import brooklyn.entity.basic.Lifecycle;
import brooklyn.entity.proxy.AbstractController;
import brooklyn.entity.proxy.LoadBalancer;
import brooklyn.entity.proxy.nginx.NginxController;
import brooklyn.entity.proxying.EntitySpec;
import brooklyn.entity.webapp.ControlledDynamicWebAppClusterTest.RecordingSensorEventListener;
import brooklyn.entity.webapp.jboss.JBoss7Server;
import brooklyn.entity.webapp.tomcat.TomcatServer;
import brooklyn.location.basic.LocalhostMachineProvisioningLocation;
import brooklyn.test.Asserts;
import brooklyn.test.EntityTestUtils;
import brooklyn.test.HttpTestUtils;
import brooklyn.test.entity.TestJavaWebAppEntity;
import brooklyn.util.collections.CollectionFunctionals;
import brooklyn.util.collections.MutableMap;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
/**
 * Live integration tests for {@code ControlledDynamicWebAppCluster}: verifies that the
 * cluster configures its load-balancing controller, propagates hostname/root-URL
 * sensors upward, honours a custom web-cluster spec, walks the expected service
 * lifecycle, and tolerates webapps that issue absolute redirects.
 *
 * NOTE(review): group "Integration" — these tests start real server processes
 * (nginx, JBoss/Tomcat) on localhost.
 */
public class ControlledDynamicWebAppClusterIntegrationTest extends BrooklynAppLiveTestSupport {
    private static final Logger log = LoggerFactory.getLogger(ControlledDynamicWebAppClusterIntegrationTest.class);

    // Timeout (ms) used for the eventually-equals sensor assertions below.
    private static final int TIMEOUT_MS = 10*1000;

    private URL warUrl;                                       // hello-world.war resolved from the test classpath
    private LocalhostMachineProvisioningLocation loc;
    private List<LocalhostMachineProvisioningLocation> locs;  // single-element list passed to app.start

    @BeforeMethod(alwaysRun=true)
    public void setUp() throws Exception {
        super.setUp();
        String warPath = "hello-world.war";
        warUrl = getClass().getClassLoader().getResource(warPath);
        loc = app.newLocalhostProvisioningLocation();
        locs = ImmutableList.of(loc);
    }

    /** Starting the cluster should leave the nginx controller serving the deployed war. */
    @Test(groups="Integration")
    public void testConfiguresController() {
        ControlledDynamicWebAppCluster cluster = app.createAndManageChild(EntitySpec.create(ControlledDynamicWebAppCluster.class)
                .configure("initialSize", 1)
                .configure("memberSpec", EntitySpec.create(JBoss7Server.class).configure("war", warUrl.toString())));
        app.start(locs);
        String url = cluster.getController().getAttribute(NginxController.ROOT_URL);
        HttpTestUtils.assertHttpStatusCodeEventuallyEquals(url, 200);
        HttpTestUtils.assertContentEventuallyContainsText(url, "Hello");
    }

    /** The cluster's own HOSTNAME/ROOT_URL/SERVICE_UP sensors should mirror the controller's. */
    @Test(groups="Integration")
    public void testSetsToplevelHostnameFromController() {
        ControlledDynamicWebAppCluster cluster = app.createAndManageChild(EntitySpec.create(ControlledDynamicWebAppCluster.class)
                .configure("initialSize", 1)
                .configure("memberSpec", EntitySpec.create(JBoss7Server.class).configure("war", warUrl.toString())));
        app.start(locs);
        // Controller's published values are the expected values for the cluster-level sensors.
        String expectedHostname = cluster.getController().getAttribute(LoadBalancer.HOSTNAME);
        String expectedRootUrl = cluster.getController().getAttribute(LoadBalancer.ROOT_URL);
        boolean expectedServiceUp = true;
        assertNotNull(expectedHostname);
        assertNotNull(expectedRootUrl);
        EntityTestUtils.assertAttributeEqualsEventually(MutableMap.of("timeout", TIMEOUT_MS), cluster, ControlledDynamicWebAppCluster.HOSTNAME, expectedHostname);
        EntityTestUtils.assertAttributeEqualsEventually(MutableMap.of("timeout", TIMEOUT_MS), cluster, ControlledDynamicWebAppCluster.ROOT_URL, expectedRootUrl);
        EntityTestUtils.assertAttributeEqualsEventually(MutableMap.of("timeout", TIMEOUT_MS), cluster, ControlledDynamicWebAppCluster.SERVICE_UP, expectedServiceUp);
    }

    /** A caller-supplied WEB_CLUSTER_SPEC must be used (and still receive the member spec). */
    @Test(groups="Integration")
    public void testCustomWebClusterSpecGetsMemberSpec() {
        ControlledDynamicWebAppCluster cluster = app.createAndManageChild(EntitySpec.create(ControlledDynamicWebAppCluster.class)
                .configure("initialSize", 1)
                .configure(ControlledDynamicWebAppCluster.MEMBER_SPEC, EntitySpec.create(JBoss7Server.class)
                        .configure(JBoss7Server.ROOT_WAR, warUrl.toString()))
                .configure(ControlledDynamicWebAppCluster.WEB_CLUSTER_SPEC, EntitySpec.create(DynamicWebAppCluster.class)
                        .displayName("mydisplayname")));
        app.start(locs);
        String url = cluster.getController().getAttribute(NginxController.ROOT_URL);
        HttpTestUtils.assertContentEventuallyContainsText(url, "Hello");
        // and make sure it really was using our custom spec
        assertEquals(cluster.getCluster().getDisplayName(), "mydisplayname");
    }

    // Needs to be integration test because still using nginx controller; could pass in mock controller
    /** SERVICE_STATE_ACTUAL should transition STARTING->RUNNING on start and STOPPING->STOPPED on stop. */
    @Test(groups="Integration")
    public void testSetsServiceLifecycle() {
        ControlledDynamicWebAppCluster cluster = app.createAndManageChild( EntitySpec.create(ControlledDynamicWebAppCluster.class)
                .configure("initialSize", 1)
                .configure(ControlledDynamicWebAppCluster.MEMBER_SPEC, EntitySpec.create(TestJavaWebAppEntity.class)) );
        EntityTestUtils.assertAttributeEqualsEventually(cluster, Attributes.SERVICE_STATE_ACTUAL, Lifecycle.STOPPED);
        // Subscribe BEFORE starting so we observe every lifecycle transition.
        RecordingSensorEventListener<Lifecycle> listener = new RecordingSensorEventListener<Lifecycle>(true);
        app.subscribe(cluster, Attributes.SERVICE_STATE_ACTUAL, listener);
        app.start(locs);
        Asserts.eventually(Suppliers.ofInstance(listener.getValues()), CollectionFunctionals.sizeEquals(2));
        assertEquals(listener.getValues(), ImmutableList.of(Lifecycle.STARTING, Lifecycle.RUNNING), "vals="+listener.getValues());
        // Reset recorded events so the stop phase is checked in isolation.
        listener.getValues().clear();
        app.stop();
        EntityTestUtils.assertAttributeEqualsEventually(cluster, Attributes.SERVICE_STATE_ACTUAL, Lifecycle.STOPPED);
        Asserts.eventually(Suppliers.ofInstance(listener.getValues()), CollectionFunctionals.sizeEquals(2));
        assertEquals(listener.getValues(), ImmutableList.of(Lifecycle.STOPPING, Lifecycle.STOPPED), "vals="+listener.getValues());
    }

    /** A Tomcat member answering health checks with an absolute redirect must still count as up. */
    @Test(groups="Integration")
    public void testTomcatAbsoluteRedirect() {
        final ControlledDynamicWebAppCluster cluster = app.createAndManageChild(EntitySpec.create(ControlledDynamicWebAppCluster.class)
                .configure(ControlledDynamicWebAppCluster.MEMBER_SPEC, EntitySpec.create(TomcatServer.class)
                        .configure(TomcatServer.ROOT_WAR, "classpath://hello-world.war"))
                .configure("initialSize", 1)
                .configure(AbstractController.SERVICE_UP_URL_PATH, "hello/redirectAbsolute")
        );
        app.start(locs);
        final NginxController nginxController = (NginxController) cluster.getController();
        // Wait until nginx has exactly one member in its server pool.
        Asserts.succeedsEventually(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return nginxController.getServerPoolAddresses().size() == 1;
            }
        });
        Entity tomcatServer = Iterables.getOnlyElement(cluster.getCluster().getMembers());
        EntityTestUtils.assertAttributeEqualsEventually(tomcatServer, Attributes.SERVICE_UP, true);
        // Continually: nginx must not flap down despite the redirecting health-check path.
        EntityTestUtils.assertAttributeEqualsContinually(nginxController, Attributes.SERVICE_UP, true);
        app.stop();
    }
}
|
#!/bin/bash
# SLURM job: train a multi-CNN patch-level model (exp16) with clinicadl,
# transfer-learning from a previously trained autoencoder.
#SBATCH --partition=gpu_p1
#SBATCH --time=20:00:00
#SBATCH --mem=60G
#SBATCH --cpus-per-task=10
#SBATCH --threads-per-core=1         # on réserve des coeurs physiques et non logiques
#SBATCH --ntasks=1
#SBATCH --workdir=/gpfswork/rech/zft/upd53tc/jobs/AD-DL/train/patch_level/multi_cnn
#SBATCH --output=./exp16/pytorch_job_%j.out
#SBATCH --error=./exp16/pytorch_job_%j.err
#SBATCH --job-name=exp16_cnn
#SBATCH --gres=gpu:1
#SBATCH --mail-type=END
#SBATCH --mail-user=mauricio.diaz@inria.fr

#export http_proxy=http://10.10.2.1:8123
#export https_proxy=http://10.10.2.1:8123

# Experiment training autoencoder
eval "$(conda shell.bash hook)"
conda activate clinicadl_env_py37

# Network structure
NETWORK="Conv4_FC3"
NETWORK_TYPE="multi"
COHORT="ADNI"
DATE="reproducibility_results"
NUM_CNN=36
USE_EXTRACTED_PATCHES=1

# Input arguments to clinicaaddl
CAPS_DIR="$SCRATCH/../commun/datasets/${COHORT}_rerun"
TSV_PATH="$HOME/code/AD-DL/data/$COHORT/lists_by_diagnosis/train"
OUTPUT_DIR="$SCRATCH/results/$DATE/"

# Computation ressources
NUM_PROCESSORS=32
GPU=1

# Dataset Management
PREPROCESSING='linear'
DIAGNOSES="AD CN"
SPLITS=5
SPLIT=$1                              # fold index, passed as first CLI argument

# Training arguments
EPOCHS=200
BATCH=32
BASELINE=0
ACCUMULATION=1
EVALUATION=20
LR=1e-5
WEIGHT_DECAY=1e-4
GREEDY_LEARNING=0
SIGMOID=0
NORMALIZATION=1
PATIENCE=15

# Pretraining
T_BOOL=1
T_PATH="patch3D_model-Conv4_FC3_preprocessing-linear_task-autoencoder_baseline-0_norm-1_splits-5"
T_PATH="$SCRATCH/results/$DATE/$T_PATH"
T_DIFF=0

# Other options: build the optional flag string from the toggles above.
OPTIONS=""
if [ "$GPU" = 1 ]; then
OPTIONS="${OPTIONS} --use_gpu"
fi
if [ "$NORMALIZATION" = 1 ]; then
OPTIONS="${OPTIONS} --minmaxnormalization"
fi
if [ "$T_BOOL" = 1 ]; then
OPTIONS="$OPTIONS --transfer_learning_autoencoder --transfer_learning_path $T_PATH"
fi
if [ "$BASELINE" = 1 ]; then
echo "using only baseline data"
OPTIONS="$OPTIONS --baseline"
fi
if [ "$USE_EXTRACTED_PATCHES" = 1 ]; then
echo "Using extracted slices/patches"
OPTIONS="$OPTIONS --use_extracted_patches"
fi
TASK_NAME="${DIAGNOSES// /_}"
NAME="patch3D_model-${NETWORK}_preprocessing-${PREPROCESSING}_task-autoencoder_baseline-${BASELINE}_norm-${NORMALIZATION}_${NETWORK_TYPE}-cnn_selectionThreshold-0"
# BUGFIX: '[ $SPLITS > 0 ]' compared lexically (and could create a file named '0');
# use the numeric -gt operator instead.
if [ "$SPLITS" -gt 0 ]; then
echo "Use of $SPLITS-fold cross validation, split $SPLIT"
NAME="${NAME}_splits-${SPLITS}"
fi
echo $NAME

# Run clinicaaddl
clinicadl train \
  patch \
  $CAPS_DIR \
  $TSV_PATH \
  $OUTPUT_DIR$NAME \
  $NETWORK \
  --network_type $NETWORK_TYPE \
  --num_cnn $NUM_CNN \
  --batch_size $BATCH \
  --evaluation_steps $EVALUATION \
  --preprocessing $PREPROCESSING \
  --diagnoses $DIAGNOSES \
  --n_splits $SPLITS \
  --split $SPLIT \
  --accumulation_steps $ACCUMULATION \
  --epochs $EPOCHS \
  --learning_rate $LR \
  --weight_decay $WEIGHT_DECAY \
  --patience $PATIENCE \
  $OPTIONS
|
package edu.ncsu.csc316.airline_mileage.data;
/**
* Creates airline object
* @author <NAME> (anfeisal)
*
*/
public class Airline {
private String name;
private String code;
private String callSign;
private String country;
/**
* Constructs new airline object
* @param name the name of the airline
* @param code the code of the airline
* @param callSign the call sign of the airline
* @param country the country of the airline
*/
public Airline(String name, String code, String callSign, String country) {
this.setName(name);
this.setCode(code);
this.setCallSign(callSign);
this.setCountry(country);
}
/**
* Returns name of airline
* @return the name
*/
public String getName() {
return name;
}
/**
* Sets name of airline
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns code of the airline
* @return the code
*/
public String getCode() {
return code;
}
/**
* Sets the code of the airline
* @param code the code to set
*/
public void setCode(String code) {
this.code = code;
}
/**
* Returns the call sign
* @return the callSign
*/
public String getCallSign() {
return callSign;
}
/**
* Sets the call sign
* @param callSign the callSign to set
*/
public void setCallSign(String callSign) {
this.callSign = callSign;
}
/**
* Returns the country of the airline
* @return the country
*/
public String getCountry() {
return country;
}
/**
* Sets the country of the airline
* @param country the country to set
*/
public void setCountry(String country) {
this.country = country;
}
/**
* Returns key for airline object
* @return the key as a string
*/
public String getKey() {
return this.code;
}
}
|
<filename>handlers/machine_driver.go
package handlers
import (
"fmt"
"github.com/Sirupsen/logrus"
"github.com/rancher/event-subscriber/events"
"github.com/rancher/go-machine-service/dynamic"
"github.com/rancher/go-rancher/v3"
)
// DeactivateDriver tears the driver's schemas down but keeps the driver
// resource (removeDriver with delete=false); built-in drivers keep their binary.
func DeactivateDriver(event *events.Event, apiClient *client.RancherClient) error {
	return removeDriver(event, apiClient, false)
}
// RemoveDriver fully removes the driver, including its local binary
// (removeDriver with delete=true).
func RemoveDriver(event *events.Event, apiClient *client.RancherClient) error {
	return removeDriver(event, apiClient, true)
}
// removeDriver is the shared implementation behind DeactivateDriver and
// RemoveDriver. Order of operations:
//  1. drop the driver's dynamic "<Name>Config" schema,
//  2. delete the local driver binary when the driver was user-supplied
//     (empty Checksum) or a hard delete was requested,
//  3. re-upload the remaining machine schemas,
//  4. publish a reply for the triggering event.
// NOTE(review): the parameter `delete` shadows the Go builtin of the same name.
func removeDriver(event *events.Event, apiClient *client.RancherClient, delete bool) error {
	logger.WithFields(logrus.Fields{
		"resourceId": event.ResourceID,
		"eventId":    event.ID,
		"name":       event.Name,
	}).Info("Event")
	driverInfo, err := apiClient.MachineDriver.ById(event.ResourceID)
	if err != nil {
		return err
	}
	if err := dynamic.RemoveSchemas(driverInfo.Name+"Config", apiClient); err != nil {
		return err
	}
	if driverInfo.Checksum == "" || delete {
		driver, err := getDriver(event.ResourceID, apiClient)
		// Lookup failure is silently skipped (best-effort binary cleanup).
		if err == nil {
			logger.Infof("Removing driver %s", driverInfo.Name)
			// NOTE(review): any error from Remove() is discarded — confirm
			// best-effort deletion is intentional.
			driver.Remove()
		}
	}
	if err := dynamic.UploadMachineSchemas(apiClient); err != nil {
		return err
	}
	reply := newReply(event)
	return publishReply(reply, apiClient)
}
// ErrorDriver clears the error state on the driver named by the event, then
// acknowledges the event. If the driver cannot be resolved the error is
// deliberately masked and nil is returned WITHOUT publishing a reply.
func ErrorDriver(event *events.Event, apiClient *client.RancherClient) error {
	logger.WithFields(logrus.Fields{
		"resourceId": event.ResourceID,
		"eventId":    event.ID,
		"name":       event.Name,
	}).Info("Event")
	driver, err := getDriver(event.ResourceID, apiClient)
	if err != nil {
		// mask error
		return nil
	}
	driver.ClearError()
	reply := newReply(event)
	return publishReply(reply, apiClient)
}
// ActivateDriver stages and installs the driver identified by the event's
// resource id (see activate), uploads its machine schemas, and replies with
// the driver's friendly name, the current schema version, and
// defaultActive=false.
func ActivateDriver(event *events.Event, apiClient *client.RancherClient) error {
	logger.WithFields(logrus.Fields{
		"resourceId": event.ResourceID,
		"eventId":    event.ID,
		"name":       event.Name,
	}).Info("Event")
	driver, err := activate(event.ResourceID, apiClient)
	if err != nil {
		return err
	}
	version, err := dynamic.DriverSchemaVersion(apiClient)
	if err != nil {
		return err
	}
	// Reply payload consumed by the event originator.
	reply := newReply(event)
	reply.Data = map[string]interface{}{
		"name":          driver.FriendlyName(),
		"defaultActive": false,
		"schemaVersion": version,
	}
	if err := dynamic.UploadMachineSchemas(apiClient, driver.FriendlyName()); err != nil {
		return err
	}
	return publishReply(reply, apiClient)
}
// getDriver resolves the MachineDriver resource with the given id and wraps it
// in a dynamic.Driver handle. No download or installation happens here.
func getDriver(id string, apiClient *client.RancherClient) (*dynamic.Driver, error) {
	info, lookupErr := apiClient.MachineDriver.ById(id)
	if lookupErr != nil {
		return nil, lookupErr
	}
	handle := dynamic.NewDriver(info.Builtin, info.Name, info.Url, info.Checksum)
	return handle, nil
}
// activate resolves the driver, stages its binary, refuses to proceed when an
// ACTIVE driver with the same friendly name already exists, installs the
// binary, and finally generates and uploads the driver's schema. Returns the
// driver handle together with the schema upload's error (if any).
func activate(id string, apiClient *client.RancherClient) (*dynamic.Driver, error) {
	driver, err := getDriver(id, apiClient)
	if err != nil {
		return nil, err
	}
	if err := driver.Stage(); err != nil {
		return nil, err
	}
	// Guard against two active drivers sharing one friendly name.
	opts := client.NewListOpts()
	opts.Filters["name"] = driver.FriendlyName()
	opts.Filters["state"] = "active"
	existing, err := apiClient.MachineDriver.List(opts)
	if err != nil {
		return nil, err
	}
	if len(existing.Data) > 0 {
		return nil, fmt.Errorf("An active driver name %s already exists", driver.Name())
	}
	if err := driver.Install(); err != nil {
		logger.Errorf("Failed to download/install driver %s: %v", driver.Name(), err)
		return nil, err
	}
	return driver, dynamic.GenerateAndUploadSchema(driver.Name())
}
|
#!/usr/bin/env bash
# Deploy helper: push the latest code to the Pi, then start the sensor loop there.
# BUGFIX: a failing ./update.sh was previously ignored and stale code would run
# remotely; abort on any error instead.
set -e
./update.sh
ssh -t pi@dex.local 'cd ~/conf-room-monitor/;python run_sensors.py'
# Cheetah campaign environment: a small single-process, single-node run on the
# "local" machine (no scheduler account/queue needed).
export CODAR_CHEETAH_GROUP_WALLTIME="1800"
export CODAR_CHEETAH_GROUP_MAX_PROCS="1"
export CODAR_CHEETAH_SCHEDULER_ACCOUNT=""
# queue on PBS, partition on SLURM
export CODAR_CHEETAH_SCHEDULER_QUEUE=""
# SLURM specific options
export CODAR_CHEETAH_SCHEDULER_CONSTRAINT=""
export CODAR_CHEETAH_SCHEDULER_LICENSE=""
export CODAR_CHEETAH_SCHEDULER_RESERVATION=""
# Campaign / group identity
export CODAR_CHEETAH_CAMPAIGN_NAME="codar.cheetah.e-small-one-node"
export CODAR_CHEETAH_GROUP_NAME="all-methods-small"
# Node layout: one shared node, one process per node
export CODAR_CHEETAH_GROUP_NODES="1"
export CODAR_CHEETAH_GROUP_NODE_EXCLUSIVE="False"
export CODAR_CHEETAH_GROUP_PROCESSES_PER_NODE="1"
export CODAR_CHEETAH_MACHINE_NAME="local"
|
<gh_stars>1-10
require "rails_helper"
# Controller spec for the anonymous-feedback document-types API.
# #index returns the sorted, de-duplicated list of non-blank document types;
# #show returns per-document-type feedback counts bucketed by recency, with an
# optional `ordering` parameter that falls back to last_7_days when invalid.
RSpec.describe AnonymousFeedback::DocumentTypesController, type: :controller do
  describe "#index" do
    before { login_as_stub_user }
    context "with existing content items" do
      # Fixtures: two real document types plus blank/nil ones that must be filtered out.
      let!(:sa_content_items) { create_list(:content_item, 2, document_type: "smart_answer") }
      let!(:cs_content_items) { create_list(:content_item, 3, document_type: "case_study") }
      let!(:no_doctype_content_items) { create_list(:content_item, 3, document_type: "") }
      let!(:nil_doctype_content_items) { create_list(:content_item, 3, document_type: nil) }
      before do
        get :index
      end
      subject { response }
      it { is_expected.to be_successful }
      it "returns a result" do
        # Alphabetically sorted, blanks and nils excluded.
        expect(JSON.parse(response.body)).to be_eql(%w[case_study smart_answer])
      end
      it "filters out nils" do
        expect(JSON.parse(response.body)).to_not include(nil)
      end
      it "filters out blank document types" do
        expect(JSON.parse(response.body)).to_not include("")
      end
    end
    context "with no content items" do
      before do
        get :index
      end
      subject { response }
      it { is_expected.to be_successful }
      it "returns an empty array" do
        expect(JSON.parse(response.body)).to be_eql([])
      end
    end
  end
  describe "#show" do
    before { login_as_stub_user }
    # Fixtures span the recency buckets: 2 days (blank/nil types), 32 days
    # (case_study) and 70 days (smart_answer) old.
    let!(:no_doctype_content_items) do
      create(
        :content_item,
        document_type: "",
        created_at: 2.days.ago,
        anonymous_contacts: create_list(:anonymous_contact, 2, created_at: 2.days.ago),
      )
    end
    let!(:nil_doctype_content_items) do
      create(
        :content_item,
        document_type: nil,
        created_at: 2.days.ago,
        anonymous_contacts: create_list(:anonymous_contact, 3, created_at: 2.days.ago),
      )
    end
    let!(:sa_content_items) do
      create(
        :content_item,
        document_type: "smart_answer",
        created_at: 70.days.ago,
        anonymous_contacts: create_list(:anonymous_contact, 1, created_at: 70.days.ago),
      )
    end
    let!(:cs_content_items) do
      create(
        :content_item,
        document_type: "case_study",
        created_at: 32.days.ago,
        anonymous_contacts: create_list(:anonymous_contact, 4, created_at: 32.days.ago),
      )
    end
    context "with no ordering" do
      subject { response }
      it "returns the last_7_days ordered summary for smart_answer" do
        get :show, params: { document_type: "smart_answer" }
        # smart_answer contact is 70 days old: only the 90-day bucket counts it.
        expect(JSON.parse(subject.body)).to eq(
          "document_type" => "smart_answer",
          "anonymous_feedback_counts" => [
            {
              "path" => "/search",
              "last_7_days" => 0,
              "last_30_days" => 0,
              "last_90_days" => 1,
            },
          ],
        )
      end
    end
    context "with a valid ordering" do
      subject { response }
      it "returns the ordered summary for the document_type" do
        get :show, params: { document_type: "smart_answer", ordering: "last_30_days" }
        expect(JSON.parse(subject.body)).to eq(
          "document_type" => "smart_answer",
          "anonymous_feedback_counts" => [
            {
              "path" => "/search",
              "last_7_days" => 0,
              "last_30_days" => 0,
              "last_90_days" => 1,
            },
          ],
        )
      end
      it "returns the ordered summary for case_study" do
        get :show, params: { document_type: "case_study", ordering: "last_30_days" }
        # case_study contacts are 32 days old: 90-day bucket only, count 4.
        expect(JSON.parse(subject.body)).to eq(
          "document_type" => "case_study",
          "anonymous_feedback_counts" => [
            {
              "path" => "/search",
              "last_7_days" => 0,
              "last_30_days" => 0,
              "last_90_days" => 4,
            },
          ],
        )
      end
    end
    context "with invalid ordering" do
      # Unknown ordering values must not error; they fall back to the default.
      before { get :show, params: { document_type: "smart_answer", ordering: "foobar" } }
      subject { response }
      it { is_expected.to be_successful }
      it "returns the default ordered summary for the organisation" do
        expect(JSON.parse(subject.body)).to eq(
          "document_type" => "smart_answer",
          "anonymous_feedback_counts" => [
            {
              "path" => "/search",
              "last_7_days" => 0,
              "last_30_days" => 0,
              "last_90_days" => 1,
            },
          ],
        )
      end
    end
    context "with an invalid document_type" do
      before { get :show, params: { document_type: "invalid_document_type" } }
      subject { response }
      it { is_expected.to be_not_found }
    end
    context "with an empty document_type" do
      # Blank document_type is valid and aggregates the blank/nil-typed items.
      before { get :show, params: { document_type: "" } }
      subject { response }
      it "returns the default ordered summary for the organisation" do
        expect(JSON.parse(subject.body)).to eq(
          "document_type" => "",
          "anonymous_feedback_counts" => [
            {
              "path" => "/search",
              "last_7_days" => 2,
              "last_30_days" => 2,
              "last_90_days" => 2,
            },
          ],
        )
      end
    end
  end
end
|
from sqlalchemy import Column, String, Integer
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()


class ImageCaption(Base):
    """One caption for an image, keyed by (image_path, caption_index, set)."""
    __tablename__ = "ImageCaptions"
    # Path identifying the captioned image.
    image_path = Column(String(64), primary_key=True, index=True, unique=False, nullable=False)
    # Ordinal position of this caption among the image's captions.
    caption_index = Column(Integer, primary_key=True, nullable=False)
    # Dataset split label (e.g. train/val); note it shadows the builtin `set`.
    set = Column(String(64), primary_key=True, index=True, unique=False, nullable=True)
    # The caption text itself.
    caption = Column(String(1024), index=False, unique=False, nullable=True)
class ImageCaptionManager:
    """Thin data-access layer for ImageCaption rows over a SQLAlchemy session."""

    def __init__(self, db):
        """Keep the SQLAlchemy session used for all reads and writes."""
        self.db = db

    def add_caption(self, image_path, caption_index, caption, set=None):
        """Insert one caption row and commit immediately."""
        row = ImageCaption(
            image_path=image_path,
            caption_index=caption_index,
            set=set,
            caption=caption,
        )
        self.db.add(row)
        self.db.commit()

    def get_captions(self, image_path):
        """Return the caption strings stored for ``image_path`` (possibly empty)."""
        rows = self.db.query(ImageCaption).filter_by(image_path=image_path).all()
        return [row.caption for row in rows]

    def delete_caption(self, image_path, caption_index):
        """Delete the caption at (``image_path``, ``caption_index``).

        Raises ValueError when no matching row exists.
        """
        target = (
            self.db.query(ImageCaption)
            .filter_by(image_path=image_path, caption_index=caption_index)
            .first()
        )
        if target is None:
            raise ValueError("Caption not found for the given image path and index")
        self.db.delete(target)
        self.db.commit()
/*
* Copyright 2009-2012 The MyBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.jdbc;
import java.util.ArrayList;
import java.util.List;
/**
 * Static, ThreadLocal-backed SQL statement builder. Callers invoke the
 * upper-case verb methods (SELECT, FROM, WHERE, ...) to accumulate clauses on a
 * per-thread {@link SQL} instance, then call {@link #SQL()} to render the
 * statement and reset the thread's state.
 *
 * NOTE(review): state lives in a ThreadLocal that is reset (replaced) but never
 * removed; in thread-pooled containers the last SQL instance lingers per thread.
 */
public class SqlBuilder {
    // Sentinel strings injected by AND()/OR(); sqlClause() recognises them by
    // identity of content and suppresses the default conjunction around them.
    private static final String AND = ") \nAND (";
    private static final String OR = ") \nOR (";
    // Per-thread statement under construction.
    private static final ThreadLocal<SQL> localSQL = new ThreadLocal<SQL>();

    /** Starts a fresh statement for the current thread (alias of RESET). */
    public static void BEGIN() {
        RESET();
    }

    /** Discards any in-progress statement for the current thread. */
    public static void RESET() {
        localSQL.set(new SQL());
    }

    /** Begins an UPDATE statement on the given table. */
    public static void UPDATE(String table) {
        sql().statementType = SQL.StatementType.UPDATE;
        sql().tables.add(table);
    }

    /** Adds a SET clause fragment, e.g. "name = ?". */
    public static void SET(String sets) {
        sql().sets.add(sets);
    }

    /** Renders the accumulated statement and resets the thread's state. */
    public static String SQL() {
        try {
            return sql().sql();
        } finally {
            RESET();
        }
    }

    /** Begins an INSERT statement into the given table. */
    public static void INSERT_INTO(String tableName) {
        sql().statementType = SQL.StatementType.INSERT;
        sql().tables.add(tableName);
    }

    /** Adds a parallel (columns, values) pair for an INSERT. */
    public static void VALUES(String columns, String values) {
        sql().columns.add(columns);
        sql().values.add(values);
    }

    /** Begins (or extends) the select list of a SELECT statement. */
    public static void SELECT(String columns) {
        sql().statementType = SQL.StatementType.SELECT;
        sql().select.add(columns);
    }

    /** Like SELECT but marks the statement DISTINCT. */
    public static void SELECT_DISTINCT(String columns) {
        sql().distinct = true;
        SELECT(columns);
    }

    /** Begins a DELETE statement on the given table. */
    public static void DELETE_FROM(String table) {
        sql().statementType = SQL.StatementType.DELETE;
        sql().tables.add(table);
    }

    /** Adds a table (or table expression) to the FROM list. */
    public static void FROM(String table) {
        sql().tables.add(table);
    }

    /** Adds a plain JOIN clause. */
    public static void JOIN(String join) {
        sql().join.add(join);
    }

    /** Adds an INNER JOIN clause. */
    public static void INNER_JOIN(String join) {
        sql().innerJoin.add(join);
    }

    /** Adds a LEFT OUTER JOIN clause. */
    public static void LEFT_OUTER_JOIN(String join) {
        sql().leftOuterJoin.add(join);
    }

    /** Adds a RIGHT OUTER JOIN clause. */
    public static void RIGHT_OUTER_JOIN(String join) {
        sql().rightOuterJoin.add(join);
    }

    /** Adds an OUTER JOIN clause. */
    public static void OUTER_JOIN(String join) {
        sql().outerJoin.add(join);
    }

    /** Adds a WHERE condition; subsequent AND()/OR() apply to this list. */
    public static void WHERE(String conditions) {
        sql().where.add(conditions);
        sql().lastList = sql().where;
    }

    /** Inserts an OR conjunction into the most recent WHERE/HAVING list. */
    public static void OR() {
        sql().lastList.add(OR);
    }

    /** Inserts an AND conjunction into the most recent WHERE/HAVING list. */
    public static void AND() {
        sql().lastList.add(AND);
    }

    /** Adds a GROUP BY column list. */
    public static void GROUP_BY(String columns) {
        sql().groupBy.add(columns);
    }

    /** Adds a HAVING condition; subsequent AND()/OR() apply to this list. */
    public static void HAVING(String conditions) {
        sql().having.add(conditions);
        sql().lastList = sql().having;
    }

    /** Adds an ORDER BY column list. */
    public static void ORDER_BY(String columns) {
        sql().orderBy.add(columns);
    }

    // Lazily initialises the current thread's SQL instance.
    private static SQL sql() {
        SQL sql = localSQL.get();
        if (sql == null) {
            RESET();
            sql = localSQL.get();
        }
        return sql;
    }

    /** Mutable accumulator for one statement; rendered by {@link #sql()}. */
    private static class SQL {
        public enum StatementType {
            DELETE,
            INSERT,
            SELECT,
            UPDATE
        }
        StatementType statementType;
        List<String> sets = new ArrayList<String>();
        List<String> select = new ArrayList<String>();
        List<String> tables = new ArrayList<String>();
        List<String> join = new ArrayList<String>();
        List<String> innerJoin = new ArrayList<String>();
        List<String> outerJoin = new ArrayList<String>();
        List<String> leftOuterJoin = new ArrayList<String>();
        List<String> rightOuterJoin = new ArrayList<String>();
        List<String> where = new ArrayList<String>();
        List<String> having = new ArrayList<String>();
        List<String> groupBy = new ArrayList<String>();
        List<String> orderBy = new ArrayList<String>();
        // Points at where/having so AND()/OR() know which list to append to.
        List<String> lastList = new ArrayList<String>();
        List<String> columns = new ArrayList<String>();
        List<String> values = new ArrayList<String>();
        boolean distinct;

        // Appends "keyword open part1 <conj> part2 ... close" to builder.
        // The default conjunction is suppressed around AND/OR sentinel parts,
        // which themselves close and reopen the parenthesised group.
        private void sqlClause(StringBuilder builder, String keyword, List<String> parts, String open, String close, String conjunction) {
            if (!parts.isEmpty()) {
                if (builder.length() > 0) builder.append("\n");
                builder.append(keyword);
                builder.append(" ");
                builder.append(open);
                // Placeholder that can never equal a real part or sentinel.
                String last = "________";
                for (int i = 0, n = parts.size(); i < n; i++) {
                    String part = parts.get(i);
                    if (i > 0 && !part.equals(AND) && !part.equals(OR) && !last.equals(AND) && !last.equals(OR)) {
                        builder.append(conjunction);
                    }
                    builder.append(part);
                    last = part;
                }
                builder.append(close);
            }
        }

        // Renders a SELECT statement from the accumulated clause lists.
        private String selectSQL() {
            StringBuilder builder = new StringBuilder();
            if (distinct) {
                sqlClause(builder, "SELECT DISTINCT", select, "", "", ", ");
            } else {
                sqlClause(builder, "SELECT", select, "", "", ", ");
            }
            sqlClause(builder, "FROM", tables, "", "", ", ");
            sqlClause(builder, "JOIN", join, "", "", "\nJOIN ");
            sqlClause(builder, "INNER JOIN", innerJoin, "", "", "\nINNER JOIN ");
            sqlClause(builder, "OUTER JOIN", outerJoin, "", "", "\nOUTER JOIN ");
            sqlClause(builder, "LEFT OUTER JOIN", leftOuterJoin, "", "", "\nLEFT OUTER JOIN ");
            sqlClause(builder, "RIGHT OUTER JOIN", rightOuterJoin, "", "", "\nRIGHT OUTER JOIN ");
            sqlClause(builder, "WHERE", where, "(", ")", " AND ");
            sqlClause(builder, "GROUP BY", groupBy, "", "", ", ");
            sqlClause(builder, "HAVING", having, "(", ")", " AND ");
            sqlClause(builder, "ORDER BY", orderBy, "", "", ", ");
            return builder.toString();
        }

        // Renders an INSERT statement (table, optional column list, VALUES).
        private String insertSQL() {
            StringBuilder builder = new StringBuilder();
            sqlClause(builder, "INSERT INTO", tables, "", "", "");
            sqlClause(builder, "", columns, "(", ")", ", ");
            sqlClause(builder, "VALUES", values, "(", ")", ", ");
            return builder.toString();
        }

        // Renders a DELETE statement with its WHERE clause.
        private String deleteSQL() {
            StringBuilder builder = new StringBuilder();
            sqlClause(builder, "DELETE FROM", tables, "", "", "");
            sqlClause(builder, "WHERE", where, "(", ")", " AND ");
            return builder.toString();
        }

        // Renders an UPDATE statement with SET and WHERE clauses.
        private String updateSQL() {
            StringBuilder builder = new StringBuilder();
            sqlClause(builder, "UPDATE", tables, "", "", "");
            sqlClause(builder, "SET", sets, "", "", ", ");
            sqlClause(builder, "WHERE", where, "(", ")", " AND ");
            return builder.toString();
        }

        // Dispatches to the renderer for the current statement type; null when
        // no statement verb was ever called.
        public String sql() {
            if (statementType == null) {
                return null;
            }
            String answer;
            switch (statementType) {
                case DELETE:
                    answer = deleteSQL();
                    break;
                case INSERT:
                    answer = insertSQL();
                    break;
                case SELECT:
                    answer = selectSQL();
                    break;
                case UPDATE:
                    answer = updateSQL();
                    break;
                default:
                    answer = null;
            }
            return answer;
        }
    }
}
|
import * as React from 'react';
import { Circle, G, Path } from 'react-native-svg';
import withIcon from '../../lib/withIcon';
/** Visual configuration shared by every variant of the Iconly User icon. */
type Props = {
  /** Opacity applied to the secondary shape ('bulk' / 'two-tone' sets). */
  opacity?: string;
  /** Primary fill/stroke colour. */
  color?: string;
  /** Secondary colour used by the 'bulk' and 'two-tone' sets. */
  secondaryColor?: string;
  /** Icon style: 'bold' | 'bulk' | 'broken' | 'two-tone' | 'curved'; anything else falls back to the light style. */
  set?: string;
  /** Stroke width for the outlined sets. */
  strokeWidth?: string | number;
};
/**
 * Iconly "User" icon (head-and-shoulders silhouette) rendered as react-native-svg
 * primitives, with one drawing per icon set; the `set` prop selects the variant
 * and anything unrecognised falls back to the light (outlined) style.
 *
 * NOTE(review): the variant components below are re-declared on every render,
 * so React sees a new component type each time. Harmless for these static SVG
 * leaves, but hoisting them (or returning the JSX directly) would avoid the
 * remount churn — confirm before changing.
 */
const User = ({
  color, secondaryColor, strokeWidth, opacity, set,
}: Props) => {
  // Filled silhouette in the primary colour.
  const Bold = () => (
    <G transform="translate(4 2)">
      <Path
        d="M0,16.575c0-2.722,3.686-3.4,8-3.4,4.339,0,8,.7,8,3.424S12.315,20,8,20C3.662,20,0,19.3,0,16.575ZM2.706,5.291A5.294,5.294,0,1,1,8,10.583,5.274,5.274,0,0,1,2.706,5.291Z"
        transform="translate(0 0)"
        fill={color}
      />
    </G>
  );
  // Filled body in the primary colour, translucent head in the secondary colour.
  const Bulk = () => (
    <G transform="translate(4 2)">
      <Path
        d="M8,0C3.684,0,0,.68,0,3.4S3.661,6.825,8,6.825c4.313,0,8-.679,8-3.4S12.334,0,8,0"
        transform="translate(0 13.175)"
        fill={color}
      />
      <Path
        d="M5.292,10.584A5.292,5.292,0,1,0,0,5.292a5.273,5.273,0,0,0,5.292,5.292"
        transform="translate(2.705 0)"
        fill={secondaryColor}
        opacity={opacity}
      />
    </G>
  );
  // Outlined silhouette (head as a Circle, body as a stroked Path).
  const Light = () => (
    <G transform="translate(4 2.5)">
      <Circle
        cx="4.778"
        cy="4.778"
        r="4.778"
        transform="translate(2.801 0)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M0,3.016a2.215,2.215,0,0,1,.22-.97A4.042,4.042,0,0,1,3.039.426,16.787,16.787,0,0,1,5.382.1,25.053,25.053,0,0,1,9.767.1a16.979,16.979,0,0,1,2.343.33c1.071.22,2.362.659,2.819,1.62a2.27,2.27,0,0,1,0,1.95c-.458.961-1.748,1.4-2.819,1.611a15.716,15.716,0,0,1-2.343.339A25.822,25.822,0,0,1,6.2,6a4.066,4.066,0,0,1-.815-.055,15.423,15.423,0,0,1-2.334-.339C1.968,5.4.687,4.957.22,4A2.279,2.279,0,0,1,0,3.016Z"
        transform="translate(0 13.185)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
    </G>
  );
  // Single filled path with deliberate gaps in the outline.
  const Broken = () => (
    <G transform="translate(4 2)">
      <Path
        d="M6.978,19.985a.717.717,0,0,1-.7-.738.737.737,0,0,1,.742-.7q.483.014.981.014c4.349,0,6.555-.751,6.555-2.233,0-1.5-2.206-2.255-6.555-2.255s-6.555.752-6.555,2.235c0,.485.173,1.188,1.675,1.694a.718.718,0,1,1-.464,1.359C.461,18.624,0,17.3,0,16.31c0-3.29,4.566-3.67,8-3.67,1.978,0,8,0,8,3.691C16,19.619,11.435,20,8,20Q7.48,20,6.978,19.985ZM2.613,5.354A5.387,5.387,0,1,1,8,10.706,5.377,5.377,0,0,1,2.613,5.354Zm1.446,0A3.942,3.942,0,1,0,8,1.437,3.933,3.933,0,0,0,4.059,5.354Z"
        fill={color}
      />
    </G>
  );
  // Outlined like Light, but the head uses the secondary colour and opacity.
  const TwoTone = () => (
    <G transform="translate(4 2)">
      <Circle
        cx="4.778"
        cy="4.778"
        r="4.778"
        transform="translate(2.801 0)"
        fill="none"
        stroke={secondaryColor}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
        opacity={opacity}
      />
      <Path
        d="M0,3.016a2.215,2.215,0,0,1,.22-.97A4.042,4.042,0,0,1,3.039.426,16.787,16.787,0,0,1,5.382.1,25.053,25.053,0,0,1,9.767.1a16.979,16.979,0,0,1,2.343.33c1.071.22,2.362.659,2.819,1.62a2.27,2.27,0,0,1,0,1.95c-.458.961-1.748,1.4-2.819,1.611a15.716,15.716,0,0,1-2.343.339A25.822,25.822,0,0,1,6.2,6a4.066,4.066,0,0,1-.815-.055,15.423,15.423,0,0,1-2.334-.339C1.968,5.4.687,4.957.22,4A2.279,2.279,0,0,1,0,3.016Z"
        transform="translate(0 13.185)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
    </G>
  );
  // Outlined variant with softer, curved geometry.
  const Curved = () => (
    <G transform="translate(5 2.4)">
      <Path
        d="M6.845,7.3C3.153,7.3,0,6.726,0,4.425S3.133,0,6.845,0c3.692,0,6.845,2.1,6.845,4.4S10.556,7.3,6.845,7.3Z"
        transform="translate(0 11.962)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
      <Path
        d="M4.387,8.774a4.372,4.372,0,1,0-.031,0Z"
        transform="translate(2.45 0)"
        fill="none"
        stroke={color}
        strokeLinecap="round"
        strokeLinejoin="round"
        strokeMiterlimit="10"
        strokeWidth={strokeWidth}
      />
    </G>
  );
  // Select the variant; unknown/missing `set` falls through to Light.
  switch (set) {
    case 'bold':
      return <Bold />;
    case 'bulk':
      return <Bulk />;
    case 'broken':
      return <Broken />;
    case 'two-tone':
      return <TwoTone />;
    case 'curved':
      return <Curved />;
    default:
      return <Light />;
  }
};
User.displayName = 'IconlyUser';
// withIcon wraps the glyph with the shared icon chrome (sizing, defaults).
export default withIcon(User);
|
import json
import os
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.models.query_utils import Q
from common.constants import DISCIPLINE_CODES
from common.models import OfficerAllegation
class Command(BaseCommand):
help = 'Generate sunburst csv data'
levels = [{
'name': 'Unsustained',
'condition': ~Q(final_finding='SU'),
'children': [{
'condition': Q(final_finding='DS'),
'name': 'Discharged',
}, {
'condition': Q(final_finding='EX'),
'name': 'Exonerated',
}, {
'condition': Q(final_finding='NA'),
'name': 'No Affidavit',
}, {
'condition': Q(final_finding='NC'),
'name': 'No Cooperation',
}, {
'condition': Q(final_finding='NS'),
'name': 'Not Sustained',
}, {
'condition': Q(final_finding='UN'),
'name': 'Unfounded',
}]
}, {
'name': 'Sustained',
'condition': Q(final_finding='SU'),
'children': [{
'name': 'Disciplined',
'condition': Q(final_outcome__in=DISCIPLINE_CODES),
'children': [{
'condition': Q(final_outcome='100'),
'name': 'Reprimand',
}, {
'condition': Q(final_outcome__in=[str(x).zfill(3) for x in range(1, 10)]), # 001 to 009
'name': '1 to 9 days',
}, {
'condition': Q(final_outcome__in=[str(x).zfill(3) for x in range(10, 31)]), # 010 to 031
'name': '10 to 30 days',
}, {
'condition': Q(final_outcome='200'),
'name': '30+ days',
}, {
'condition': Q(final_outcome='300'),
'name': 'Termination',
}, {
'condition': Q(final_outcome='400'),
'name': 'Separation',
}]
}, {
'name': 'Not Disciplined',
'condition': ~Q(final_outcome__in=DISCIPLINE_CODES),
'children': [{
'condition': Q(final_outcome=None),
'name': 'Unknown',
}, {
'condition': Q(final_outcome='000'),
'name': 'Noted',
}, {
'condition': Q(final_outcome='500'),
'name': 'Reinstated by Police Board',
}, {
'condition': Q(final_outcome='600'),
'name': 'No action taken',
}, {
'condition': Q(final_outcome='700'),
'name': 'Reinstated by Court Action',
}, {
'condition': Q(final_outcome='800'),
'name': 'Not served (resigned)',
}, {
'condition': Q(final_outcome='900'),
'name': 'Not served (inactive)',
}]
}]
}]
def fetch_output(self, levels, objects):
output = []
for level in levels:
results = objects.filter(level['condition'])
obj = {
'name': level['name'],
'size': results.count(),
}
if 'children' in level:
del obj['size']
obj['children'] = self.fetch_output(level['children'], results)
# TODO: recursive call could lead to slow code and memory issue
output.append(obj)
return output
def calculate_end(self, row):
if not row.get('children'):
return 0
return row['size'] - sum(r['size'] for r in row['children'])
def save(self, output):
    """Serialize the sunburst tree to common/static/sunburst.json."""
    output_file = os.path.join(
        settings.BASE_DIR, 'common/static/sunburst.json')
    with open(output_file, 'w') as f:
        json.dump({
            'name': 'Allegation',
            'children': output,
        }, f)
    # NOTE(review): shells out just to print the file -- looks like a
    # debugging leftover; consider removing or using logging instead.
    os.system("cat %s" % output_file)
def handle(self, *args, **options):
    """Command entry point: build and write the sunburst JSON from all
    OfficerAllegation rows using the class-level `levels` definition."""
    objects = OfficerAllegation.objects.all()
    output = self.fetch_output(self.levels, objects)
    self.save(output)
|
<reponame>zju-3dv/multi-person3dpose<gh_stars>100-1000
# --------------------------------------------------------
# Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by <NAME>
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from detection_opr.utils.bbox_transform import bbox_transform_inv, clip_boxes
from detection_opr.utils.nms_wrapper import nms
import tensorflow as tf
from config import cfg
def proposal_layer(rpn_cls_prob, rpn_bbox_pred, im_info, cfg_key, _feat_stride,
                   anchors, num_anchors, is_tfchannel=False):
    """A simplified version compared to fast/er RCNN.

    Converts RPN class probabilities and box deltas into scored, clipped,
    NMS-filtered region proposals. Returns (blob, scores) where blob is
    (N, 5) with a leading all-zero batch-index column (single image only).
    """
    if type(cfg_key) == bytes:
        cfg_key = cfg_key.decode('utf-8')
    if cfg_key == 'TRAIN':
        pre_nms_topN = cfg.TRAIN.RPN_PRE_NMS_TOP_N
        post_nms_topN = cfg.TRAIN.RPN_POST_NMS_TOP_N
        nms_thresh = cfg.TRAIN.RPN_NMS_THRESH
    else:
        pre_nms_topN = cfg.TEST.RPN_PRE_NMS_TOP_N
        post_nms_topN = cfg.TEST.RPN_POST_NMS_TOP_N
        nms_thresh = cfg.TEST.RPN_NMS_THRESH
    im_info = im_info[0]
    # Get the scores and bounding boxes
    if is_tfchannel:
        scores = rpn_cls_prob.reshape(-1, 2)
        scores = scores[:, 1]
    else:
        scores = rpn_cls_prob[:, :, :, num_anchors:]
    rpn_bbox_pred = rpn_bbox_pred.reshape((-1, 4))
    # NOTE(review): normalization always reads cfg.TRAIN, even when
    # cfg_key == 'TEST' (the original TRAIN-only guard is commented out
    # upstream); confirm this is intentional.
    if 'RPN_NORMALIZE_TARGETS' in cfg.TRAIN.keys() \
            and cfg.TRAIN.RPN_NORMALIZE_TARGETS:
        rpn_bbox_pred *= cfg.TRAIN.RPN_NORMALIZE_STDS
        rpn_bbox_pred += cfg.TRAIN.RPN_NORMALIZE_MEANS
    scores = scores.reshape((-1, 1))
    proposals = bbox_transform_inv(anchors, rpn_bbox_pred)
    proposals = clip_boxes(proposals, im_info[:2])
    # filter boxes below the (image-scale-adjusted) minimum size
    min_size = 0
    if cfg_key == 'TRAIN':
        if 'RPN_MIN_SIZE' in cfg.TRAIN.keys():
            min_size = cfg.TRAIN.RPN_MIN_SIZE
    elif cfg_key == 'TEST':
        if 'RPN_MIN_SIZE' in cfg.TEST.keys():
            min_size = cfg.TEST.RPN_MIN_SIZE
    if min_size > 0:
        keep = _filter_boxes(proposals, min_size * im_info[2])
        proposals = proposals[keep, :]
        scores = scores[keep]
    # Pick the top region proposals (highest objectness first)
    order = scores.ravel().argsort()[::-1]
    if pre_nms_topN > 0:
        order = order[:pre_nms_topN]
    proposals = proposals[order, :]
    scores = scores[order]
    # Non-maximal suppression
    keep = nms(np.hstack((proposals, scores)), nms_thresh)
    if post_nms_topN > 0:
        keep = keep[:post_nms_topN]
    proposals = proposals[keep, :]
    scores = scores[keep]
    # Only support single image as input
    batch_inds = np.zeros((proposals.shape[0], 1), dtype=np.float32)
    blob = np.hstack((batch_inds, proposals.astype(np.float32, copy=False)))
    return blob, scores.flatten()
def proposal_without_nms_layer(rpn_cls_prob, rpn_bbox_pred, im_info, cfg_key,
                               feat_stride, anchors, num_anchors,
                               is_tfchannel=False):
    """Like proposal_layer but stops before NMS.

    Returns (proposals, scores, proposals_addone); proposals_addone has
    x2/y2 incremented by one because TF's NMS treats the border as exclusive.
    """
    if type(cfg_key) == bytes:
        cfg_key = cfg_key.decode('utf-8')
    if cfg_key == 'TRAIN':
        pre_nms_topN = cfg.TRAIN.RPN_PRE_NMS_TOP_N
    else:
        pre_nms_topN = cfg.TEST.RPN_PRE_NMS_TOP_N
    im_info = im_info[0]
    # Get the scores and bounding boxes
    if is_tfchannel:
        scores = rpn_cls_prob.reshape(-1, 2)
        scores = scores[:, 1]
    else:
        scores = rpn_cls_prob[:, :, :, num_anchors:]
    rpn_bbox_pred = rpn_bbox_pred.reshape((-1, 4))
    # NOTE(review): reads cfg.TRAIN normalization flags even in TEST mode --
    # mirrors proposal_layer above; confirm intentional.
    if 'RPN_NORMALIZE_TARGETS' in cfg.TRAIN.keys() \
            and cfg.TRAIN.RPN_NORMALIZE_TARGETS:
        rpn_bbox_pred *= cfg.TRAIN.RPN_NORMALIZE_STDS
        rpn_bbox_pred += cfg.TRAIN.RPN_NORMALIZE_MEANS
    scores = scores.reshape((-1, 1))
    proposals = bbox_transform_inv(anchors, rpn_bbox_pred)
    proposals = clip_boxes(proposals, im_info[:2])
    # filter boxes below the (image-scale-adjusted) minimum size
    min_size = 0
    if cfg_key == 'TRAIN':
        if 'RPN_MIN_SIZE' in cfg.TRAIN.keys():
            min_size = cfg.TRAIN.RPN_MIN_SIZE
    elif cfg_key == 'TEST':
        if 'RPN_MIN_SIZE' in cfg.TEST.keys():
            min_size = cfg.TEST.RPN_MIN_SIZE
    if min_size > 0:
        keep = _filter_boxes(proposals, min_size * im_info[2])
        proposals = proposals[keep, :]
        scores = scores[keep]
    # Pick the top region proposals
    order = scores.ravel().argsort()[::-1]
    if pre_nms_topN > 0:
        order = order[:pre_nms_topN]
    proposals = proposals[order, :]
    scores = scores[order].flatten()
    # why add one: TF's NMS assumes x2,y2 do not include the border
    proposals_addone = np.array(proposals)
    proposals_addone[:, 2] += 1
    proposals_addone[:, 3] += 1
    return proposals, scores, proposals_addone
def _filter_boxes(boxes, min_size):
"""Remove all boxes with any side smaller than min_size."""
ws = boxes[:, 2] - boxes[:, 0] + 1
hs = boxes[:, 3] - boxes[:, 1] + 1
keep = np.where((ws >= min_size) & (hs >= min_size))[0]
return keep
|
def sort_dict(myDict):
    """Return a new dict with the same items, ordered by ascending key.

    Relies on dicts preserving insertion order (Python 3.7+); the original
    built two parallel lists and zipped them, which a single comprehension
    expresses directly.
    """
    return {key: myDict[key] for key in sorted(myDict)}
import { Pipe } from '@angular/core'
import { PipeTransform} from '@angular/core'
@Pipe({
name : 'genderPipe'
})
export class GenderPipe implements PipeTransform
{
transform(value : string, gender : string){
return (gender.toLowerCase()=='male' ? 'Mr.' : 'Miss.') + value;
}
} |
<filename>node_modules/@chakra-ui/hooks/dist/types/use-controllable.d.ts
import * as React from "react";
/**
 * Returns [isControlled, valueToUse]: `prop` wins when it is defined,
 * otherwise the internal `state` is used.
 */
export declare function useControllableProp<T>(prop: T | undefined, state: T): readonly [boolean, T];
export interface UseControllableStateProps<T> {
    /**
     * The value to be used in controlled mode
     */
    value?: T;
    /**
     * The initial value to be used, in uncontrolled mode
     */
    defaultValue?: T | (() => T);
    /**
     * The callback fired when the value changes
     */
    onChange?: (value: T) => void;
    /**
     * The function that determines if the state should be updated
     */
    shouldUpdate?: (prev: T, next: T) => boolean;
}
/**
 * React hook for controlling component state.
 * @param props
 */
export declare function useControllableState<T>(props: UseControllableStateProps<T>): [T, React.Dispatch<React.SetStateAction<T>>];
//# sourceMappingURL=use-controllable.d.ts.map
angular.module('pascalprecht.translate').factory('$translateCookieStorage', [
  '$cookieStore',
  function ($cookieStore) {
    // Adapter exposing the get/set storage contract expected by
    // angular-translate, backed by Angular's $cookieStore service.
    return {
      get: function (name) {
        return $cookieStore.get(name);
      },
      set: function (name, value) {
        $cookieStore.put(name, value);
      }
    };
  }
]);
<gh_stars>0
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.util.internal
/**
* Helper methods for working with Strings.
*/
object Strings {

  /** Converts camelCase to dashed form, e.g. "fooBar" -> "foo-bar". */
  def camelCaseToDashed(str: String): String = {
    val out = new StringBuilder(str.length)
    var idx = 0
    while (idx < str.length) {
      val ch = str.charAt(idx)
      if (ch.isUpper) {
        if (idx > 0) out += '-'
        out += ch.toLower
      } else {
        out += ch
      }
      idx += 1
    }
    out.toString
  }

  /** Lower-cases the first character (no-op for empty or already-lower input). */
  def firstCharToLower(s: String): String =
    if (s.isEmpty || s(0).isLower) s
    else s(0).toLower + s.substring(1)

  /** Drops `suffix` from the end of `str` when present; otherwise returns `str`. */
  def removeSuffix(str: String, suffix: String) =
    if (str.endsWith(suffix)) str.dropRight(suffix.length) else str

  /** Drops `prefix` from the start of `str` when present; otherwise returns `str`. */
  def removePrefix(str: String, prefix: String) =
    if (str.startsWith(prefix)) str.drop(prefix.length) else str

  /** Strips every trailing occurrence of `c`; "" when the string is all `c`. */
  def stripEndChars(s: String, c: Char): String = {
    var last = s.length - 1
    while (last >= 0 && s.charAt(last) == c) last -= 1
    s.substring(0, last + 1)
  }

  /** toString that maps null to the literal "null". */
  def nullSafeToString(a: Any) = if (a != null) a.toString else "null"
}
|
<gh_stars>1-10
import React, { Component } from 'react'
import PropTypes from 'prop-types'
import { Grid, Table, TableBody, TableCell, TableContainer, TableHead, TableRow, Paper } from '@material-ui/core'
// Styles
import './ComboReview.css'
// Read-only review step: shows the uploaded user/password files and the
// chosen mangling options before the combo job is submitted.
export class ComboReview extends Component {
  // NOTE(review): render dereferences userFile.name / passFile.name, so the
  // null defaults below imply the parent must only mount this component once
  // both files are set -- confirm against the caller.
  static get propTypes() {
    return {
      userFile : PropTypes.object,
      passFile : PropTypes.object,
      manglingOptions : PropTypes.object
    }
  }

  static defaultProps = {
    userFile : null,
    passFile : null,
    manglingOptions : null
  }

  // File sizes are shown as a rough estimate: bytes -> MB, rounded, +1 so
  // tiny files never display as "0 MB".
  render() {
    return (
      <div className='ComboReview'>
        <Grid container>
          <Grid item xs={12}>
            <p>Please review the following changes</p>
          </Grid>
          <Grid item container xs={12}>
            <h3>Your Files</h3>
          </Grid>
          <Grid item container xs={12}>
            <TableContainer elevation={2} className='ComboReview-Mangling-Container' component={Paper}>
              <Table>
                <TableHead>
                  <TableRow>
                    <TableCell>Filename</TableCell>
                    <TableCell align='right'>File Size (MB)</TableCell>
                  </TableRow>
                </TableHead>
                <TableBody>
                  <TableRow>
                    <TableCell>{this.props.userFile.name}</TableCell>
                    <TableCell align='right'>
                      ~ {Math.round(this.props.userFile.size / (1024 * 1024) + 1)} MB
                    </TableCell>
                  </TableRow>
                  <TableRow>
                    <TableCell>{this.props.passFile.name}</TableCell>
                    <TableCell align='right'>
                      ~ {Math.round(this.props.passFile.size / (1024 * 1024) + 1)} MB
                    </TableCell>
                  </TableRow>
                </TableBody>
              </Table>
            </TableContainer>
          </Grid>
          <Grid item container xs={12}>
            <h3>Mangling Options</h3>
          </Grid>
          <Grid item xs={12}>
            <TableContainer elevation={2} className='ComboReview-Mangling-Container' component={Paper}>
              <Table className='ComboReview-Table'>
                <TableHead>
                  <TableRow>
                    <TableCell>Option</TableCell>
                    <TableCell align='right'>Selected (Yes / No)</TableCell>
                  </TableRow>
                </TableHead>
                <TableBody>
                  {Object.entries(this.props.manglingOptions).map((rule) => (
                    <TableRow key={rule[0]}>
                      <TableCell component='th' scope='row'>
                        {rule[0]}
                      </TableCell>
                      <TableCell align='right'>{rule[1] ? 'Yes' : 'No'}</TableCell>
                    </TableRow>
                  ))}
                </TableBody>
              </Table>
            </TableContainer>
          </Grid>
        </Grid>
      </div>
    )
  }
}
export default ComboReview
|
require("./../css/estilos.css")
document.write('H.')
// BUG FIX: 'consoloe' was a typo that threw a ReferenceError at runtime.
console.log('Hola desde webpack, en un webpack.config')
|
#!/bin/bash
echo "Sourcing cse_install_distro.sh for Ubuntu"
removeMoby() {
    # Purge the Moby (Docker) engine and CLI, waiting for apt/dpkg locks first.
    wait_for_apt_locks
    retrycmd_if_failure 10 5 60 apt-get purge -y moby-engine moby-cli
}
removeContainerd() {
    # Purge the moby-containerd package, waiting for apt/dpkg locks first.
    wait_for_apt_locks
    retrycmd_if_failure 10 5 60 apt-get purge -y moby-containerd
}
installDeps() {
    # Register the Microsoft apt repo, dist-upgrade the base system, then
    # install the node dependency packages one at a time so a failing
    # package can be identified via journalctl before exiting.
    retrycmd_if_failure_no_stats 120 5 25 curl -fsSL https://packages.microsoft.com/config/ubuntu/${UBUNTU_RELEASE}/packages-microsoft-prod.deb > /tmp/packages-microsoft-prod.deb || exit $ERR_MS_PROD_DEB_DOWNLOAD_TIMEOUT
    retrycmd_if_failure 60 5 10 dpkg -i /tmp/packages-microsoft-prod.deb || exit $ERR_MS_PROD_DEB_PKG_ADD_FAIL
    aptmarkWALinuxAgent hold
    apt_get_update || exit $ERR_APT_UPDATE_TIMEOUT
    apt_get_dist_upgrade || exit $ERR_APT_DIST_UPGRADE_TIMEOUT
    for apt_package in apache2-utils apt-transport-https blobfuse=1.3.7 ca-certificates ceph-common cgroup-lite cifs-utils conntrack cracklib-runtime ebtables ethtool fuse git glusterfs-client htop iftop init-system-helpers iotop iproute2 ipset iptables jq libpam-pwquality libpwquality-tools mount nfs-common pigz socat sysfsutils sysstat traceroute util-linux xz-utils zip; do
      if ! apt_get_install 30 1 600 $apt_package; then
        journalctl --no-pager -u $apt_package
        exit $ERR_APT_INSTALL_TIMEOUT
      fi
    done
}
installGPUDrivers() {
    # Register NVIDIA's apt repo/key, install build prerequisites, download
    # the Tesla driver run-file for $GPU_DV, and pre-fetch nvidia-docker2.
    mkdir -p $GPU_DEST/tmp
    retrycmd_if_failure_no_stats 120 5 25 curl -fsSL https://nvidia.github.io/nvidia-docker/gpgkey > $GPU_DEST/tmp/aptnvidia.gpg || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    wait_for_apt_locks
    retrycmd_if_failure 120 5 25 apt-key add $GPU_DEST/tmp/aptnvidia.gpg || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    wait_for_apt_locks
    retrycmd_if_failure_no_stats 120 5 25 curl -fsSL https://nvidia.github.io/nvidia-docker/ubuntu${UBUNTU_RELEASE}/nvidia-docker.list > $GPU_DEST/tmp/nvidia-docker.list || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    wait_for_apt_locks
    retrycmd_if_failure_no_stats 120 5 25 cat $GPU_DEST/tmp/nvidia-docker.list > /etc/apt/sources.list.d/nvidia-docker.list || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    apt_get_update
    retrycmd_if_failure 30 5 3600 apt-get install -y linux-headers-$(uname -r) gcc make dkms || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    retrycmd_if_failure 30 5 60 curl -fLS https://us.download.nvidia.com/tesla/$GPU_DV/NVIDIA-Linux-x86_64-${GPU_DV}.run -o ${GPU_DEST}/nvidia-drivers-${GPU_DV} || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    tmpDir=$GPU_DEST/tmp
    # Download-only in a subshell so a cd/pipefail failure cannot leak out.
    if ! (
      set -e -o pipefail
      cd "${tmpDir}"
      retrycmd_if_failure 30 5 3600 apt-get download nvidia-docker2="${NVIDIA_DOCKER_VERSION}+${NVIDIA_DOCKER_SUFFIX}" || exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    ); then
      exit $ERR_GPU_DRIVERS_INSTALL_TIMEOUT
    fi
}
installSGXDrivers() {
    # Install the Intel SGX DCAP driver matching the Ubuntu release.
    echo "Installing SGX driver"
    local VERSION
    VERSION=$(grep DISTRIB_RELEASE /etc/*-release| cut -f 2 -d "=")
    case $VERSION in
    "18.04")
        SGX_DRIVER_URL="https://download.01.org/intel-sgx/dcap-1.2/linux/dcap_installers/ubuntuServer18.04/sgx_linux_x64_driver_1.12_c110012.bin"
        ;;
    "16.04")
        SGX_DRIVER_URL="https://download.01.org/intel-sgx/dcap-1.2/linux/dcap_installers/ubuntuServer16.04/sgx_linux_x64_driver_1.12_c110012.bin"
        ;;
    *)
        # BUG FIX: this arm was written as "*", which only matches the
        # literal string '*'; unsupported releases fell through the case
        # with SGX_DRIVER_URL unset. An unquoted * is the catch-all.
        echo "Version $VERSION is not supported"
        exit 1
        ;;
    esac
    local PACKAGES="make gcc dkms"
    wait_for_apt_locks
    retrycmd_if_failure 30 5 3600 apt-get -y install $PACKAGES || exit $ERR_SGX_DRIVERS_INSTALL_TIMEOUT
    local SGX_DRIVER
    SGX_DRIVER=$(basename $SGX_DRIVER_URL)
    local OE_DIR=/opt/azure/containers/oe
    mkdir -p ${OE_DIR}
    retrycmd_if_failure 120 5 25 curl -fsSL ${SGX_DRIVER_URL} -o ${OE_DIR}/${SGX_DRIVER} || exit $ERR_SGX_DRIVERS_INSTALL_TIMEOUT
    chmod a+x ${OE_DIR}/${SGX_DRIVER}
    ${OE_DIR}/${SGX_DRIVER} || exit $ERR_SGX_DRIVERS_START_FAIL
}
updateAptWithMicrosoftPkg() {
    # Add packages.microsoft.com (list + signing key) and refresh apt indexes.
    retrycmd_if_failure_no_stats 120 5 25 curl https://packages.microsoft.com/config/ubuntu/${UBUNTU_RELEASE}/prod.list > /tmp/microsoft-prod.list || exit $ERR_MOBY_APT_LIST_TIMEOUT
    retrycmd_if_failure 10 5 10 cp /tmp/microsoft-prod.list /etc/apt/sources.list.d/ || exit $ERR_MOBY_APT_LIST_TIMEOUT
    retrycmd_if_failure_no_stats 120 5 25 curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /tmp/microsoft.gpg || exit $ERR_MS_GPG_KEY_DOWNLOAD_TIMEOUT
    retrycmd_if_failure 10 5 10 cp /tmp/microsoft.gpg /etc/apt/trusted.gpg.d/ || exit $ERR_MS_GPG_KEY_DOWNLOAD_TIMEOUT
    apt_get_update || exit $ERR_APT_UPDATE_TIMEOUT
}
# CSE+VHD can dictate the containerd version, users don't care as long as it works
installStandaloneContainerd() {
    # Install moby-containerd at $1 (default 1.4.4) unless the currently
    # installed version is already >= the target; prefers a local .deb on
    # VHDs before falling back to packages.microsoft.com.
    CONTAINERD_VERSION=$1
    # azure-built runtimes have a "+azure" suffix in their version strings (i.e 1.4.1+azure). remove that here.
    CURRENT_VERSION=$(containerd -version | cut -d " " -f 3 | sed 's|v||' | cut -d "+" -f 1)
    # v1.4.1 is our lowest supported version of containerd
    # if there is no containerd_version input from RP, use the hardcoded default
    if [[ -z ${CONTAINERD_VERSION} ]]; then
        CONTAINERD_VERSION="1.4.4"
        echo "Containerd Version not specified, using default version: ${CONTAINERD_VERSION}"
    else
        echo "Using specified Containerd Version: ${CONTAINERD_VERSION}"
    fi
    if semverCompare ${CURRENT_VERSION:-"0.0.0"} ${CONTAINERD_VERSION}; then
        echo "currently installed containerd version ${CURRENT_VERSION} is greater than (or equal to) target base version ${CONTAINERD_VERSION}. skipping installStandaloneContainerd."
    else
        echo "installing containerd version ${CONTAINERD_VERSION}"
        removeMoby
        removeContainerd
        # if containerd version has been overridden then there should exist a local .deb file for it on aks VHDs (best-effort)
        # if no files found then try fetching from packages.microsoft repo
        if [[ "${IS_VHD:-"false"}" = true ]]; then
            CONTAINERD_DEB_TMP="moby-containerd_${CONTAINERD_VERSION/-/\~}+azure-1_amd64.deb"
            CONTAINERD_DEB_FILE="$CONTAINERD_DOWNLOADS_DIR/${CONTAINERD_DEB_TMP}"
            if [[ -f "${CONTAINERD_DEB_FILE}" ]]; then
                installStandaloneContainerdFromFile ${CONTAINERD_DEB_FILE} || exit $ERR_CONTAINERD_INSTALL_TIMEOUT
                return 0
            fi
        fi
        updateAptWithMicrosoftPkg
        apt_get_install 20 30 120 moby-containerd=${CONTAINERD_VERSION}* --allow-downgrades || exit $ERR_CONTAINERD_INSTALL_TIMEOUT
    fi
    ensureRunc
}
installStandaloneContainerdFromFile() {
    # Install a local containerd .deb ($1); on success, clean up the
    # downloads dir in the background so boot time is not blocked.
    CONTAINERD_DEB_FILE=$1
    wait_for_apt_locks
    retrycmd_if_failure 10 5 600 apt-get -y -f install ${CONTAINERD_DEB_FILE} --allow-downgrades
    if [[ $? -ne 0 ]]; then
        return 1
    fi
    rm -Rf $CONTAINERD_DOWNLOADS_DIR &
}
downloadContainerd() {
    # Fetch the moby-containerd .deb for version $1 into
    # $CONTAINERD_DOWNLOADS_DIR and export CONTAINERD_DEB_FILE.
    CONTAINERD_VERSION=$1
    # currently upstream maintains the package on a storage endpoint rather than an actual apt repo
    CONTAINERD_DOWNLOAD_URL="https://mobyartifacts.azureedge.net/moby/moby-containerd/${CONTAINERD_VERSION}+azure/bionic/linux_amd64/moby-containerd_${CONTAINERD_VERSION/-/\~}+azure-1_amd64.deb"
    mkdir -p $CONTAINERD_DOWNLOADS_DIR
    CONTAINERD_DEB_TMP=${CONTAINERD_DOWNLOAD_URL##*/}
    retrycmd_curl_file 120 5 60 "$CONTAINERD_DOWNLOADS_DIR/${CONTAINERD_DEB_TMP}" ${CONTAINERD_DOWNLOAD_URL} || exit $ERR_CONTAINERD_DOWNLOAD_TIMEOUT
    CONTAINERD_DEB_FILE="$CONTAINERD_DOWNLOADS_DIR/${CONTAINERD_DEB_TMP}"
}
installMoby() {
    # Install moby-engine/moby-cli at the pinned version unless the
    # installed docker is already >= that version.
    CURRENT_VERSION=$(dockerd --version | grep "Docker version" | cut -d "," -f 1 | cut -d " " -f 3 | cut -d "+" -f 1)
    local MOBY_VERSION="19.03.14"
    if semverCompare ${CURRENT_VERSION:-"0.0.0"} ${MOBY_VERSION}; then
        echo "currently installed moby-docker version ${CURRENT_VERSION} is greater than (or equal to) target base version ${MOBY_VERSION}. skipping installMoby."
    else
        removeMoby
        updateAptWithMicrosoftPkg
        MOBY_CLI=${MOBY_VERSION}
        # NOTE(review): special-case pinning the CLI to 3.0.3 when the engine
        # is 3.0.4 -- presumably a known packaging gap; confirm still needed.
        if [[ "${MOBY_CLI}" == "3.0.4" ]]; then
            MOBY_CLI="3.0.3"
        fi
        apt_get_install 20 30 120 moby-engine=${MOBY_VERSION}* moby-cli=${MOBY_CLI}* --allow-downgrades || exit $ERR_MOBY_INSTALL_TIMEOUT
    fi
    ensureRunc
}
ensureRunc() {
    # Pin runc to exactly TARGET_VERSION. The installed version string is
    # normalized ('-' -> '~') to match the package version format.
    CURRENT_VERSION=$(runc --version | head -n1 | sed 's/runc version //' | sed 's/-/~/')
    local TARGET_VERSION="1.0.0~rc95"
    # runc rc93 has a regression that causes pods to be stuck in containercreation
    # https://github.com/opencontainers/runc/issues/2865
    # Exact match (not semverCompare) because a downgrade must also be applied.
    if [ "${CURRENT_VERSION}" != "${TARGET_VERSION}" ]; then
        apt_get_install 20 30 120 moby-runc=${TARGET_VERSION}* --allow-downgrades || exit $ERR_RUNC_INSTALL_TIMEOUT
    fi
}
cleanUpGPUDrivers() {
    # Remove the GPU driver staging dir and the NVIDIA apt source entry.
    rm -Rf $GPU_DEST
    rm -f /etc/apt/sources.list.d/nvidia-docker.list
}
#EOF
|
package structure.trees;
/**
 * Minimal string-keyed symbol-table contract implemented by the tree
 * structures in this package.
 *
 * @param <T> type of the stored values
 */
public interface DataStructure<T> {
    /** Inserts (or overwrites) the value associated with {@code key}. */
    void put(String key, T value);
    /** Returns true when {@code key} is present. */
    boolean contains(String key);
    /** Returns the value stored under {@code key}. */
    T get(String key);
    /** Removes {@code key} and its value, if present. */
    void delete(String key);
}
###############################################################################
#
# $Id$ $Name$
#
# The contents of this file are subject to the AAF SDK Public Source
# License Agreement Version 2.0 (the "License"); You may not use this
# file except in compliance with the License. The License is available
# in AAFSDKPSL.TXT, or you may obtain a copy of the License from the
# Advanced Media Workflow Association, Inc., or its successor.
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and limitations
# under the License. Refer to Section 3.3 of the License for proper use
# of this Exhibit.
#
# WARNING: Please contact the Advanced Media Workflow Association,
# Inc., for more information about any additional licenses to
# intellectual property covering the AAF Standard that may be required
# to create and distribute AAF compliant products.
# (http://www.amwa.tv/policies).
#
# Copyright Notices:
# The Original Code of this file is Copyright 1998-2009, licensor of the
# Advanced Media Workflow Association. All rights reserved.
#
# The Initial Developer of the Original Code of this file and the
# licensor of the Advanced Media Workflow Association is
# Avid Technology.
# All rights reserved.
#
###############################################################################
# Emit the AAF object-map header to stdout: safe empty defaults for the
# AAF_*_OBJECT_MAP / *_OBJECT_ENTRY macros, then one entry per object from
# the PRIVATE_AAFOBJECTS and AAFOBJECTS environment lists.
echo "// This file contains invocations of the macros described below."
echo "//"
echo "// To use this file -"
echo "//   1) #define the macros to suit your usage"
echo "//   2) #include this file"
echo "//   3) #undef the macros"
echo ""
echo "// Default empty definitions so that you only have to define"
echo "// those macros you actually want to use."
echo "//"
echo \#ifndef AAF_BEGIN_OBJECT_MAP
echo \#define AAF_BEGIN_OBJECT_MAP\(x\)
echo \#endif
echo ""
echo \#ifndef AAF_END_OBJECT_MAP
echo \#define AAF_END_OBJECT_MAP\(\)
echo \#endif
echo ""
echo \#ifndef AAF_OBJECT_ENTRY
echo \#define AAF_OBJECT_ENTRY\(name\)
echo \#endif
echo ""
echo \#ifndef AAF_PRIVATE_OBJECT_ENTRY
echo \#define AAF_PRIVATE_OBJECT_ENTRY\(name\)
echo \#endif
echo ""
echo ""
echo "//"
echo "// Include all objects in the following table:"
echo "//"
echo AAF_BEGIN_OBJECT_MAP\(AAFObjectMap\)
# Private entries first, then the public ones.
for AAF in ${PRIVATE_AAFOBJECTS} ; do \
  echo "  AAF_PRIVATE_OBJECT_ENTRY("$AAF")"
done
for AAF in ${AAFOBJECTS} ; do \
  echo "  AAF_OBJECT_ENTRY("$AAF")"
done
echo AAF_END_OBJECT_MAP\(\)
|
import json

# Sample payload: json.loads maps JSON types onto dict/str/int/bool.
json_string = '{"name": "John", "age": 30, "is_married": false}'
data = json.loads(json_string)

# Pull the individual fields out in one pass.
name, age, is_married = (data[key] for key in ('name', 'age', 'is_married'))

print('Name: ', name)
print('Age: ', age)
print('Is Married: ', is_married)
<filename>puzzle_editing/management/commands/deploy_puzzles.py
import json
import os
import shutil
from zipfile import ZipFile
import git
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from puzzle_editing.models import PuzzlePostprod
class Command(BaseCommand):
    help = """Sync puzzles into Hunt Repository."""

    def handle(self, *args, **options):
        """Export every postprodded puzzle into the hunt repo and push.

        Refuses to run unless the checkout is clean and on master; wipes and
        rebuilds the repo's puzzle/ folder from each PuzzlePostprod's zip
        plus a generated metadata.json, then commits and pushes.
        """
        repo = git.Repo.init(settings.HUNT_REPO)
        if (
            repo.is_dirty()
            or len(repo.untracked_files) > 0
            or repo.head.reference.name != "master"
        ):
            raise CommandError("Repository is in a broken state.")

        origin = repo.remotes.origin
        origin.pull()

        puzzle_folder = os.path.join(settings.HUNT_REPO, "puzzle")
        shutil.rmtree(puzzle_folder)
        os.makedirs(puzzle_folder)

        for pp in PuzzlePostprod.objects.all():
            answers = pp.puzzle.answers.all()
            # BUG FIX: `answers` is a queryset of model instances, not
            # strings; ", ".join(answers) raised TypeError. Coerce each
            # answer to str before joining.
            answer = ", ".join(str(a) for a in answers) if answers else "???"
            metadata = {
                "puzzle_title": pp.puzzle.name,
                "credits": "by %s" % (pp.authors),
                "answer": answer,
                "puzzle_idea_id": pp.puzzle.id,
                "puzzle_slug": pp.slug,
            }
            puzzle_path = os.path.join(puzzle_folder, pp.slug)
            os.makedirs(puzzle_path)
            with ZipFile(pp.zip_file) as zf:
                zf.extractall(puzzle_path)
            with open(os.path.join(puzzle_path, "metadata.json"), "w") as mf:
                json.dump(metadata, mf)
            repo.git.add(puzzle_path)

        if repo.is_dirty() or len(repo.untracked_files) > 0:
            # Stage modifications and additions, then commit and push.
            repo.git.add(update=True)
            repo.git.add(A=True)
            repo.git.commit("-m", "Postprodding all puzzles.")
            origin.push()
|
<reponame>c-hive/team-contribution-calendar<filename>src/utils/GitLabUtils/GitLabUtils.js
/* eslint-disable no-console */
import * as proxy from "../Proxy/Proxy";
import * as javaScriptUtils from "../JavaScriptUtils/JavaScriptUtils";
/**
 * Fetches a GitLab user's contribution calendar through the proxy.
 * Resolves to { parsedCalendar, error: false, errorMessage: null } on
 * success, or { error: true, errorMessage } on a non-success HTTP status.
 */
export const getJsonFormattedCalendarAsync = async (
  proxyServerUrl,
  gitLabUsername
) => {
  const url = proxy.getGitLabProxyUrl(proxyServerUrl, gitLabUsername);
  const response = await fetch(url);

  if (!javaScriptUtils.isSuccess(response.status)) {
    return {
      error: true,
      errorMessage: `Could not fetch the calendar of ${gitLabUsername}.`
    };
  }

  const parsedCalendar = await response.json();

  return {
    parsedCalendar,
    error: false,
    errorMessage: null
  };
};
|
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.streampipes.model;
import org.apache.commons.lang.RandomStringUtils;
import io.fogsy.empire.annotations.Namespaces;
import io.fogsy.empire.annotations.RdfId;
import io.fogsy.empire.annotations.RdfProperty;
import io.fogsy.empire.annotations.RdfsClass;
import org.apache.streampipes.vocabulary.StreamPipes;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.OneToMany;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * RDF-annotated result message: a success flag, the name/id of the element
 * it refers to, and an optional list of {@link NotificationLd} details.
 */
@Namespaces({StreamPipes.NS_PREFIX, StreamPipes.NS})
@RdfsClass(StreamPipes.MESSAGE)
@Entity
public class MessageLd {

    private static final String prefix = "urn:streampipes.org:spi:";

    // Unique RDF id: prefix + lowercased class name + random 6-letter suffix.
    @RdfId
    @RdfProperty(StreamPipes.HAS_ELEMENT_NAME)
    private String elementId;

    @RdfProperty(StreamPipes.MESSAGE_SUCCESS)
    private boolean success;

    @RdfProperty(StreamPipes.MESSAGE_ELEMENT_NAME)
    private String elementName;

    @OneToMany(fetch = FetchType.EAGER,
            cascade = {CascadeType.ALL})
    @RdfProperty(StreamPipes.NOTIFICATIONS)
    private List<NotificationLd> notifications;

    public MessageLd() {
        this.elementId = prefix
                + this.getClass().getSimpleName().toLowerCase()
                + ":"
                + RandomStringUtils.randomAlphabetic(6);
        this.elementName = "";
    }

    /** Copy constructor; shares the notifications list but gets a fresh id. */
    public MessageLd(MessageLd other) {
        this();
        this.success = other.isSuccess();
        this.elementName = other.getElementName();
        this.notifications = other.getNotifications();
    }

    public MessageLd(boolean success){
        this();
        this.success = success;
        this.notifications = null;
    }

    public MessageLd(boolean success, List<NotificationLd> notifications) {
        this();
        this.success = success;
        this.notifications = notifications;
    }

    public MessageLd(boolean success, List<NotificationLd> notifications, String elementName) {
        this(success, notifications);
        this.elementName = elementName;
    }

    public MessageLd(boolean success, NotificationLd...notifications) {
        this();
        this.success = success;
        this.notifications = new ArrayList<>();
        this.notifications.addAll(Arrays.asList(notifications));
    }

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public List<NotificationLd> getNotifications() {
        return notifications;
    }

    public void setNotifications(List<NotificationLd> notifications) {
        this.notifications = notifications;
    }

    // NOTE(review): throws NullPointerException when the instance was built
    // via MessageLd(boolean), which leaves notifications null -- confirm
    // callers never mix those paths.
    public boolean addNotification(NotificationLd notification)
    {
        return notifications.add(notification);
    }

    public String getElementName() {
        return elementName;
    }

    public void setElementName(String elementName) {
        this.elementName = elementName;
    }

    public String getElementId() {
        return elementId;
    }

    public void setElementId(String elementId) {
        this.elementId = elementId;
    }
}
|
public String restoreWarnings(int[] warningCodes) {
// Restore the warning codes
// Assume a method called restoreWarningCode exists to restore a single warning code
for (int code : warningCodes) {
restoreWarningCode(code);
}
// Generate the message
StringBuilder message = new StringBuilder("Warnings ");
for (int i = 0; i < warningCodes.length; i++) {
message.append(warningCodes[i]);
if (i < warningCodes.length - 1) {
message.append(", ");
}
}
message.append(" have been restored.");
return message.toString();
} |
import { User } from '@prisma/client';
import { hash } from 'bcryptjs';
import auth from 'config/auth';
import { prisma } from 'infra/prisma/client';
import { sign } from 'jsonwebtoken';
import { emit } from 'process';
import { validPassword } from '../constants';
import { generateRandomEmail, pickRandomName } from '../utils';
// TODO(review): this factory needs improvement (translated from the original
// Portuguese note) -- e.g. stronger defaults and collision-free names.
/**
 * Creates and persists a test user, filling missing fields with defaults:
 * name 'teste', a random email, and a bcrypt-hashed password (cost 8).
 */
export async function createUser(email?: string, name?: string, password?: string): Promise<User> {
    const user = await prisma.user.create({
        data: {
            name: name || 'teste',
            email: email || generateRandomEmail(),
            password: await hash(password || '<PASSWORD>', 8),
        },
    });
    return user;
}
/** Looks a user up by their unique email; resolves to null when absent. */
export async function getUserByEmail(email: string): Promise<User | null> {
    return prisma.user.findUnique({ where: { email } });
}
/**
 * Issues a signed JWT for `user` (subject = user.id) using the configured
 * secret and expiry. Note the token payload itself is empty.
 */
export async function authenticateUser(user: User): Promise<string> {
    const token = sign({}, auth.secret_token, {
        subject: user.id,
        expiresIn: auth.expires_in_token,
    });
    return token;
}
|
#!/bin/bash
set -e
set -x

# First CLI argument: path to the target root filesystem.
target=$1

# Directory containing this script (kept; presumably used by callers that
# source this file -- TODO confirm).
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Quote the path so a target containing spaces does not word-split, and use
# -p so re-running the script is idempotent.
sudo mkdir -p "$target/root/upm_build"
|
const routes = require('express').Router();

// Catch-all route: send every request back to the SPA entry point.
routes.get('*', (request, response) => {
  response.redirect('/index.html');
});

module.exports = routes;
|
<reponame>Newbie9/NFT_mintingDapp
import React, { useState, useEffect } from 'react'
import { Flex, VStack, Box, HStack, Text, Accordion, AccordionItem, AccordionButton, AccordionPanel, AccordionIcon, Image, Spacer } from '@chakra-ui/react'
import cerceve1 from "../assets/images/cerceve1.png"
import nftGif from "../assets/images/nftGif.gif"
import sampleim1 from "../assets/images/sample_im1.png"
import sampleim2 from "../assets/images/sample_im2.png"
import sampleim3 from "../assets/images/sample_im3.png"
import sampleim4 from "../assets/images/sample_im4.png"
import roadmapimg from "../assets/images/roadmap.png"
import staking from "../assets/images/staking.PNG"
import {
Timeline,
Events,
UrlButton,
ImageEvent,
TextEvent,
YouTubeEvent,
themes, createTheme
} from '@merc/react-timeline';
import styled from "styled-components";
// Landing-page section rendering the staking banner and a themed vertical
// timeline of roadmap milestones.
function Roadmap() {
  // NOTE(review): click/handleClick/closeMobileMenu and innerWidth are set
  // but never used in this component -- likely copied from the navbar;
  // confirm before removing.
  const [click, setClick] = useState(false);
  const [button, setButton] = useState(false);
  const [innerWidth, setinnerWidth] = useState(0);
  const handleClick = () => setClick(!click);
  const closeMobileMenu = () => setClick(false)

  // Collapse to the mobile layout at <= 960px.
  const showButton = () => {
    if (window.innerWidth <= 960) {
      setButton(true);
    } else {
      setButton(false);
    }
  };

  useEffect(() => {
    showButton();
    setinnerWidth(window.innerWidth)
  }, []);

  // Custom react-timeline theme.
  // NOTE(review): "timelineTrack" declares "marginLeft" twice ("40rem" then
  // "33%") -- the later key wins in a JS object literal; same for the
  // duplicate left/marginLeft pairing on "marker". Confirm and tidy.
  const customTheme = createTheme(themes.default, {
    "timeline": {
      "a": {
        "color": "yellow"
      },
      "marginLeft": "330px",
      "alignSelf": "middle"
    },
    "timelineTrack": {
      "marginLeft": "40rem",
      "left": "50%",
      "width": "3px",
      "height": "100%",
      "backgroundColor": "#C6C5E7",
      "content": "''",
      "background": "linear-gradient(to bottom, #d85bcf 0%,#f0799d 100%);",
      "marginLeft": "33%"
    },
    "marker": {
      "backgroundColor": "#C6C5E7",
      "border": "2px solid #fff",
      "borderRadius": "50%",
      "width": "16px",
      "height": "16px",
      "zIndex": 100,
      "left": "1px",
      "marginLeft": "33%"
    },
    "card": {
      "borderRadius": "4px",
      "backgroundColor": "#eee",
      "color": "#52392E",
      "padding": "10px",
      "boxShadow": "0 4px 6px 0 hsla(0, 0%, 0%, 0.9)",
      "width": "100%",
      "maxWidth": "560px",
      "a": {
        "color": "#EC24B5"
      }
    },
    "date": {
      "backgroundColor": "#9221C1",
      "padding": "6px",
      "color": "#fff",
      "borderRadius": "4px",
      "fontWeight": 500,
      "fontSize": ".85rem"
    },
    "events": {
      "padding": "10px"
    },
    "event": {
      "marginBottom": "20px",
    },
    "textAtom": {}
  });

  // NOTE(review): this listener is re-registered on every render and never
  // removed -- consider moving into the useEffect with a cleanup.
  window.addEventListener('resize', showButton);

  return (
    <div>
      <VStack w='100%'>
        <Image src={staking} w='60%' alignSelf={'center'}>
        </Image>
        <VStack w='100%' marginBottom={'6em'} bgImage={roadmapimg} backgroundSize={'50% 100%'} backgroundPosition={'center center'} backgroundRepeat='no-repeat'>
          <Text mx='auto' alignSelf={'middle'} fontWeight="semibold" marginTop='10px' fontSize={!button ? '4xl' : '2xl'} color='#52392E'>
            Roadmap
          </Text>
          <Timeline theme={customTheme} >
            <Events>
              <TextEvent date="04/2022" text="Sophia The Cat collection opens to minting. Staking is open to earn $KAT coin. Minters will be able to mint from collection using $KAT coin" />
              <TextEvent date="05/2022" text="All charities that are promised will be done and shared with public" />
              <TextEvent date="06/2022" text="We drop a special 'Paintings on photos' Travel edition collection. This collection will be open to minting by $KAT coin also" />
              <TextEvent date="07/2022" text="New merchandise is out, special design of Sophia The Cat Shirts will be open to buy at printify.com" />
              <TextEvent date="08/2022" text="There will be a YouTube channel where you can find animated short films of Sophia's daily life." />
            </Events>
          </Timeline>
        </VStack>
      </VStack>
    </div>
  )
}
export default Roadmap
|
#!/bin/bash
# Thin wrapper: forward all CLI arguments straight to the PlantUML jar.
# 'exec' replaces this shell so signals reach the JVM directly.
exec java -jar /plantuml/plantuml.jar "$@"
|
#!/bin/sh
# Template-generated stop script: the {{ ... }} placeholders are filled in at
# generation time. Runs the environment setup and stop command from the root
# project directory, in a subshell so the caller's cwd is untouched.
(cd {{ rootProject.projectDir }} && {{ service.opts['environmentCommand'] }} && {{ service.opts['stopCommand'] }} -Pinstance.name={{ instance.name }})
package com.jira.client.web.common.enums;
import lombok.Getter;
import org.springframework.util.StringUtils;
import java.util.Objects;
/**
* @author XIAXINYU3
* @date 2020/12/3
*/
@Getter
public enum IssueType {
    // code = internal identifier, name = the JIRA display label.
    DEFECT("defect", "故障"),
    STORY("story", "故事");

    IssueType(String code, String name) {
        this.code = code;
        this.name = name;
    }

    private String code;
    private String name;

    /**
     * Resolves an IssueType from its JIRA display name.
     * Returns null for empty input; throws RuntimeException for an
     * unrecognized name.
     */
    public static IssueType of4Name(String name) {
        if (StringUtils.isEmpty(name)) {
            return null;
        }
        for (IssueType type : IssueType.values()) {
            if (Objects.equals(type.name, name)) {
                return type;
            }
        }
        throw new RuntimeException(String.format("不能识别JIRA问题类型:%s", name));
    }
}
|
import { Component } from '@angular/core';
import { MatDialog } from '@angular/material/dialog';
import { Title } from '@angular/platform-browser';
import { ContactsFormComponent } from './contacts-form/contacts-form.component';
@Component({
  selector: 'app-contacts',
  templateUrl: './contacts.component.html',
  styleUrls: ['./contacts.component.scss']
})
export class ContactsComponent {
  constructor(
    public dialog: MatDialog,
    private titleService: Title,
  ) {
    // Browser tab title for the contacts page.
    this.titleService.setTitle('Agenda - IGTI');
  }

  /**
   * Opens the contact form dialog pre-filled with `contact`
   * (pass null/undefined for a brand-new contact).
   */
  openContactForm(contact): void {
    // The dialog reference was previously stored in an unused local;
    // nothing subscribes to afterClosed, so just open the dialog.
    this.dialog.open(ContactsFormComponent, {
      width: '500px',
      data: contact
    });
  }
}
|
from backend.cache import sadd
from typing import List
def add_to_cache_set(set_name: str, members: List[str]) -> None:
    """Add each of `members` to the cache set `set_name`, one sadd per item."""
    for item in members:
        sadd(set_name, item)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.